From 580d54e9e8c2b5fc516b3549bf84e3b949e7bdde Mon Sep 17 00:00:00 2001 From: Padraig Gleeson Date: Fri, 26 Mar 2021 09:50:07 +0000 Subject: [PATCH 001/136] Bump to v0.2.56 for dev --- neuroml/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/neuroml/__init__.py b/neuroml/__init__.py index 1a7fb55e..97e344a1 100644 --- a/neuroml/__init__.py +++ b/neuroml/__init__.py @@ -1,6 +1,6 @@ from .nml.nml import * # allows importation of all neuroml classes -__version__ = '0.2.55' +__version__ = '0.2.56' __version_info__ = tuple(int(i) for i in __version__.split('.')) From e1b3651c0dfa3b8dcc2f13889b3135d6f0576acd Mon Sep 17 00:00:00 2001 From: Padraig Gleeson Date: Fri, 26 Mar 2021 16:53:50 +0000 Subject: [PATCH 002/136] Specify version as nml2.2 --- neuroml/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/neuroml/__init__.py b/neuroml/__init__.py index 97e344a1..7780512c 100644 --- a/neuroml/__init__.py +++ b/neuroml/__init__.py @@ -4,4 +4,4 @@ __version_info__ = tuple(int(i) for i in __version__.split('.')) -current_neuroml_version = "v2.1" +current_neuroml_version = "v2.2" From 39b9c85c7ec22589216a85109c7095aa7cb434aa Mon Sep 17 00:00:00 2001 From: Padraig Gleeson Date: Fri, 26 Mar 2021 17:05:25 +0000 Subject: [PATCH 003/136] Add v2.2 schema file --- neuroml/nml/NeuroML_v2.2.xsd | 3256 ++++++++++++++++++++++++++++++++++ neuroml/nml/README.md | 2 +- 2 files changed, 3257 insertions(+), 1 deletion(-) create mode 100644 neuroml/nml/NeuroML_v2.2.xsd diff --git a/neuroml/nml/NeuroML_v2.2.xsd b/neuroml/nml/NeuroML_v2.2.xsd new file mode 100644 index 00000000..f1e45958 --- /dev/null +++ b/neuroml/nml/NeuroML_v2.2.xsd @@ -0,0 +1,3256 @@ + + + + + + + + + + An id attribute for elements which need to be identified uniquely (normally just within their parent element). + + + + + + + + + + + A value for a physical quantity in NeuroML 2, e.g. 20, -60.0mV or 5nA + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + An id string for pointing to an entry in an annotation element related to a MIRIAM resource. Based on metaid of SBML + + + + + + + + + + + + An id string for pointing to an entry in the NeuroLex ontology. Use of this attribute is a shorthand for a full + RDF based reference to the MIRIAM Resource urn:miriam:neurolex, with an bqbiol:is qualifier + + + + + + + + + + + + + An attribute useful as id of segments, connections, etc: integer >=0 only! + + + + + + + + + + + + + + Integer >=1 only! + + + + + + + + + + + + + + + Double >0 only + + + + + + + + + Value which is either 0 or 1 + + + + + + + + + + + + + + + + + Textual human readable notes related to the element in question. It's useful to put these into + the NeuroML files instead of XML comments, as the notes can be extracted and repeated in the files to which the NeuroML is mapped. + + + + + + + + + Generic property with a tag and value + + + + + + + + + Placeholder for MIRIAM related metadata, among others. + + + + + + + + + + Contains an extension to NeuroML by creating custom LEMS ComponentType. + + + + + + + + + + + + + + + + + + + + + LEMS ComponentType for Constant. 
+ + + + + + + + + + + + + LEMS Exposure (ComponentType property) + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + LEMS ComponentType for Dynamics + + + + + + + + + + + + + LEMS ComponentType for DerivedVariable + + + + + + + + + + + + + + + + + + + + + LEMS ComponentType for ConditionalDerivedVariable + + + + + + + + + + + + + + + + + + + + + + + Float value restricted to between 1 and 0 + + + + + + + + + + + + + + + + The root NeuroML element. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Various types of cells which are defined in NeuroML 2. This list will be expanded... + + + + + + + + + + + + + + + + + + + + + + + Various types of cells which are defined in NeuroML 2 based on PyNN standard cell models. + + + + + + + + + + + + + + + + + Various types of synapse which are defined in NeuroML 2. This list will be expanded... + + + + + + + + + + + + + + + + + + + + Various types of synapse which are defined in NeuroML 2 based on PyNN standard cell/synapse models. + + + + + + + + + + + + Various types of inputs which are defined in NeuroML2. This list will be expanded... + + + + + + + + + + + + + + + + + + + + + + + + + + Various types of input which are defined in NeuroML 2 based on PyNN standard cell/synapse models. + + + + + + + + + + + Various types of concentration model which are defined in NeuroML 2. This list will be expanded... + + + + + + + + + + + + + + + + + + + + + + + + + + + Kinetic scheme based ion channel. + + + + + + + + + + + + + + + + + + + + + Note ionChannel and ionChannelHH are currently functionally identical. This is needed since many existing examples use ionChannel, some use ionChannelHH. + One of these should be removed, probably ionChannelHH. + + + + + + + + + + + + + + + + + + + + + + + + + + + Note ionChannel and ionChannelHH are currently functionally identical. This is needed since many existing examples use ionChannel, some use ionChannelHH. + One of these should be removed, probably ionChannelHH. + + + + + + + + + Same as ionChannel, but with a vShift parameter to change voltage activation of gates. The exact usage of vShift in expressions for rates is determined by the individual gates. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Note all sub elements for gateHHrates, gateHHratesTau, gateFractional etc. allowed here. Which are valid should be constrained by what type is set + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Should not be required, as it's present on the species element! + + + + + + + + + + + + + + + + + + Should not be required, as it's present on the species element! + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Gap junction/single electrical connection + + + + + + + + + + + + + Dummy synapse which emits no current. 
Used as presynaptic endpoint for analog synaptic connection (continuousConnection). + + + + + + + + + + + + + Behaves just like a one way gap junction. + + + + + + + + + + + + + + + Based on synapse in Methods of http://www.nature.com/neuro/journal/v7/n12/abs/nn1352.html. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + This is to prevent it conflicting with attribute c (lowercase) e.g. in izhikevichCell2007 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Should only be used if morphology element is outside the cell. + This points to the id of the morphology + + + + + + + Should only be used if biophysicalProperties element is outside the cell. + This points to the id of the biophysicalProperties + + + + + + + + + + + + + + + + + + + + + + + + + + Standalone element which is usually inside a single cell, but could be outside and + referenced by id. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + A 3D point with diameter. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Allowed metrics for InhomogeneousParam + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Standalone element which is usually inside a single cell, but could be outside and + referenced by id. + + + + + + + + + + + + + + + + + + + Standalone element which is usually inside a single cell, but could be outside and + referenced by id. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Using a thin extension of ValueAcrossSegOrSegGroup to facilitate library generation (e.g. libNeuroML) + + + + + + + + + + Using a thin extension of ValueAcrossSegOrSegGroup to facilitate library generation (e.g. libNeuroML) + + + + + + + + + + Using a thin extension of ValueAcrossSegOrSegGroup to facilitate library generation (e.g. libNeuroML) + + + + + + + + + + Using a thin extension of ValueAcrossSegOrSegGroup to facilitate library generation (e.g. libNeuroML) + + + + + + + + + + + + + + + + + + + + + + + + + + + Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here + TEMPORARILY since selecting all ca or na conducting channel populations/densities in a cell would be difficult otherwise. + Also, it will make it easier to set the correct native simulator value for erev (e.g. ek for ion = k in NEURON). + Currently a required attribute. + It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel + element. TODO: remove. + + + + + + + + + + + + + + + + + + + + + + + + Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here + TEMPORARILY since selecting all ca or na conducting channel populations/densities in a cell would be difficult otherwise. + Also, it will make it easier to set the correct native simulator value for erev (e.g. ek for ion = k in NEURON). 
+ Currently a required attribute. + It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel + element. TODO: remove. + + + + + + + + + + + + + + + + + + + + + + + Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here + TEMPORARILY since selecting all ca or na conducting channel populations/densities in a cell would be difficult otherwise. + Also, it will make it easier to set the correct native simulator value for erev (e.g. ek for ion = k in NEURON). + Currently a required attribute. + It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel + element. TODO: remove. + + + + + + + + + + + + + + + + + + + + + + + + Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here + TEMPORARILY since selecting all ca or na conducting channel populations/densities in a cell would be difficult otherwise. + Also, it will make it easier to set the correct native simulator value for erev (e.g. ek for ion = k in NEURON). + Currently a required attribute. + It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel + element. TODO: remove. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here + TEMPORARILY since selecting all ca or na conducting channel populations/densities in a cell would be difficult otherwise. + Also, it will make it easier to set the correct native simulator value for erev (e.g. ek for ion = k in NEURON). + Currently a required attribute. + It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel + element. TODO: remove. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here + TEMPORARILY since selecting all ca or na conducting channel populations/densities in a cell would be difficult otherwise. + Also, it will make it easier to set the correct native simulator value for erev (e.g. ek for ion = k in NEURON). + Currently a required attribute. + It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel + element. TODO: remove. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here + TEMPORARILY since selecting all ca or na conducting channel populations/densities in a cell would be difficult otherwise. + Also, it will make it easier to set the correct native simulator value for erev (e.g. ek for ion = k in NEURON). + Currently a required attribute. + It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel + element. TODO: remove. + + + + + + + + + + + + + + + + + + + + + + + + + + + Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here + TEMPORARILY since selecting all ca or na conducting channel populations/densities in a cell would be difficult otherwise. + Also, it will make it easier to set the correct native simulator value for erev (e.g. ek for ion = k in NEURON). + Currently a required attribute. 
+ It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel + element. TODO: remove. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Specifying the ion here again is redundant, the ion name should be the same as id. Kept for now + until LEMS implementation can select by id. TODO: remove. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Generates a constant current pulse of a certain amplitude (with dimensions for current) for a specified duration after a delay. + + + + + + + + + + + + + Generates a constant current pulse of a certain amplitude (non dimensional) for a specified duration after a delay. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Single explicit connection. Introduced to test connections in LEMS. Will probably be removed in favour of + connections wrapped in projection element + + + + + + + + + + + Base for projection (set of synaptic connections) between two populations + + + + + + + + + + + + Projection (set of synaptic connections) between two populations. Chemical/event based synaptic transmission + + + + + + + + + + + + + + + + Base of all synaptic connections (chemical/electrical/analog, etc.) inside projections + + + + + + + + + + + + Base of all synaptic connections with preCellId, postSegmentId, etc. + Note: this is not the best name for these attributes, since Id is superfluous, hence BaseConnectionNewFormat + + + + + + + + + + + + + + + + Base of all synaptic connections with preCell, postSegment, etc. + See BaseConnectionOldFormat + + + + + + + + + + + + + + + + + Individual chemical (event based) synaptic connection, weight==1 and no delay + + + + + + + + + + Individual synaptic connection with weight and delay + + + + + + + + + + + + + Projection between two populations consisting of electrical connections (gap junctions) + + + + + + + + + + + + + + + + Individual electrical synaptic connection + + + + + + + + + + + + Projection between two populations consisting of analog connections (e.g. graded synapses) + + + + + + + + + Projection between two populations consisting of analog connections (e.g. graded synapses). Includes setting of weight for the connection + + + + + + + + + + + + Projection between two populations consisting of analog connections (e.g. graded synapses) + + + + + + + + + + + + + + + + Individual continuous/analog synaptic connection + + + + + + + + + + + + + Individual continuous/analog synaptic connection - instance based + + + + + + + + + + Individual continuous/analog synaptic connection - instance based. 
Includes setting of _weight for the connection + + + + + + + + + + + + Single explicit input. Introduced to test inputs in LEMS. Will probably be removed in favour of + inputs wrapped in inputList element + + + + + + + + + + List of inputs to a population. Currents will be provided by the specified component. + + + + + + + + + + + + + + + + + Individual input to the cell specified by target + + + + + + + + + + + + Individual input to the cell specified by target. Includes setting of _weight for the connection + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Base element without ID specified *yet*, e.g. for an element with a particular requirement on its id which does not comply with NmlId (e.g. Segment needs nonNegativeInteger). + + + + + + + + + + + Anything which can have a unique (within its parent) id, which must be an integer zero or greater. + + + + + + + + + + + + + + + + Anything which can have a unique (within its parent) id of the form NmlId (spaceless combination of letters, numbers and underscore). + + + + + + + + + + + + + + + + + + Elements which can stand alone and be referenced by id, e.g. cell, morphology. + + + + + + + + + + + + + + + + + + diff --git a/neuroml/nml/README.md b/neuroml/nml/README.md index 9a295246..5eacc607 100644 --- a/neuroml/nml/README.md +++ b/neuroml/nml/README.md @@ -10,7 +10,7 @@ Unit tests should be run to confirm this. generateDS.py should be invoked in this folder (so that generateds_config.py can be located) with the following command (namespace def here will be mended when it's become stable) - generateDS.py -o nml.py --use-getter-setter=none --silence --user-methods=helper_methods NeuroML_v2.1.xsd + generateDS.py -o nml.py --use-getter-setter=none --silence --user-methods=helper_methods NeuroML_v2.2.xsd You may have to add the current folder to your PYTHONPATH, i.e. From f3af4ce5652888394c44dfcced5cb5cb6b1827d9 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Fri, 26 Mar 2021 17:10:47 +0000 Subject: [PATCH 004/136] docs: update links in Readme to new docs --- README.md | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index e02ec9c7..d033850a 100644 --- a/README.md +++ b/README.md @@ -25,7 +25,7 @@ Documentation is available at http://readthedocs.org/docs/libneuroml/en/latest/ For installation instructions, see http://readthedocs.org/docs/libneuroml/en/latest/install.html -For an overview of all NeuroML related libraries/documentation/publications see https://neuroml.org/getneuroml. +For an overview of all NeuroML related libraries/documentation/publications see https://docs.neuroml.org ## pyNeuroML @@ -36,7 +36,9 @@ pyNeuroML builds on: [libNeuroML](https://github.com/NeuralEnsemble/libNeuroML) ## Development process for libNeuroML -Most of the work happens in the [development branch](https://github.com/NeuralEnsemble/libNeuroML/tree/development). That branch is kept up to date with the development branches for [NeuroML 2](https://github.com/NeuroML/NeuroML2/tree/development) and related libraries. See https://neuroml.org/getneuroml for an overview of the various NeuroML libraries. 
+Most of the work happens in the [development branch](https://github.com/NeuralEnsemble/libNeuroML/tree/development). +That branch is kept up to date with the development branches for [NeuroML 2](https://github.com/NeuroML/NeuroML2/tree/development) and related libraries. +See https://docs.neuroml.org/ for an overview of the various NeuroML libraries. ## Changelog From b3fb24afb1d8e8ef61cb7a861b70179de3af2297 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Fri, 26 Mar 2021 17:14:26 +0000 Subject: [PATCH 005/136] docs: add 0.2.55 changelog --- README.md | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/README.md b/README.md index d033850a..9eab7005 100644 --- a/README.md +++ b/README.md @@ -42,6 +42,12 @@ See https://docs.neuroml.org/ for an overview of the various NeuroML libraries. ## Changelog +### version 0.2.55 + + - Patch release with minor changes under the hood. + - Use PyTest for testing. + - Enable CI on GitHub Actions + ### version 0.2.54 - Using Schema for NeuroML v2.1. Better compatibility with Python 3 From 7676c396f845160e33b71c025429613004e58e1e Mon Sep 17 00:00:00 2001 From: Padraig Gleeson Date: Fri, 9 Apr 2021 09:24:59 +0100 Subject: [PATCH 006/136] Regenerated --- .gitignore | 1 + neuroml/examples/test_files/complete.nml | 2 +- neuroml/examples/test_files/complete.nml.h5 | Bin 69930 -> 69930 bytes neuroml/examples/test_files/testh5.nml | 2 +- 4 files changed, 3 insertions(+), 2 deletions(-) diff --git a/.gitignore b/.gitignore index 0284d3ec..c1264a49 100644 --- a/.gitignore +++ b/.gitignore @@ -50,3 +50,4 @@ neuroml/test/*.h5 /mongoo .venv +/data diff --git a/neuroml/examples/test_files/complete.nml b/neuroml/examples/test_files/complete.nml index fa250ae2..c6ae5129 100644 --- a/neuroml/examples/test_files/complete.nml +++ b/neuroml/examples/test_files/complete.nml @@ -1,4 +1,4 @@ - + Lots of notes.... diff --git a/neuroml/examples/test_files/complete.nml.h5 b/neuroml/examples/test_files/complete.nml.h5 index 85ee88f20da02a95a84864aed0211e8e9162825a..f70ae47bbfe936fe6f887577e448343634728ae2 100644 GIT binary patch delta 139 zcmZ3rh-K9xmJKgC8O=7o;_Q}W;xCwd-p_IK4&_)O7(2qOR07D}eBQ521SZyzd>77M wQE^rnCcdKw!9KCzC`f#B{}w+{5Pkn-9he_~w+wE!#D{Zyj7FQE|8o`s040PtiU0rr delta 139 zcmZ3rh-K9xmJKgC8BI37;_Q}Wve2D;-p_IK4&_)O7(2qOR07D}eBQ521SZyzd>77M wQE^rnCcdKw!9KCzC`f#B{}w+{5Pkn-9he_~w+wE!#D{ZyjE0+^|8o`s0Nh$PX#fBK diff --git a/neuroml/examples/test_files/testh5.nml b/neuroml/examples/test_files/testh5.nml index 816ebc5a..bbcbf974 100644 --- a/neuroml/examples/test_files/testh5.nml +++ b/neuroml/examples/test_files/testh5.nml @@ -1,4 +1,4 @@ - + Root notes From 578ba4a5b23fe57074261fad11ee9674cba896c4 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Wed, 9 Jun 2021 16:15:43 +0100 Subject: [PATCH 007/136] fix(autodoc): ensure that all classes are included in RTD We now include classes that do not have a doc-string (those that do not have properties in the Schema). Additionally, to keep this page readable, we skip out lots of the validate* methods and members that users should not be using. --- doc/coreclasses.rst | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/doc/coreclasses.rst b/doc/coreclasses.rst index 6abbdd86..af88db24 100644 --- a/doc/coreclasses.rst +++ b/doc/coreclasses.rst @@ -7,8 +7,15 @@ Note: This module is included in the top level of the `neuroml` package, so you from neuroml import AdExIaFCell +.. 
+ To get the list of all the validate* methods and members to add to the exlude list, use this: + grep -Eo "def[[:space:]]*validate([[:alnum:]]|_)*|validate([[:alnum:]]|_)*patterns_" nml.py | sed -e 's/^.*def validate/validate/' | sort -h | uniq | tr '\n' ',' + + .. automodule:: neuroml.nml.nml :members: + :undoc-members: + :exclude-members: build, buildAttributes, buildChildren, exportAttributes, export, exportChildren, factory, hasContent_, member_data_items_, subclass, superclass, warn_count, validate_allowedSpaces,validate_BlockTypes,validate_channelTypes,validate_DoubleGreaterThanZero,validate_gateTypes,validate_MetaId,validate_MetaId_patterns_,validate_Metric,validate_networkTypes,validate_NeuroLexId,validate_NeuroLexId_patterns_,validate_Nml2Quantity,validate_Nml2Quantity_capacitance,validate_Nml2Quantity_capacitance_patterns_,validate_Nml2Quantity_concentration,validate_Nml2Quantity_concentration_patterns_,validate_Nml2Quantity_conductance,validate_Nml2Quantity_conductanceDensity,validate_Nml2Quantity_conductanceDensity_patterns_,validate_Nml2Quantity_conductance_patterns_,validate_Nml2Quantity_conductancePerVoltage,validate_Nml2Quantity_conductancePerVoltage_patterns_,validate_Nml2Quantity_current,validate_Nml2Quantity_currentDensity,validate_Nml2Quantity_currentDensity_patterns_,validate_Nml2Quantity_current_patterns_,validate_Nml2Quantity_length,validate_Nml2Quantity_length_patterns_,validate_Nml2Quantity_none,validate_Nml2Quantity_none_patterns_,validate_Nml2Quantity_patterns_,validate_Nml2Quantity_permeability,validate_Nml2Quantity_permeability_patterns_,validate_Nml2Quantity_pertime,validate_Nml2Quantity_pertime_patterns_,validate_Nml2Quantity_resistance,validate_Nml2Quantity_resistance_patterns_,validate_Nml2Quantity_rhoFactor,validate_Nml2Quantity_rhoFactor_patterns_,validate_Nml2Quantity_specificCapacitance,validate_Nml2Quantity_specificCapacitance_patterns_,validate_Nml2Quantity_temperature,validate_Nml2Quantity_temperature_patterns_,validate_Nml2Quantity_time,validate_Nml2Quantity_time_patterns_,validate_Nml2Quantity_voltage,validate_Nml2Quantity_voltage_patterns_,validate_NmlId,validate_NmlId_patterns_,validate_NonNegativeInteger,validate_Notes,validate_PlasticityTypes,validate_populationTypes,validate_PositiveInteger,validate_ZeroOrOne,validate_ZeroToOne From 8341edc83551dd39789d5e99c217d0b6fa1e44af Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Wed, 23 Jun 2021 17:29:51 +0100 Subject: [PATCH 008/136] feat: update 2.2 schema --- neuroml/nml/NeuroML_v2.2.xsd | 80 +++++++++++++++--------------------- 1 file changed, 32 insertions(+), 48 deletions(-) diff --git a/neuroml/nml/NeuroML_v2.2.xsd b/neuroml/nml/NeuroML_v2.2.xsd index f1e45958..3a834a1b 100644 --- a/neuroml/nml/NeuroML_v2.2.xsd +++ b/neuroml/nml/NeuroML_v2.2.xsd @@ -67,6 +67,14 @@ + + + + + + + + @@ -1782,42 +1790,34 @@ - Using a thin extension of ValueAcrossSegOrSegGroup to facilitate library generation (e.g. libNeuroML) + Membrane potential at which to emit a spiking event. Note, usually the spiking event will not be emitted again until the membrane potential has fallen below this value and rises again to cross it in a positive direction. - - - - + + - Using a thin extension of ValueAcrossSegOrSegGroup to facilitate library generation (e.g. libNeuroML) + Capacitance per unit area - - - - + + - Using a thin extension of ValueAcrossSegOrSegGroup to facilitate library generation (e.g. 
libNeuroML) + Explicitly set initial membrane potential for the cell - - - - + + - Using a thin extension of ValueAcrossSegOrSegGroup to facilitate library generation (e.g. libNeuroML) + The resistivity, or specific axial resistance, of the cytoplasm - - - - + + @@ -2099,17 +2099,6 @@ - - - - - - - - - - - @@ -2139,26 +2128,21 @@ - - + - + - - - - - Specifying the ion here again is redundant, the ion name should be the same as id. Kept for now - until LEMS implementation can select by id. TODO: remove. - - - - - - + + + Specifying the ion here again is redundant, the ion name should be the same as id. Kept for now + until LEMS implementation can select by id. TODO: remove. + + + - - + + + From 3b25682823069cb093de77a5f0001beb23c70a96 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Wed, 23 Jun 2021 17:29:58 +0100 Subject: [PATCH 009/136] feat: regenerate nml.py --- neuroml/nml/config.py | 2 +- neuroml/nml/nml.py | 670 +++++++++++++++++++++--------------------- 2 files changed, 338 insertions(+), 334 deletions(-) diff --git a/neuroml/nml/config.py b/neuroml/nml/config.py index 447f42f1..a07a19bc 100644 --- a/neuroml/nml/config.py +++ b/neuroml/nml/config.py @@ -1 +1 @@ -variables={'schema_name':'NeuroML_v2.1.xsd'} +variables={'schema_name':'NeuroML_v2.2.xsd'} diff --git a/neuroml/nml/nml.py b/neuroml/nml/nml.py index 9456bfb3..31ca753e 100644 --- a/neuroml/nml/nml.py +++ b/neuroml/nml/nml.py @@ -2,8 +2,8 @@ # -*- coding: utf-8 -*- # -# Generated Fri Mar 26 16:21:07 2021 by generateDS.py version 2.30.11. -# Python 2.7.18 (default, Feb 3 2021, 00:00:00) [GCC 11.0.0 20210130 (Red Hat 11.0.0-0)] +# Generated Wed Jun 23 17:35:13 2021 by generateDS.py version 2.30.11. +# Python 2.7.18 (default, May 19 2021, 00:00:00) [GCC 11.1.1 20210428 (Red Hat 11.1.1-1)] # # Command line options: # ('-o', 'nml.py') @@ -12,10 +12,10 @@ # ('--user-methods', 'helper_methods') # # Command line arguments: -# NeuroML_v2.1.xsd +# NeuroML_v2.2.xsd # # Command line: -# /home/asinha/.local/share/virtualenvs/generateds-2.7/bin/generateDS.py -o "nml.py" --use-getter-setter="none" --silence --user-methods="helper_methods" NeuroML_v2.1.xsd +# /home/asinha/.local/share/virtualenvs/generateds-2.7/bin/generateDS.py -o "nml.py" --use-getter-setter="none" --silence --user-methods="helper_methods" NeuroML_v2.2.xsd # # Current working directory (os.getcwd()): # nml @@ -4386,39 +4386,40 @@ def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): # end class MembraneProperties2CaPools -class ValueAcrossSegOrSegGroup(GeneratedsSuper): +class SpikeThresh(GeneratedsSuper): + """Membrane potential at which to emit a spiking event. 
Note, usually + the spiking event will not be emitted again until the membrane + potential has fallen below this value and rises again to cross + it in a positive direction.""" member_data_items_ = [ - MemberSpec_('value', 'Nml2Quantity', 0, 1, {'use': u'optional'}), + MemberSpec_('value', 'Nml2Quantity_voltage', 0, 1, {'use': u'optional'}), MemberSpec_('segment_groups', 'NmlId', 0, 1, {'use': u'optional'}), - MemberSpec_('segments', 'NmlId', 0, 1, {'use': u'optional'}), ] subclass = None superclass = None - def __init__(self, value=None, segment_groups='all', segments=None, extensiontype_=None, **kwargs_): + def __init__(self, value=None, segment_groups='all', **kwargs_): self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') self.value = _cast(None, value) self.segment_groups = _cast(None, segment_groups) - self.segments = _cast(None, segments) - self.extensiontype_ = extensiontype_ def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, ValueAcrossSegOrSegGroup) + CurrentSubclassModule_, SpikeThresh) if subclass is not None: return subclass(*args_, **kwargs_) - if ValueAcrossSegOrSegGroup.subclass: - return ValueAcrossSegOrSegGroup.subclass(*args_, **kwargs_) + if SpikeThresh.subclass: + return SpikeThresh.subclass(*args_, **kwargs_) else: - return ValueAcrossSegOrSegGroup(*args_, **kwargs_) + return SpikeThresh(*args_, **kwargs_) factory = staticmethod(factory) - def validate_Nml2Quantity(self, value): - # Validate type Nml2Quantity, a restriction on xs:string. + def validate_Nml2Quantity_voltage(self, value): + # Validate type Nml2Quantity_voltage, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_patterns_, )) - validate_Nml2Quantity_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*([_a-zA-Z0-9])*$']] + self.validate_Nml2Quantity_voltage_patterns_, value): + warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_voltage_patterns_, )) + validate_Nml2Quantity_voltage_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$']] def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. 
if value is not None and Validate_simpletypes_: @@ -4433,8 +4434,8 @@ def hasContent_(self): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ValueAcrossSegOrSegGroup', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('ValueAcrossSegOrSegGroup') + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SpikeThresh', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('SpikeThresh') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: @@ -4446,28 +4447,312 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='V showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ValueAcrossSegOrSegGroup') + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpikeThresh') if self.hasContent_(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ValueAcrossSegOrSegGroup', pretty_print=pretty_print) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SpikeThresh', pretty_print=pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ValueAcrossSegOrSegGroup'): + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SpikeThresh'): if self.value is not None and 'value' not in already_processed: already_processed.add('value') outfile.write(' value=%s' % (quote_attrib(self.value), )) if self.segment_groups != "all" and 'segment_groups' not in already_processed: already_processed.add('segment_groups') outfile.write(' segmentGroup=%s' % (quote_attrib(self.segment_groups), )) - if self.segments is not None and 'segments' not in already_processed: - already_processed.add('segments') - outfile.write(' segment=%s' % (quote_attrib(self.segments), )) - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') - outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ValueAcrossSegOrSegGroup', fromsubclass_=False, pretty_print=True): + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SpikeThresh', fromsubclass_=False, pretty_print=True): + pass + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('value', node) + if value is not None and 'value' not in already_processed: + already_processed.add('value') + self.value = value + self.validate_Nml2Quantity_voltage(self.value) # validate type Nml2Quantity_voltage + value = find_attr_value_('segmentGroup', node) + if value is not None and 'segmentGroup' not in already_processed: + already_processed.add('segmentGroup') + self.segment_groups = value + 
self.validate_NmlId(self.segment_groups) # validate type NmlId + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + pass +# end class SpikeThresh + + +class SpecificCapacitance(GeneratedsSuper): + """Capacitance per unit area""" + member_data_items_ = [ + MemberSpec_('value', 'Nml2Quantity_specificCapacitance', 0, 1, {'use': u'optional'}), + MemberSpec_('segment_groups', 'NmlId', 0, 1, {'use': u'optional'}), + ] + subclass = None + superclass = None + def __init__(self, value=None, segment_groups='all', **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.value = _cast(None, value) + self.segment_groups = _cast(None, segment_groups) + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, SpecificCapacitance) + if subclass is not None: + return subclass(*args_, **kwargs_) + if SpecificCapacitance.subclass: + return SpecificCapacitance.subclass(*args_, **kwargs_) + else: + return SpecificCapacitance(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_specificCapacitance(self, value): + # Validate type Nml2Quantity_specificCapacitance, a restriction on xs:string. + if value is not None and Validate_simpletypes_: + if not self.gds_validate_simple_patterns( + self.validate_Nml2Quantity_specificCapacitance_patterns_, value): + warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_specificCapacitance_patterns_, )) + validate_Nml2Quantity_specificCapacitance_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(F_per_m2|uF_per_cm2)$']] + def validate_NmlId(self, value): + # Validate type NmlId, a restriction on xs:string. 
+ if value is not None and Validate_simpletypes_: + if not self.gds_validate_simple_patterns( + self.validate_NmlId_patterns_, value): + warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) + validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] + def hasContent_(self): + if ( + + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SpecificCapacitance', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('SpecificCapacitance') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpecificCapacitance') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SpecificCapacitance', pretty_print=pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SpecificCapacitance'): + if self.value is not None and 'value' not in already_processed: + already_processed.add('value') + outfile.write(' value=%s' % (quote_attrib(self.value), )) + if self.segment_groups != "all" and 'segment_groups' not in already_processed: + already_processed.add('segment_groups') + outfile.write(' segmentGroup=%s' % (quote_attrib(self.segment_groups), )) + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SpecificCapacitance', fromsubclass_=False, pretty_print=True): + pass + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('value', node) + if value is not None and 'value' not in already_processed: + already_processed.add('value') + self.value = value + self.validate_Nml2Quantity_specificCapacitance(self.value) # validate type Nml2Quantity_specificCapacitance + value = find_attr_value_('segmentGroup', node) + if value is not None and 'segmentGroup' not in already_processed: + already_processed.add('segmentGroup') + self.segment_groups = value + self.validate_NmlId(self.segment_groups) # validate type NmlId + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + pass +# end class SpecificCapacitance + + +class InitMembPotential(GeneratedsSuper): + """Explicitly set initial membrane potential for the cell""" + member_data_items_ = [ + MemberSpec_('value', 'Nml2Quantity_voltage', 0, 1, {'use': u'optional'}), + MemberSpec_('segment_groups', 'NmlId', 0, 1, {'use': u'optional'}), + ] + subclass = None + superclass = None + def __init__(self, value=None, segment_groups='all', **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.value = _cast(None, value) + self.segment_groups = _cast(None, segment_groups) + def factory(*args_, **kwargs_): + if 
CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, InitMembPotential) + if subclass is not None: + return subclass(*args_, **kwargs_) + if InitMembPotential.subclass: + return InitMembPotential.subclass(*args_, **kwargs_) + else: + return InitMembPotential(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_voltage(self, value): + # Validate type Nml2Quantity_voltage, a restriction on xs:string. + if value is not None and Validate_simpletypes_: + if not self.gds_validate_simple_patterns( + self.validate_Nml2Quantity_voltage_patterns_, value): + warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_voltage_patterns_, )) + validate_Nml2Quantity_voltage_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$']] + def validate_NmlId(self, value): + # Validate type NmlId, a restriction on xs:string. + if value is not None and Validate_simpletypes_: + if not self.gds_validate_simple_patterns( + self.validate_NmlId_patterns_, value): + warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) + validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] + def hasContent_(self): + if ( + + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='InitMembPotential', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('InitMembPotential') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='InitMembPotential') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='InitMembPotential', pretty_print=pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='InitMembPotential'): + if self.value is not None and 'value' not in already_processed: + already_processed.add('value') + outfile.write(' value=%s' % (quote_attrib(self.value), )) + if self.segment_groups != "all" and 'segment_groups' not in already_processed: + already_processed.add('segment_groups') + outfile.write(' segmentGroup=%s' % (quote_attrib(self.segment_groups), )) + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='InitMembPotential', fromsubclass_=False, pretty_print=True): + pass + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('value', node) + if value is not None and 'value' not in already_processed: + already_processed.add('value') + self.value = value + self.validate_Nml2Quantity_voltage(self.value) # validate type Nml2Quantity_voltage + value = find_attr_value_('segmentGroup', 
node) + if value is not None and 'segmentGroup' not in already_processed: + already_processed.add('segmentGroup') + self.segment_groups = value + self.validate_NmlId(self.segment_groups) # validate type NmlId + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + pass +# end class InitMembPotential + + +class Resistivity(GeneratedsSuper): + """The resistivity, or specific axial resistance, of the cytoplasm""" + member_data_items_ = [ + MemberSpec_('value', 'Nml2Quantity_resistivity', 0, 1, {'use': u'optional'}), + MemberSpec_('segment_groups', 'NmlId', 0, 1, {'use': u'optional'}), + ] + subclass = None + superclass = None + def __init__(self, value=None, segment_groups='all', **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.value = _cast(None, value) + self.segment_groups = _cast(None, segment_groups) + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, Resistivity) + if subclass is not None: + return subclass(*args_, **kwargs_) + if Resistivity.subclass: + return Resistivity.subclass(*args_, **kwargs_) + else: + return Resistivity(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_resistivity(self, value): + # Validate type Nml2Quantity_resistivity, a restriction on xs:string. + if value is not None and Validate_simpletypes_: + if not self.gds_validate_simple_patterns( + self.validate_Nml2Quantity_resistivity_patterns_, value): + warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_resistivity_patterns_, )) + validate_Nml2Quantity_resistivity_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(ohm_cm|kohm_cm|ohm_m)$']] + def validate_NmlId(self, value): + # Validate type NmlId, a restriction on xs:string. 
+ if value is not None and Validate_simpletypes_: + if not self.gds_validate_simple_patterns( + self.validate_NmlId_patterns_, value): + warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) + validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] + def hasContent_(self): + if ( + + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Resistivity', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('Resistivity') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Resistivity') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Resistivity', pretty_print=pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Resistivity'): + if self.value is not None and 'value' not in already_processed: + already_processed.add('value') + outfile.write(' value=%s' % (quote_attrib(self.value), )) + if self.segment_groups != "all" and 'segment_groups' not in already_processed: + already_processed.add('segment_groups') + outfile.write(' segmentGroup=%s' % (quote_attrib(self.segment_groups), )) + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Resistivity', fromsubclass_=False, pretty_print=True): pass def build(self, node): already_processed = set() @@ -4481,24 +4766,15 @@ def buildAttributes(self, node, attrs, already_processed): if value is not None and 'value' not in already_processed: already_processed.add('value') self.value = value - self.validate_Nml2Quantity(self.value) # validate type Nml2Quantity + self.validate_Nml2Quantity_resistivity(self.value) # validate type Nml2Quantity_resistivity value = find_attr_value_('segmentGroup', node) if value is not None and 'segmentGroup' not in already_processed: already_processed.add('segmentGroup') self.segment_groups = value self.validate_NmlId(self.segment_groups) # validate type NmlId - value = find_attr_value_('segment', node) - if value is not None and 'segment' not in already_processed: - already_processed.add('segment') - self.segments = value - self.validate_NmlId(self.segments) # validate type NmlId - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') - self.extensiontype_ = value def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): pass -# end class ValueAcrossSegOrSegGroup +# end class Resistivity class VariableParameter(GeneratedsSuper): @@ -4673,7 +4949,7 @@ def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): # end class InhomogeneousValue -class Species(ValueAcrossSegOrSegGroup): +class Species(GeneratedsSuper): """Specifying the ion here again is redundant, the ion name should be the same as id. Kept for now until LEMS implementation can select by id. 
TODO: remove.""" @@ -4683,18 +4959,19 @@ class Species(ValueAcrossSegOrSegGroup): MemberSpec_('ion', 'NmlId', 0, 1, {'use': u'optional'}), MemberSpec_('initial_concentration', 'Nml2Quantity_concentration', 0, 0, {'use': u'required'}), MemberSpec_('initial_ext_concentration', 'Nml2Quantity_concentration', 0, 0, {'use': u'required'}), + MemberSpec_('segment_groups', 'NmlId', 0, 1, {'use': u'optional'}), ] subclass = None - superclass = ValueAcrossSegOrSegGroup - def __init__(self, value=None, segment_groups='all', segments=None, id=None, concentration_model=None, ion=None, initial_concentration=None, initial_ext_concentration=None, **kwargs_): + superclass = None + def __init__(self, id=None, concentration_model=None, ion=None, initial_concentration=None, initial_ext_concentration=None, segment_groups='all', **kwargs_): self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(Species, self).__init__(value, segment_groups, segments, **kwargs_) self.id = _cast(None, id) self.concentration_model = _cast(None, concentration_model) self.ion = _cast(None, ion) self.initial_concentration = _cast(None, initial_concentration) self.initial_ext_concentration = _cast(None, initial_ext_concentration) + self.segment_groups = _cast(None, segment_groups) def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -4722,7 +4999,7 @@ def validate_Nml2Quantity_concentration(self, value): validate_Nml2Quantity_concentration_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(mol_per_m3|mol_per_cm3|M|mM)$']] def hasContent_(self): if ( - super(Species, self).hasContent_() + ): return True else: @@ -4748,7 +5025,6 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='S else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Species'): - super(Species, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Species') if self.id is not None and 'id' not in already_processed: already_processed.add('id') outfile.write(' id=%s' % (quote_attrib(self.id), )) @@ -4764,8 +5040,10 @@ def exportAttributes(self, outfile, level, already_processed, namespaceprefix_=' if self.initial_ext_concentration is not None and 'initial_ext_concentration' not in already_processed: already_processed.add('initial_ext_concentration') outfile.write(' initialExtConcentration=%s' % (quote_attrib(self.initial_ext_concentration), )) + if self.segment_groups != "all" and 'segment_groups' not in already_processed: + already_processed.add('segment_groups') + outfile.write(' segmentGroup=%s' % (quote_attrib(self.segment_groups), )) def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Species', fromsubclass_=False, pretty_print=True): - super(Species, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) pass def build(self, node): already_processed = set() @@ -4800,9 +5078,12 @@ def buildAttributes(self, node, attrs, already_processed): already_processed.add('initialExtConcentration') self.initial_ext_concentration = value self.validate_Nml2Quantity_concentration(self.initial_ext_concentration) # validate type Nml2Quantity_concentration - super(Species, self).buildAttributes(node, attrs, already_processed) + value = find_attr_value_('segmentGroup', node) + if value is not None and 'segmentGroup' not in already_processed: + already_processed.add('segmentGroup') + 
self.segment_groups = value + self.validate_NmlId(self.segment_groups) # validate type NmlId def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(Species, self).buildChildren(child_, node, nodeName_, True) pass # end class Species @@ -11489,282 +11770,6 @@ def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): # end class ChannelPopulation -class Resistivity(ValueAcrossSegOrSegGroup): - """Using a thin extension of ValueAcrossSegOrSegGroup to facilitate - library generation (e.g. libNeuroML)""" - member_data_items_ = [ - ] - subclass = None - superclass = ValueAcrossSegOrSegGroup - def __init__(self, value=None, segment_groups='all', segments=None, **kwargs_): - self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(Resistivity, self).__init__(value, segment_groups, segments, **kwargs_) - def factory(*args_, **kwargs_): - if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, Resistivity) - if subclass is not None: - return subclass(*args_, **kwargs_) - if Resistivity.subclass: - return Resistivity.subclass(*args_, **kwargs_) - else: - return Resistivity(*args_, **kwargs_) - factory = staticmethod(factory) - def hasContent_(self): - if ( - super(Resistivity, self).hasContent_() - ): - return True - else: - return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Resistivity', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('Resistivity') - if imported_ns_def_ is not None: - namespacedef_ = imported_ns_def_ - if pretty_print: - eol_ = '\n' - else: - eol_ = '' - if self.original_tagname_ is not None: - name_ = self.original_tagname_ - showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Resistivity') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Resistivity', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Resistivity'): - super(Resistivity, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Resistivity') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Resistivity', fromsubclass_=False, pretty_print=True): - super(Resistivity, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - pass - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) - for child in node: - nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) - return self - def buildAttributes(self, node, attrs, already_processed): - super(Resistivity, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(Resistivity, self).buildChildren(child_, node, nodeName_, True) - pass -# end class Resistivity - - -class InitMembPotential(ValueAcrossSegOrSegGroup): - """Using a thin extension of ValueAcrossSegOrSegGroup to facilitate - library generation (e.g. 
libNeuroML)""" - member_data_items_ = [ - ] - subclass = None - superclass = ValueAcrossSegOrSegGroup - def __init__(self, value=None, segment_groups='all', segments=None, **kwargs_): - self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(InitMembPotential, self).__init__(value, segment_groups, segments, **kwargs_) - def factory(*args_, **kwargs_): - if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, InitMembPotential) - if subclass is not None: - return subclass(*args_, **kwargs_) - if InitMembPotential.subclass: - return InitMembPotential.subclass(*args_, **kwargs_) - else: - return InitMembPotential(*args_, **kwargs_) - factory = staticmethod(factory) - def hasContent_(self): - if ( - super(InitMembPotential, self).hasContent_() - ): - return True - else: - return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='InitMembPotential', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('InitMembPotential') - if imported_ns_def_ is not None: - namespacedef_ = imported_ns_def_ - if pretty_print: - eol_ = '\n' - else: - eol_ = '' - if self.original_tagname_ is not None: - name_ = self.original_tagname_ - showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='InitMembPotential') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='InitMembPotential', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='InitMembPotential'): - super(InitMembPotential, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='InitMembPotential') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='InitMembPotential', fromsubclass_=False, pretty_print=True): - super(InitMembPotential, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - pass - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) - for child in node: - nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) - return self - def buildAttributes(self, node, attrs, already_processed): - super(InitMembPotential, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(InitMembPotential, self).buildChildren(child_, node, nodeName_, True) - pass -# end class InitMembPotential - - -class SpecificCapacitance(ValueAcrossSegOrSegGroup): - """Using a thin extension of ValueAcrossSegOrSegGroup to facilitate - library generation (e.g. 
libNeuroML)""" - member_data_items_ = [ - ] - subclass = None - superclass = ValueAcrossSegOrSegGroup - def __init__(self, value=None, segment_groups='all', segments=None, **kwargs_): - self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(SpecificCapacitance, self).__init__(value, segment_groups, segments, **kwargs_) - def factory(*args_, **kwargs_): - if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, SpecificCapacitance) - if subclass is not None: - return subclass(*args_, **kwargs_) - if SpecificCapacitance.subclass: - return SpecificCapacitance.subclass(*args_, **kwargs_) - else: - return SpecificCapacitance(*args_, **kwargs_) - factory = staticmethod(factory) - def hasContent_(self): - if ( - super(SpecificCapacitance, self).hasContent_() - ): - return True - else: - return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SpecificCapacitance', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('SpecificCapacitance') - if imported_ns_def_ is not None: - namespacedef_ = imported_ns_def_ - if pretty_print: - eol_ = '\n' - else: - eol_ = '' - if self.original_tagname_ is not None: - name_ = self.original_tagname_ - showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpecificCapacitance') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SpecificCapacitance', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SpecificCapacitance'): - super(SpecificCapacitance, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpecificCapacitance') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SpecificCapacitance', fromsubclass_=False, pretty_print=True): - super(SpecificCapacitance, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - pass - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) - for child in node: - nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) - return self - def buildAttributes(self, node, attrs, already_processed): - super(SpecificCapacitance, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(SpecificCapacitance, self).buildChildren(child_, node, nodeName_, True) - pass -# end class SpecificCapacitance - - -class SpikeThresh(ValueAcrossSegOrSegGroup): - """Using a thin extension of ValueAcrossSegOrSegGroup to facilitate - library generation (e.g. 
libNeuroML)""" - member_data_items_ = [ - ] - subclass = None - superclass = ValueAcrossSegOrSegGroup - def __init__(self, value=None, segment_groups='all', segments=None, **kwargs_): - self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(SpikeThresh, self).__init__(value, segment_groups, segments, **kwargs_) - def factory(*args_, **kwargs_): - if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, SpikeThresh) - if subclass is not None: - return subclass(*args_, **kwargs_) - if SpikeThresh.subclass: - return SpikeThresh.subclass(*args_, **kwargs_) - else: - return SpikeThresh(*args_, **kwargs_) - factory = staticmethod(factory) - def hasContent_(self): - if ( - super(SpikeThresh, self).hasContent_() - ): - return True - else: - return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SpikeThresh', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('SpikeThresh') - if imported_ns_def_ is not None: - namespacedef_ = imported_ns_def_ - if pretty_print: - eol_ = '\n' - else: - eol_ = '' - if self.original_tagname_ is not None: - name_ = self.original_tagname_ - showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpikeThresh') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SpikeThresh', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SpikeThresh'): - super(SpikeThresh, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpikeThresh') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SpikeThresh', fromsubclass_=False, pretty_print=True): - super(SpikeThresh, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - pass - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) - for child in node: - nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) - return self - def buildAttributes(self, node, attrs, already_processed): - super(SpikeThresh, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(SpikeThresh, self).buildChildren(child_, node, nodeName_, True) - pass -# end class SpikeThresh - - class BiophysicalProperties2CaPools(Standalone): """Standalone element which is usually inside a single cell, but could be outside and referenced by id.""" @@ -19692,8 +19697,8 @@ def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): class IonChannel(IonChannelScalable): """Note ionChannel and ionChannelHH are currently functionally identical. This is needed since many existing examples use - ionChannel, some use ionChannelHH. NeuroML v2beta4 should remove - one of these, probably ionChannelHH.""" + ionChannel, some use ionChannelHH. 
One of these should be + removed, probably ionChannelHH.""" member_data_items_ = [ MemberSpec_('species', 'NmlId', 0, 1, {'use': u'optional'}), MemberSpec_('type', 'channelTypes', 0, 1, {'use': u'optional'}), @@ -22253,8 +22258,8 @@ def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): class IonChannelHH(IonChannel): """Note ionChannel and ionChannelHH are currently functionally identical. This is needed since many existing examples use - ionChannel, some use ionChannelHH. NeuroML v2beta4 should remove - one of these, probably ionChannelHH.""" + ionChannel, some use ionChannelHH. One of these should be + removed, probably ionChannelHH.""" member_data_items_ = [ ] subclass = None @@ -24031,7 +24036,6 @@ def main(): "TimedSynapticInput", "TransientPoissonFiringSynapse", "UnstructuredLayout", - "ValueAcrossSegOrSegGroup", "VariableParameter", "VoltageClamp", "VoltageClampTriple", From ddd18108d61ccef148fc4f954e6ccd4bcde78dc2 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Fri, 25 Jun 2021 13:12:15 +0100 Subject: [PATCH 010/136] chore(schema): sync with latest snapshot --- neuroml/nml/NeuroML_v2.2.xsd | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/neuroml/nml/NeuroML_v2.2.xsd b/neuroml/nml/NeuroML_v2.2.xsd index 3a834a1b..85d7be99 100644 --- a/neuroml/nml/NeuroML_v2.2.xsd +++ b/neuroml/nml/NeuroML_v2.2.xsd @@ -1792,7 +1792,7 @@ Membrane potential at which to emit a spiking event. Note, usually the spiking event will not be emitted again until the membrane potential has fallen below this value and rises again to cross it in a positive direction. - + @@ -1800,7 +1800,7 @@ Capacitance per unit area - + @@ -1808,7 +1808,7 @@ Explicitly set initial membrane potential for the cell - + @@ -1816,7 +1816,7 @@ The resistivity, or specific axial resistance, of the cytoplasm - + From 61b64d08da8a1a5c1a4711e32f2a5bf3b5e37a8e Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Fri, 25 Jun 2021 13:12:42 +0100 Subject: [PATCH 011/136] chore(nml.py): regenerate for updated schema file --- neuroml/nml/nml.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/neuroml/nml/nml.py b/neuroml/nml/nml.py index 31ca753e..e6465d1f 100644 --- a/neuroml/nml/nml.py +++ b/neuroml/nml/nml.py @@ -2,7 +2,7 @@ # -*- coding: utf-8 -*- # -# Generated Wed Jun 23 17:35:13 2021 by generateDS.py version 2.30.11. +# Generated Fri Jun 25 13:12:33 2021 by generateDS.py version 2.30.11. 
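# The schema change above makes the value attribute required on spikeThresh,
# specificCapacitance, initMembPotential and resistivity; a hedged sketch of
# constructing the corresponding regenerated classes (the quantities and the
# segment-group id are illustrative placeholders):
import neuroml
threshold = neuroml.SpikeThresh(value="-20mV")
capacitance = neuroml.SpecificCapacitance(value="1.0 uF_per_cm2")
v_init = neuroml.InitMembPotential(value="-65mV")
axial = neuroml.Resistivity(value="0.03 kohm_cm", segment_groups="soma_group")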
# Python 2.7.18 (default, May 19 2021, 00:00:00) [GCC 11.1.1 20210428 (Red Hat 11.1.1-1)] # # Command line options: @@ -4392,7 +4392,7 @@ class SpikeThresh(GeneratedsSuper): potential has fallen below this value and rises again to cross it in a positive direction.""" member_data_items_ = [ - MemberSpec_('value', 'Nml2Quantity_voltage', 0, 1, {'use': u'optional'}), + MemberSpec_('value', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), MemberSpec_('segment_groups', 'NmlId', 0, 1, {'use': u'optional'}), ] subclass = None @@ -4489,7 +4489,7 @@ def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): class SpecificCapacitance(GeneratedsSuper): """Capacitance per unit area""" member_data_items_ = [ - MemberSpec_('value', 'Nml2Quantity_specificCapacitance', 0, 1, {'use': u'optional'}), + MemberSpec_('value', 'Nml2Quantity_specificCapacitance', 0, 0, {'use': u'required'}), MemberSpec_('segment_groups', 'NmlId', 0, 1, {'use': u'optional'}), ] subclass = None @@ -4586,7 +4586,7 @@ def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): class InitMembPotential(GeneratedsSuper): """Explicitly set initial membrane potential for the cell""" member_data_items_ = [ - MemberSpec_('value', 'Nml2Quantity_voltage', 0, 1, {'use': u'optional'}), + MemberSpec_('value', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), MemberSpec_('segment_groups', 'NmlId', 0, 1, {'use': u'optional'}), ] subclass = None @@ -4683,7 +4683,7 @@ def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): class Resistivity(GeneratedsSuper): """The resistivity, or specific axial resistance, of the cytoplasm""" member_data_items_ = [ - MemberSpec_('value', 'Nml2Quantity_resistivity', 0, 1, {'use': u'optional'}), + MemberSpec_('value', 'Nml2Quantity_resistivity', 0, 0, {'use': u'required'}), MemberSpec_('segment_groups', 'NmlId', 0, 1, {'use': u'optional'}), ] subclass = None From c55d5db186c165c810696f129e59c0481d99294d Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Fri, 25 Jun 2021 11:00:17 +0100 Subject: [PATCH 012/136] feat(nml.py): add helper methods to Cell - get segments matching a string - get segment group by id - get segment groups matching a string --- neuroml/nml/helper_methods.py | 52 +++++++++++++++++++++++++++++++++++ 1 file changed, 52 insertions(+) diff --git a/neuroml/nml/helper_methods.py b/neuroml/nml/helper_methods.py index 7c491caa..ce224b3f 100644 --- a/neuroml/nml/helper_methods.py +++ b/neuroml/nml/helper_methods.py @@ -816,6 +816,26 @@ def get_segment(self, segment_id): raise Exception("Segment with id "+str(segment_id)+" not found in cell "+str(self.id)) + def get_segment_by_substring(self, substring): + # type (str) -> dict + """Get a dictionary of segment IDs and the segment matching the specified substring + + :param substring: substring to match + :type substring: str + :return: dictionary with segment ID as key, and segment as value + :raises Exception: if no segments are found + + """ + segments = {} + if substring: + for segment in self.morphology.segments:: + if substring in segment.id: + segments[segment.id] = segment + if len(segments) == 0: + raise Exception("Segments with id matching "+str(substring)+" not found in cell "+str(self.id)) + return segments + + # Get the proximal point of a segment, even the proximal field is None and # so the proximal point is on the parent (at a point set by fraction_along) def get_actual_proximal(self, segment_id): @@ -1080,6 +1100,38 @@ def get_ordered_segments_in_groups(self, return ord_segs + 
def get_segment_group(self, sg_id): + # type (str) -> SegmentGroup + """Return the SegmentGroup object for the specified segment group id. + + :param sg_id: id of segment group to find + :type sg_id: str + :returns: SegmentGroup object of specified ID + :raises Exception: if segment group is not found in cell + """ + if sg_id: + for sg in self.morphology.segment_groups: + if sg.id == sg_id: + return sg + + raise Exception("Segment group with id "+str(sg_id)+" not found in cell "+str(self.id)) + + def get_segment_group_by_substring(self, substring): + # type (str) -> dict + """Get a dictionary of segment group IDs and the segment groups matching the specified substring + + :param substring: substring to match + :type substring: str + :return: dictionary with segment group ID as key, and segment group as value + :raises Exception: if no segment groups are not found in cell + """ + sgs = {} + for sg in self.morphology.segment_groups: + if substring in sg.id: + sgs[sg.id] = sg + if len(sgs) == 0: + raise Exception("Segment group with id matching "+str(substring)+" not found in cell "+str(self.id)) + return sgs def summary(self): From dd740c93b61905eb16bc2646fe46835bcfaee783 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Fri, 25 Jun 2021 11:01:16 +0100 Subject: [PATCH 013/136] chore: ignore mypy cache --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index c1264a49..84a53868 100644 --- a/.gitignore +++ b/.gitignore @@ -51,3 +51,4 @@ neuroml/test/*.h5 .venv /data +.mypy_cache/ From 9d6f5b38978abe08f62ac342d03e3523c70042af Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Fri, 25 Jun 2021 16:13:59 +0100 Subject: [PATCH 014/136] improvement(typing): improve typing hints in helper methods --- neuroml/nml/helper_methods.py | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/neuroml/nml/helper_methods.py b/neuroml/nml/helper_methods.py index ce224b3f..94376f09 100644 --- a/neuroml/nml/helper_methods.py +++ b/neuroml/nml/helper_methods.py @@ -802,6 +802,7 @@ def __str__(self): # Get segment object by its id def get_segment(self, segment_id): + # type: (str) -> Segment """Get segment object by its id :param segment_id: ID of segment @@ -817,7 +818,7 @@ def get_segment(self, segment_id): raise Exception("Segment with id "+str(segment_id)+" not found in cell "+str(self.id)) def get_segment_by_substring(self, substring): - # type (str) -> dict + # type: (str) -> dict """Get a dictionary of segment IDs and the segment matching the specified substring :param substring: substring to match @@ -839,7 +840,7 @@ def get_segment_by_substring(self, substring): # Get the proximal point of a segment, even the proximal field is None and # so the proximal point is on the parent (at a point set by fraction_along) def get_actual_proximal(self, segment_id): - + # type: (str) -> Point3DWithDiam """Get the proximal point of a segment. Get the proximal point of a segment, even the proximal field is None @@ -869,6 +870,7 @@ def get_actual_proximal(self, segment_id): return p def get_segment_length(self, segment_id): + # type: (str) -> float """Get the length of the segment. :param segment_id: ID of segment @@ -886,6 +888,7 @@ def get_segment_length(self, segment_id): return length def get_segment_surface_area(self, segment_id): + # type: (str) -> float """Get the surface area of the segment. 
:param segment_id: ID of the segment @@ -903,6 +906,7 @@ def get_segment_surface_area(self, segment_id): return temp_seg.surface_area def get_segment_volume(self, segment_id): + # type: (str) -> float """Get volume of segment :param segment_id: ID of the segment @@ -919,6 +923,7 @@ def get_segment_volume(self, segment_id): return temp_seg.volume def get_segment_ids_vs_segments(self): + # type: () -> Dict """Get a dictionary of segment IDs and the segments in the cell. :return: dictionary with segment ID as key, and segment as value @@ -933,6 +938,7 @@ def get_segment_ids_vs_segments(self): def get_all_segments_in_group(self, segment_group, assume_all_means_all=True): + # type: (SegmentGroup, bool) -> List[Segment] """Get all the segments in a segment group of the cell. :param segment_group: segment group to get all segments of @@ -979,6 +985,7 @@ def get_ordered_segments_in_groups(self, include_cumulative_lengths=False, include_path_lengths=False, path_length_metric="Path Length from root"): # Only option supported + # type: (List, bool, bool, bool, str) -> Dict """ Get ordered list of segments in specified groups @@ -1101,7 +1108,7 @@ def get_ordered_segments_in_groups(self, return ord_segs def get_segment_group(self, sg_id): - # type (str) -> SegmentGroup + # type: (str) -> SegmentGroup """Return the SegmentGroup object for the specified segment group id. :param sg_id: id of segment group to find @@ -1117,7 +1124,7 @@ def get_segment_group(self, sg_id): raise Exception("Segment group with id "+str(sg_id)+" not found in cell "+str(self.id)) def get_segment_group_by_substring(self, substring): - # type (str) -> dict + # type: (str) -> dict """Get a dictionary of segment group IDs and the segment groups matching the specified substring :param substring: substring to match From a42863c756b0e8e96f1de39016b4cc5e36447378 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Fri, 25 Jun 2021 16:15:54 +0100 Subject: [PATCH 015/136] chore(nml.py): regenerate --- neuroml/nml/nml.py | 63 ++++++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 61 insertions(+), 2 deletions(-) diff --git a/neuroml/nml/nml.py b/neuroml/nml/nml.py index e6465d1f..b33bbfe6 100644 --- a/neuroml/nml/nml.py +++ b/neuroml/nml/nml.py @@ -2,7 +2,7 @@ # -*- coding: utf-8 -*- # -# Generated Fri Jun 25 13:12:33 2021 by generateDS.py version 2.30.11. +# Generated Fri Jun 25 16:15:07 2021 by generateDS.py version 2.30.11. 
# Python 2.7.18 (default, May 19 2021, 00:00:00) [GCC 11.1.1 20210428 (Red Hat 11.1.1-1)] # # Command line options: @@ -17828,6 +17828,7 @@ def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): # Get segment object by its id def get_segment(self, segment_id): + # type: (str) -> Segment """Get segment object by its id :param segment_id: ID of segment @@ -17842,10 +17843,30 @@ def get_segment(self, segment_id): raise Exception("Segment with id "+str(segment_id)+" not found in cell "+str(self.id)) + def get_segment_by_substring(self, substring): + # type: (str) -> dict + """Get a dictionary of segment IDs and the segment matching the specified substring + + :param substring: substring to match + :type substring: str + :return: dictionary with segment ID as key, and segment as value + :raises Exception: if no segments are found + + """ + segments = {} + if substring: + for segment in self.morphology.segments:: + if substring in segment.id: + segments[segment.id] = segment + if len(segments) == 0: + raise Exception("Segments with id matching "+str(substring)+" not found in cell "+str(self.id)) + return segments + + # Get the proximal point of a segment, even the proximal field is None and # so the proximal point is on the parent (at a point set by fraction_along) def get_actual_proximal(self, segment_id): - + # type: (str) -> Point3DWithDiam """Get the proximal point of a segment. Get the proximal point of a segment, even the proximal field is None @@ -17875,6 +17896,7 @@ def get_actual_proximal(self, segment_id): return p def get_segment_length(self, segment_id): + # type: (str) -> float """Get the length of the segment. :param segment_id: ID of segment @@ -17892,6 +17914,7 @@ def get_segment_length(self, segment_id): return length def get_segment_surface_area(self, segment_id): + # type: (str) -> float """Get the surface area of the segment. :param segment_id: ID of the segment @@ -17909,6 +17932,7 @@ def get_segment_surface_area(self, segment_id): return temp_seg.surface_area def get_segment_volume(self, segment_id): + # type: (str) -> float """Get volume of segment :param segment_id: ID of the segment @@ -17925,6 +17949,7 @@ def get_segment_volume(self, segment_id): return temp_seg.volume def get_segment_ids_vs_segments(self): + # type: () -> Dict """Get a dictionary of segment IDs and the segments in the cell. :return: dictionary with segment ID as key, and segment as value @@ -17939,6 +17964,7 @@ def get_segment_ids_vs_segments(self): def get_all_segments_in_group(self, segment_group, assume_all_means_all=True): + # type: (SegmentGroup, bool) -> List[Segment] """Get all the segments in a segment group of the cell. :param segment_group: segment group to get all segments of @@ -17985,6 +18011,7 @@ def get_ordered_segments_in_groups(self, include_cumulative_lengths=False, include_path_lengths=False, path_length_metric="Path Length from root"): # Only option supported + # type: (List, bool, bool, bool, str) -> Dict """ Get ordered list of segments in specified groups @@ -18106,6 +18133,38 @@ def get_ordered_segments_in_groups(self, return ord_segs + def get_segment_group(self, sg_id): + # type: (str) -> SegmentGroup + """Return the SegmentGroup object for the specified segment group id. 
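# Hedged sketch of the per-segment helpers in this regenerated module;
# assumes `cell` is a Cell object and that a segment with id 0 exists in it:
seg_id = 0
print(cell.get_segment_length(seg_id))        # float, computed from the 3D points
print(cell.get_segment_surface_area(seg_id))  # float
print(cell.get_segment_volume(seg_id))        # float
print(cell.get_actual_proximal(seg_id))       # proximal point, taken from the parent if unset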
+ + :param sg_id: id of segment group to find + :type sg_id: str + :returns: SegmentGroup object of specified ID + :raises Exception: if segment group is not found in cell + """ + if sg_id: + for sg in self.morphology.segment_groups: + if sg.id == sg_id: + return sg + + raise Exception("Segment group with id "+str(sg_id)+" not found in cell "+str(self.id)) + + def get_segment_group_by_substring(self, substring): + # type: (str) -> dict + """Get a dictionary of segment group IDs and the segment groups matching the specified substring + + :param substring: substring to match + :type substring: str + :return: dictionary with segment group ID as key, and segment group as value + :raises Exception: if no segment groups are not found in cell + """ + sgs = {} + for sg in self.morphology.segment_groups: + if substring in sg.id: + sgs[sg.id] = sg + if len(sgs) == 0: + raise Exception("Segment group with id matching "+str(substring)+" not found in cell "+str(self.id)) + return sgs def summary(self): From b3bada8e197aa68248475c47483749c2e085dd9c Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Fri, 25 Jun 2021 16:20:16 +0100 Subject: [PATCH 016/136] fix: fix syntax typo --- neuroml/nml/helper_methods.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/neuroml/nml/helper_methods.py b/neuroml/nml/helper_methods.py index 94376f09..dbdac0be 100644 --- a/neuroml/nml/helper_methods.py +++ b/neuroml/nml/helper_methods.py @@ -829,7 +829,7 @@ def get_segment_by_substring(self, substring): """ segments = {} if substring: - for segment in self.morphology.segments:: + for segment in self.morphology.segments: if substring in segment.id: segments[segment.id] = segment if len(segments) == 0: From 239ae0d8a8da18b2210419225f2d1259c8f67147 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Fri, 25 Jun 2021 16:21:23 +0100 Subject: [PATCH 017/136] chore(nml.py): regenerate --- neuroml/nml/nml.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/neuroml/nml/nml.py b/neuroml/nml/nml.py index b33bbfe6..9ac4d577 100644 --- a/neuroml/nml/nml.py +++ b/neuroml/nml/nml.py @@ -2,7 +2,7 @@ # -*- coding: utf-8 -*- # -# Generated Fri Jun 25 16:15:07 2021 by generateDS.py version 2.30.11. +# Generated Fri Jun 25 16:21:11 2021 by generateDS.py version 2.30.11. 
# Python 2.7.18 (default, May 19 2021, 00:00:00) [GCC 11.1.1 20210428 (Red Hat 11.1.1-1)] # # Command line options: @@ -17855,7 +17855,7 @@ def get_segment_by_substring(self, substring): """ segments = {} if substring: - for segment in self.morphology.segments:: + for segment in self.morphology.segments: if substring in segment.id: segments[segment.id] = segment if len(segments) == 0: From 39fd4acf657ca789ef08bb8b910d7cb8e6f7d420 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Fri, 25 Jun 2021 16:57:42 +0100 Subject: [PATCH 018/136] improvement(nml.py): improve helper method names --- neuroml/nml/helper_methods.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/neuroml/nml/helper_methods.py b/neuroml/nml/helper_methods.py index dbdac0be..4d0e3337 100644 --- a/neuroml/nml/helper_methods.py +++ b/neuroml/nml/helper_methods.py @@ -817,7 +817,7 @@ def get_segment(self, segment_id): raise Exception("Segment with id "+str(segment_id)+" not found in cell "+str(self.id)) - def get_segment_by_substring(self, substring): + def get_segments_by_substring(self, substring): # type: (str) -> dict """Get a dictionary of segment IDs and the segment matching the specified substring @@ -1123,7 +1123,7 @@ def get_segment_group(self, sg_id): raise Exception("Segment group with id "+str(sg_id)+" not found in cell "+str(self.id)) - def get_segment_group_by_substring(self, substring): + def get_segment_groups_by_substring(self, substring): # type: (str) -> dict """Get a dictionary of segment group IDs and the segment groups matching the specified substring From f372389ba0c610e19642a24976808dff8192c8b9 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Fri, 25 Jun 2021 16:58:39 +0100 Subject: [PATCH 019/136] chore(nml.py): regenerate --- neuroml/nml/nml.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/neuroml/nml/nml.py b/neuroml/nml/nml.py index 9ac4d577..9f8a7cc7 100644 --- a/neuroml/nml/nml.py +++ b/neuroml/nml/nml.py @@ -2,7 +2,7 @@ # -*- coding: utf-8 -*- # -# Generated Fri Jun 25 16:21:11 2021 by generateDS.py version 2.30.11. +# Generated Fri Jun 25 16:58:24 2021 by generateDS.py version 2.30.11. 
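# A short usage sketch for the renamed Cell helpers above; "cell.nml", the
# "soma_group" id and the "dend" substring are illustrative placeholders:
from neuroml.loaders import read_neuroml2_file

cell = read_neuroml2_file("cell.nml").cells[0]
soma_group = cell.get_segment_group("soma_group")            # exact id; raises if absent
dend_groups = cell.get_segment_groups_by_substring("dend")   # {group id: SegmentGroup}
print(cell.get_segment_ids_vs_segments())                    # {segment id: Segment}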
# Python 2.7.18 (default, May 19 2021, 00:00:00) [GCC 11.1.1 20210428 (Red Hat 11.1.1-1)] # # Command line options: @@ -17843,7 +17843,7 @@ def get_segment(self, segment_id): raise Exception("Segment with id "+str(segment_id)+" not found in cell "+str(self.id)) - def get_segment_by_substring(self, substring): + def get_segments_by_substring(self, substring): # type: (str) -> dict """Get a dictionary of segment IDs and the segment matching the specified substring @@ -18149,7 +18149,7 @@ def get_segment_group(self, sg_id): raise Exception("Segment group with id "+str(sg_id)+" not found in cell "+str(self.id)) - def get_segment_group_by_substring(self, substring): + def get_segment_groups_by_substring(self, substring): # type: (str) -> dict """Get a dictionary of segment group IDs and the segment groups matching the specified substring From 67824d2529ace364bb779308f32650740730d5bb Mon Sep 17 00:00:00 2001 From: Padraig Gleeson Date: Mon, 28 Jun 2021 12:46:25 +0100 Subject: [PATCH 020/136] Increment version to 0.2.57 --- neuroml/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/neuroml/__init__.py b/neuroml/__init__.py index 7780512c..06100b2a 100644 --- a/neuroml/__init__.py +++ b/neuroml/__init__.py @@ -1,6 +1,6 @@ from .nml.nml import * # allows importation of all neuroml classes -__version__ = '0.2.56' +__version__ = '0.2.57' __version_info__ = tuple(int(i) for i in __version__.split('.')) From 1bf6889a838dcddbf64a1a4f971cd8183ad73bcf Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Tue, 24 Aug 2021 11:01:42 +0100 Subject: [PATCH 021/136] feat: add text explaining correspondence between schema and core classes Fixes #107. --- doc/coreclasses.rst | 25 ++++++++++++++++++++++++- 1 file changed, 24 insertions(+), 1 deletion(-) diff --git a/doc/coreclasses.rst b/doc/coreclasses.rst index af88db24..16d01083 100644 --- a/doc/coreclasses.rst +++ b/doc/coreclasses.rst @@ -1,7 +1,29 @@ :mod:`nml` Module (NeuroML Core classes) ----------------------------------------- -Note: This module is included in the top level of the `neuroml` package, so you can use these classes by importing neuroml: +These NeuroML core classes are Python representations of the Component Types defined in the NeuroML standard at https://docs.neuroml.org/Userdocs/NeuroMLv2.html. +These can be used to build NeuroML models in Python, and these models can then be exported to the standard XML NeuroML representation. +These core classes also contain some utility functions to make it easier for users to carry out common tasks. + +Each NeuroML Component Type is represented here as a Python class. +Due to implementation limitations, whereas NeuroML Component Types use `lower camel case naming `_, the Python classes here use `upper camel case naming `__. +So, for example, the `adExIaFCell` Component Type in the NeuroML schema becomes the `AdExIaFCell` class here, and `expTwoSynapse` becomes the `ExpTwoSynapse` class. + +The `child` and `children` elements that NeuroML Component Types can have are represented in the Python classes as variables. +The variable names, to distinguish them from class names, use `snake case `__. +So for example, the `cell` NeuroML Component Type has a corresponding `Cell` Python class here. +The `biophysicalProperties` child Component Type in `cell` is represented as the `biophysical_properties` list variable in the `Cell` Python class. +The class signatures list all the child/children elements and text fields that the corresponding Component Type possesses. 
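For instance, following this convention, a cell and its child elements can be built from Python as in this minimal, hedged sketch (the ids are arbitrary placeholders):

::

    import neuroml
    cell = neuroml.Cell(id="cell0")
    cell.morphology = neuroml.Morphology(id="morphology0")
    cell.biophysical_properties = neuroml.BiophysicalProperties(id="biophys0")
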
+To again use the `Cell` class as an example, the construction signature is this: + +:: + + class neuroml.nml.nml.Cell(neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, morphology_attr=None, biophysical_properties_attr=None, morphology=None, biophysical_properties=None, extensiontype_=None, **kwargs_) + +As can be seen here, it includes both the `biophysical_properties` and `morphology` child elements as variables. + +Please see the examples in the `NeuroML documentation <>`__ to see usage examples of libNeuroML. +Please also note that this module is also included in the top level of the `neuroml` package, so you can use these classes by importing neuroml: :: @@ -15,6 +37,7 @@ Note: This module is included in the top level of the `neuroml` package, so you .. automodule:: neuroml.nml.nml :members: :undoc-members: + :inherited-members: :exclude-members: build, buildAttributes, buildChildren, exportAttributes, export, exportChildren, factory, hasContent_, member_data_items_, subclass, superclass, warn_count, validate_allowedSpaces,validate_BlockTypes,validate_channelTypes,validate_DoubleGreaterThanZero,validate_gateTypes,validate_MetaId,validate_MetaId_patterns_,validate_Metric,validate_networkTypes,validate_NeuroLexId,validate_NeuroLexId_patterns_,validate_Nml2Quantity,validate_Nml2Quantity_capacitance,validate_Nml2Quantity_capacitance_patterns_,validate_Nml2Quantity_concentration,validate_Nml2Quantity_concentration_patterns_,validate_Nml2Quantity_conductance,validate_Nml2Quantity_conductanceDensity,validate_Nml2Quantity_conductanceDensity_patterns_,validate_Nml2Quantity_conductance_patterns_,validate_Nml2Quantity_conductancePerVoltage,validate_Nml2Quantity_conductancePerVoltage_patterns_,validate_Nml2Quantity_current,validate_Nml2Quantity_currentDensity,validate_Nml2Quantity_currentDensity_patterns_,validate_Nml2Quantity_current_patterns_,validate_Nml2Quantity_length,validate_Nml2Quantity_length_patterns_,validate_Nml2Quantity_none,validate_Nml2Quantity_none_patterns_,validate_Nml2Quantity_patterns_,validate_Nml2Quantity_permeability,validate_Nml2Quantity_permeability_patterns_,validate_Nml2Quantity_pertime,validate_Nml2Quantity_pertime_patterns_,validate_Nml2Quantity_resistance,validate_Nml2Quantity_resistance_patterns_,validate_Nml2Quantity_rhoFactor,validate_Nml2Quantity_rhoFactor_patterns_,validate_Nml2Quantity_specificCapacitance,validate_Nml2Quantity_specificCapacitance_patterns_,validate_Nml2Quantity_temperature,validate_Nml2Quantity_temperature_patterns_,validate_Nml2Quantity_time,validate_Nml2Quantity_time_patterns_,validate_Nml2Quantity_voltage,validate_Nml2Quantity_voltage_patterns_,validate_NmlId,validate_NmlId_patterns_,validate_NonNegativeInteger,validate_Notes,validate_PlasticityTypes,validate_populationTypes,validate_PositiveInteger,validate_ZeroOrOne,validate_ZeroToOne From ddc23cb74bc8678153714e0f4a61f6a13b9961e1 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Tue, 24 Aug 2021 11:10:07 +0100 Subject: [PATCH 022/136] feat: correct rst usage --- doc/coreclasses.rst | 20 ++++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) diff --git a/doc/coreclasses.rst b/doc/coreclasses.rst index 16d01083..3d9106ef 100644 --- a/doc/coreclasses.rst +++ b/doc/coreclasses.rst @@ -1,28 +1,28 @@ :mod:`nml` Module (NeuroML Core classes) ----------------------------------------- -These NeuroML core classes are Python representations of the Component Types defined in the NeuroML standard at 
https://docs.neuroml.org/Userdocs/NeuroMLv2.html. +These NeuroML core classes are Python representations of the Component Types defined in the `NeuroML standard `__ . These can be used to build NeuroML models in Python, and these models can then be exported to the standard XML NeuroML representation. These core classes also contain some utility functions to make it easier for users to carry out common tasks. Each NeuroML Component Type is represented here as a Python class. Due to implementation limitations, whereas NeuroML Component Types use `lower camel case naming `_, the Python classes here use `upper camel case naming `__. -So, for example, the `adExIaFCell` Component Type in the NeuroML schema becomes the `AdExIaFCell` class here, and `expTwoSynapse` becomes the `ExpTwoSynapse` class. +So, for example, the :code:`adExIaFCell` Component Type in the NeuroML schema becomes the :code:`AdExIaFCell` class here, and :code:`expTwoSynapse` becomes the :code:`ExpTwoSynapse` class. -The `child` and `children` elements that NeuroML Component Types can have are represented in the Python classes as variables. +The :code:`child` and :code:`children` elements that NeuroML Component Types can have are represented in the Python classes as variables. The variable names, to distinguish them from class names, use `snake case `__. -So for example, the `cell` NeuroML Component Type has a corresponding `Cell` Python class here. -The `biophysicalProperties` child Component Type in `cell` is represented as the `biophysical_properties` list variable in the `Cell` Python class. +So for example, the :code:`cell` NeuroML Component Type has a corresponding :code:`Cell` Python class here. +The :code:`biophysicalProperties` child Component Type in :code:`cell` is represented as the :code:`biophysical_properties` list variable in the :code:`Cell` Python class. The class signatures list all the child/children elements and text fields that the corresponding Component Type possesses. -To again use the `Cell` class as an example, the construction signature is this: +To again use the :code:`Cell` class as an example, the construction signature is this: :: class neuroml.nml.nml.Cell(neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, morphology_attr=None, biophysical_properties_attr=None, morphology=None, biophysical_properties=None, extensiontype_=None, **kwargs_) -As can be seen here, it includes both the `biophysical_properties` and `morphology` child elements as variables. +As can be seen here, it includes both the :code:`biophysical_properties` and :code:`morphology` child elements as variables. -Please see the examples in the `NeuroML documentation <>`__ to see usage examples of libNeuroML. +Please see the examples in the `NeuroML documentation `__ to see usage examples of libNeuroML. Please also note that this module is also included in the top level of the `neuroml` package, so you can use these classes by importing neuroml: :: @@ -34,6 +34,10 @@ Please also note that this module is also included in the top level of the `neur grep -Eo "def[[:space:]]*validate([[:alnum:]]|_)*|validate([[:alnum:]]|_)*patterns_" nml.py | sed -e 's/^.*def validate/validate/' | sort -h | uniq | tr '\n' ',' + +API documentation +~~~~~~~~~~~~~~~~~ + .. 
automodule:: neuroml.nml.nml :members: :undoc-members: From 004a1a8df75f871faf014cfd80710b8e31b17a36 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Tue, 24 Aug 2021 11:10:22 +0100 Subject: [PATCH 023/136] fix: do not list inherited members It lists all the generateDS functions that do not need to be exposed to NeuroML users. --- doc/coreclasses.rst | 1 - 1 file changed, 1 deletion(-) diff --git a/doc/coreclasses.rst b/doc/coreclasses.rst index 3d9106ef..eac687f5 100644 --- a/doc/coreclasses.rst +++ b/doc/coreclasses.rst @@ -41,7 +41,6 @@ API documentation .. automodule:: neuroml.nml.nml :members: :undoc-members: - :inherited-members: :exclude-members: build, buildAttributes, buildChildren, exportAttributes, export, exportChildren, factory, hasContent_, member_data_items_, subclass, superclass, warn_count, validate_allowedSpaces,validate_BlockTypes,validate_channelTypes,validate_DoubleGreaterThanZero,validate_gateTypes,validate_MetaId,validate_MetaId_patterns_,validate_Metric,validate_networkTypes,validate_NeuroLexId,validate_NeuroLexId_patterns_,validate_Nml2Quantity,validate_Nml2Quantity_capacitance,validate_Nml2Quantity_capacitance_patterns_,validate_Nml2Quantity_concentration,validate_Nml2Quantity_concentration_patterns_,validate_Nml2Quantity_conductance,validate_Nml2Quantity_conductanceDensity,validate_Nml2Quantity_conductanceDensity_patterns_,validate_Nml2Quantity_conductance_patterns_,validate_Nml2Quantity_conductancePerVoltage,validate_Nml2Quantity_conductancePerVoltage_patterns_,validate_Nml2Quantity_current,validate_Nml2Quantity_currentDensity,validate_Nml2Quantity_currentDensity_patterns_,validate_Nml2Quantity_current_patterns_,validate_Nml2Quantity_length,validate_Nml2Quantity_length_patterns_,validate_Nml2Quantity_none,validate_Nml2Quantity_none_patterns_,validate_Nml2Quantity_patterns_,validate_Nml2Quantity_permeability,validate_Nml2Quantity_permeability_patterns_,validate_Nml2Quantity_pertime,validate_Nml2Quantity_pertime_patterns_,validate_Nml2Quantity_resistance,validate_Nml2Quantity_resistance_patterns_,validate_Nml2Quantity_rhoFactor,validate_Nml2Quantity_rhoFactor_patterns_,validate_Nml2Quantity_specificCapacitance,validate_Nml2Quantity_specificCapacitance_patterns_,validate_Nml2Quantity_temperature,validate_Nml2Quantity_temperature_patterns_,validate_Nml2Quantity_time,validate_Nml2Quantity_time_patterns_,validate_Nml2Quantity_voltage,validate_Nml2Quantity_voltage_patterns_,validate_NmlId,validate_NmlId_patterns_,validate_NonNegativeInteger,validate_Notes,validate_PlasticityTypes,validate_populationTypes,validate_PositiveInteger,validate_ZeroOrOne,validate_ZeroToOne From 3ca24070af835d91d26552a31194268e3f478ddd Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Tue, 24 Aug 2021 11:15:34 +0100 Subject: [PATCH 024/136] chore: use better section title --- doc/coreclasses.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/coreclasses.rst b/doc/coreclasses.rst index eac687f5..1e1cadd4 100644 --- a/doc/coreclasses.rst +++ b/doc/coreclasses.rst @@ -35,8 +35,8 @@ Please also note that this module is also included in the top level of the `neur -API documentation -~~~~~~~~~~~~~~~~~ +Class documentation +~~~~~~~~~~~~~~~~~~~~ .. 
automodule:: neuroml.nml.nml :members: From 86460910641445c5b1dea2fbe6adebe0d7c352ab Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Tue, 24 Aug 2021 11:57:23 +0100 Subject: [PATCH 025/136] docs(helper_methods): document helper methods --- neuroml/nml/helper_methods.py | 124 +++++++++++++++++++++++++++++++++- 1 file changed, 123 insertions(+), 1 deletion(-) diff --git a/neuroml/nml/helper_methods.py b/neuroml/nml/helper_methods.py index 4d0e3337..c4e48c9a 100644 --- a/neuroml/nml/helper_methods.py +++ b/neuroml/nml/helper_methods.py @@ -85,6 +85,11 @@ def show(self): source='''\ @property def num_segments(self): + """Get the number of segments included in this cell morphology. + + :returns: number of segments + :rtype: int + """ return len(self.segments) ''', class_names=("Morphology") @@ -95,6 +100,11 @@ def num_segments(self): source='''\ @property def length(self): + """Get the length of the segment. + + :returns: length of the segment + :rtype: float + """ if self.proximal==None: raise Exception('Cannot get length of segment '+str(self.id)+' using the length property, since no proximal point is set on it (the proximal point comes from the parent segment). Use the method get_segment_length(segment_id) on the cell instead.') @@ -127,6 +137,11 @@ def __repr__(self): source='''\ @property def volume(self): + """Get the volume of the segment. + + :returns: volume of segment + :rtype: float + """ from math import pi if self.proximal==None: @@ -159,6 +174,11 @@ def volume(self): @property def surface_area(self): + """Get the surface area of the segment. + + :returns: surface area of segment + :rtype: float + """ from math import pi from math import sqrt @@ -228,6 +248,13 @@ def __repr__(self): return str(self) def distance_to(self, other_3d_point): + """Find the distance between this point and another. + + :param other_3d_point: other 3D point to calculate distance to + :type other_3d_point: Point3DWithDiam + :returns: distance between the two points + :rtype: float + """ a_x = self.x a_y = self.y a_z = self.z @@ -256,35 +283,59 @@ def _get_cell_id(self, id_string): return int(id_string.split('/')[2]) def get_pre_cell_id(self): + """Get the ID of the pre-synaptic cell + + :returns: ID of pre-synaptic cell + :rtype: str + """ return self._get_cell_id(self.pre_cell_id) def get_post_cell_id(self): + """Get the ID of the post-synaptic cell + + :returns: ID of post-synaptic cell + :rtype: str + """ return self._get_cell_id(self.post_cell_id) def get_pre_segment_id(self): + """Get the ID of the pre-synpatic segment + + :returns: ID of pre-synaptic segment. + :rtype: str + """ return int(self.pre_segment_id) def get_post_segment_id(self): + """Get the ID of the post-synpatic segment + + :returns: ID of post-synaptic segment. 
+ :rtype: str + """ return int(self.post_segment_id) def get_pre_fraction_along(self): + """Get pre-synaptic fraction along information""" return float(self.pre_fraction_along) def get_post_fraction_along(self): + """Get post-synaptic fraction along information""" return float(self.post_fraction_along) def get_pre_info(self): + """Get pre-synaptic information summary""" return str(self.get_pre_cell_id())+(':'+str(self.get_pre_segment_id())+'('+ 'PERCENTAGE.5f'PERCENTAGEself.get_pre_fraction_along()+')' if self.get_pre_segment_id()!=0 or self.get_pre_fraction_along()!=0.5 else '') def get_post_info(self): + """Get post-synaptic information summary""" return str(self.get_post_cell_id())+(':'+str(self.get_post_segment_id())+'('+ 'PERCENTAGE.5f'PERCENTAGEself.get_post_fraction_along()+')' if self.get_post_segment_id()!=0 or self.get_post_fraction_along()!=0.5 else '') @@ -307,6 +358,11 @@ def __str__(self): ", weight: "+'PERCENTAGEf' PERCENTAGE (float(self.weight))+", delay: "+'PERCENTAGE.5f' PERCENTAGE (self.get_delay_in_ms())+" ms" def get_delay_in_ms(self): + """Get connection delay in milli seconds + + :returns: connection delay in milli seconds + :rtype: float + """ if 'ms' in self.delay: return float(self.delay[:-2].strip()) elif 's' in self.delay: @@ -343,6 +399,14 @@ def __str__(self): source='''\ def get_weight(self): + """Get the weight of the connection + + If a weight is not set (or is set to None), returns the default value + of 1.0. + + :returns: weight of connection or 1.0 if not set + :rtype: float + """ return float(self.weight) if self.weight!=None else 1.0 @@ -364,10 +428,20 @@ def _get_cell_id(self, id_string): return int(float(id_string)) def get_pre_cell_id(self): + """Get the ID of the pre-synaptic cell + + :returns: ID of pre-synaptic cell + :rtype: str + """ return self._get_cell_id(self.pre_cell) def get_post_cell_id(self): + """Get the ID of the post-synaptic cell + + :returns: ID of post-synaptic cell + :rtype: str + """ return self._get_cell_id(self.post_cell) @@ -466,27 +540,41 @@ def get_post_cell_id(self): return self._get_cell_id(self.post_cell) def get_pre_segment_id(self): + """Get the ID of the pre-synpatic segment + + :returns: ID of pre-synaptic segment. + :rtype: str + """ return int(self.pre_segment) def get_post_segment_id(self): + """Get the ID of the post-synpatic segment + + :returns: ID of post-synaptic segment. 
+ :rtype: str + """ return int(self.post_segment) def get_pre_fraction_along(self): + """Get pre-synaptic fraction along information""" return float(self.pre_fraction_along) def get_post_fraction_along(self): + """Get post-synaptic fraction along information""" return float(self.post_fraction_along) def get_pre_info(self): + """Get pre-synaptic information summary""" return str(self.get_pre_cell_id())+(':'+str(self.get_pre_segment_id())+'('+ 'PERCENTAGE.5f'PERCENTAGEself.get_pre_fraction_along()+')' if self.get_pre_segment_id()!=0 or self.get_pre_fraction_along()!=0.5 else '') def get_post_info(self): + """Get post-synaptic information summary""" return str(self.get_post_cell_id())+(':'+str(self.get_post_segment_id())+'('+ 'PERCENTAGE.5f'PERCENTAGEself.get_post_fraction_along()+')' if self.get_post_segment_id()!=0 or self.get_post_fraction_along()!=0.5 else '') @@ -507,7 +595,6 @@ def __str__(self): instance = MethodSpec(name='instance', source='''\ - def __str__(self): return "Instance "+str(self.id)+ (" at location: "+str(self.location) if self.location else "") @@ -558,14 +645,22 @@ def _get_cell_id(self, id_string): return int(id_string.split('/')[2]) def get_target_cell_id(self): + """Get ID of target cell. """ return self._get_cell_id(self.target) def get_segment_id(self): + """Get the ID of the segment. + Returns 0 if segment_id was not set. + """ return int(self.segment_id) if self.segment_id else 0 def get_fraction_along(self): + """Get fraction along. + + Returns 0.5 is fraction_along was not set. + """ return float(self.fraction_along) if self.fraction_along else 0.5 @@ -584,6 +679,10 @@ def __str__(self): source='''\ def get_weight(self): + """Get weight. + + If weight is not set, the default value of 1.0 is returned. + """ return float(self.weight) if self.weight!=None else 1.0 @@ -603,6 +702,11 @@ def __str__(self): def summary(self, show_includes=True, show_non_network=True): + """Get a pretty-printed summary of the complete NeuroMLDocument. + + This includes information on the various Components included in the + NeuroMLDocument: networks, cells, projections, synapses, and so on. + """ import inspect @@ -729,6 +833,12 @@ def summary(self, show_includes=True, show_non_network=True): warn_count = 0 def get_by_id(self,id): + """Get a component by specifying its ID. + + :param id: id of Component to get + :type id: str + :returns: Component with given ID or None if no Component with provided ID was found + """ if len(id)==0: import inspect callframe = inspect.getouterframes(inspect.currentframe(), 2) @@ -753,6 +863,11 @@ def get_by_id(self,id): return None def append(self,element): + """Append an element + + :param element: element to append + :type element: Object + """ from neuroml.utils import append_to_element append_to_element(self,element) @@ -767,6 +882,12 @@ def append(self,element): warn_count = 0 def get_by_id(self,id): + """Get a component by its ID + + :param id: ID of component to find + :type id: str + :returns: component with specified ID or None if no component with specified ID found + """ all_ids = [] for ms in self.member_data_items_: mlist = self.__getattribute__(ms.name) @@ -1545,6 +1666,7 @@ def __str__(self): source='''\ def exportHdf5(self, h5file, h5Group): + """Export to HDF5 file. 
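# Hedged sketch of the connection and input helpers documented above;
# "net.nml" is a placeholder file assumed to define one network with at
# least one projection and one input list:
from neuroml.loaders import read_neuroml2_file

net = read_neuroml2_file("net.nml").networks[0]
conn = net.projections[0].connections[0]
print(conn.get_pre_info(), "->", conn.get_post_info())
inp = net.input_lists[0].input[0]
print(inp.get_target_cell_id(), inp.get_segment_id(), inp.get_fraction_along())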
""" #print("Exporting %s: "+str(self.id)+" as HDF5") %s '''%(insert,inserts[insert]), From aef06596685f9eb7c4bce4d187a7521542199568 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Tue, 24 Aug 2021 11:58:18 +0100 Subject: [PATCH 026/136] docs: document utils.py --- neuroml/utils.py | 97 ++++++++++++++++++++++++++++++++++++------------ 1 file changed, 73 insertions(+), 24 deletions(-) diff --git a/neuroml/utils.py b/neuroml/utils.py index 662714b7..d99ec66b 100644 --- a/neuroml/utils.py +++ b/neuroml/utils.py @@ -11,7 +11,12 @@ def validate_neuroml2(file_name): + # (str) -> None + """Validate a NeuroML document against the NeuroML schema specification. + :param file_name: name of NeuroML file to validate. + :type file_name: str + """ from lxml import etree try: from urllib2 import urlopen # Python 2 @@ -28,8 +33,17 @@ def validate_neuroml2(file_name): return print("It's valid!") + # Return True if .nml file is valid else false def is_valid_neuroml2(file_name): + # (str) -> Bool + """Check if a file is valid NeuroML2. + + :param file_name: name of NeuroML file to check + :type file_name: str + :returns: True if file is valid, False if not. + :rtype: Boolean + """ from lxml import etree try: from urllib2 import urlopen # Python 2 @@ -45,12 +59,23 @@ def is_valid_neuroml2(file_name): def print_summary(nml_file_name): - + """Print a summary of the NeuroML model in the given file. + + :param nml_file_name: name of NeuroML file to print summary of + :type nml_file_name: str + """ print(get_summary(nml_file_name)) def get_summary(nml_file_name): - + # (str) -> str + """Get a summary of the given NeuroML file. + + :param nml_file_name: name of NeuroML file to get summary of + :type nml_file_name: str + :returns: summary of provided file + :rtype: str + """ from neuroml.loaders import read_neuroml2_file nml_doc = read_neuroml2_file(nml_file_name,include_includes=True, verbose=False, optimized=True) @@ -58,7 +83,17 @@ def get_summary(nml_file_name): def add_all_to_document(nml_doc_src, nml_doc_tgt, verbose=False): - + """Add all members of the source NeuroML document to the target NeuroML document. + + :param nml_doc_src: source NeuroML document to copy from + :type nml_doc_src: NeuroMLDocument + :param nml_doc_tgt: target NeuroML document to copy to + :type nml_doc_tgt: NeuroMLDocument + :param verbose: control verbosity of working + :type verbose: bool + + :raises Exception: if a member could not be copied. 
+ """ membs = inspect.getmembers(nml_doc_src) for memb in membs: @@ -81,30 +116,44 @@ def add_all_to_document(nml_doc_src, nml_doc_tgt, verbose=False): raise Exception("Could not add %s from %s to %s"%(entry, nml_doc_src, nml_doc_tgt)) def append_to_element(parent, child): - - import inspect - membs = inspect.getmembers(parent) - #print("Adding %s to element %s"%(child, parent)) - mappings = {} - for mdi in parent.member_data_items_: - mappings[mdi.data_type] = mdi.name - added = False - for memb in membs: - if isinstance(memb[1], list) and not memb[0].endswith('_'): - #print("Adding %s to %s in %s?"%(child.__class__.__name__, memb[0], parent.__class__.__name__)) - if mappings[child.__class__.__name__] == memb[0]: - for c in getattr(parent, memb[0]): - if c.id == child.id: - added = True - if not added: - getattr(parent, memb[0]).append(child) - #print("Adding %s to %s in %s?"%(child.__class__.__name__, memb[0], parent.__class__.__name__)) + """Append a child element to a parent Component + + :param parent: parent NeuroML component to add element to + :type parent: Object + :param child: child NeuroML component to be added to parent + :type child: Object + :raises Exception: when the child could not be added to the parent + """ + import inspect + membs = inspect.getmembers(parent) + #print("Adding %s to element %s"%(child, parent)) + mappings = {} + for mdi in parent.member_data_items_: + mappings[mdi.data_type] = mdi.name + added = False + for memb in membs: + if isinstance(memb[1], list) and not memb[0].endswith('_'): + #print("Adding %s to %s in %s?"%(child.__class__.__name__, memb[0], parent.__class__.__name__)) + if mappings[child.__class__.__name__] == memb[0]: + for c in getattr(parent, memb[0]): + if c.id == child.id: added = True - - if not added: - raise Exception("Could not add %s to %s"%(child, parent)) + if not added: + getattr(parent, memb[0]).append(child) + #print("Adding %s to %s in %s?"%(child.__class__.__name__, memb[0], parent.__class__.__name__)) + added = True + + if not added: + raise Exception("Could not add %s to %s"%(child, parent)) def has_segment_fraction_info(connections): + """Check if connections include fraction information + + :param connections: list of connection objects + :type connections: list + :returns: True if connections include fragment information, otherwise False + :rtype: Boolean + """ if not connections: return False no_seg_fract_info = True From db309618d509704f2731ea33db224cd6239eaf33 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Tue, 24 Aug 2021 12:01:42 +0100 Subject: [PATCH 027/136] refactor(utils.py): minor cosmetic changes Remove extra white spaces, remove duplicate and unneeded imports. 
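The validation and summary helpers documented above can be driven as in the following hedged sketch ("model.nml" is a placeholder file name):

    from neuroml.utils import validate_neuroml2, is_valid_neuroml2, print_summary

    validate_neuroml2("model.nml")       # prints whether the file passes schema validation
    if is_valid_neuroml2("model.nml"):
        print_summary("model.nml")       # pretty-printed overview of the document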
--- neuroml/utils.py | 42 +++++++++++++++++------------------------- 1 file changed, 17 insertions(+), 25 deletions(-) diff --git a/neuroml/utils.py b/neuroml/utils.py index d99ec66b..07c9abdf 100644 --- a/neuroml/utils.py +++ b/neuroml/utils.py @@ -5,9 +5,9 @@ """ import os.path import sys +import inspect import neuroml -import inspect def validate_neuroml2(file_name): @@ -18,13 +18,9 @@ def validate_neuroml2(file_name): :type file_name: str """ from lxml import etree - try: - from urllib2 import urlopen # Python 2 - except: - from urllib.request import urlopen # Python 3 - + xsd_file = os.path.join(os.path.dirname(__file__), 'nml/NeuroML_%s.xsd'%neuroml.current_neuroml_version) - + with open(xsd_file) as schema_file: xmlschema = etree.XMLSchema(etree.parse(schema_file)) print("Validating %s against %s" %(file_name, xsd_file)) @@ -34,7 +30,6 @@ def validate_neuroml2(file_name): print("It's valid!") -# Return True if .nml file is valid else false def is_valid_neuroml2(file_name): # (str) -> Bool """Check if a file is valid NeuroML2. @@ -45,10 +40,6 @@ def is_valid_neuroml2(file_name): :rtype: Boolean """ from lxml import etree - try: - from urllib2 import urlopen # Python 2 - except: - from urllib.request import urlopen # Python 3 xsd_file = os.path.join(os.path.dirname(__file__), 'nml/NeuroML_%s.xsd' % neuroml.current_neuroml_version) @@ -65,8 +56,8 @@ def print_summary(nml_file_name): :type nml_file_name: str """ print(get_summary(nml_file_name)) - - + + def get_summary(nml_file_name): # (str) -> str """Get a summary of the given NeuroML file. @@ -78,10 +69,10 @@ def get_summary(nml_file_name): """ from neuroml.loaders import read_neuroml2_file nml_doc = read_neuroml2_file(nml_file_name,include_includes=True, verbose=False, optimized=True) - + return nml_doc.summary(show_includes=False) - + def add_all_to_document(nml_doc_src, nml_doc_tgt, verbose=False): """Add all members of the source NeuroML document to the target NeuroML document. 
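# Hedged sketch of merging two documents with this helper; the document id
# and the source file name are illustrative placeholders:
import neuroml
from neuroml.loaders import read_neuroml2_file
from neuroml.utils import add_all_to_document

src_doc = read_neuroml2_file("cells.nml")
tgt_doc = neuroml.NeuroMLDocument(id="combined")
add_all_to_document(src_doc, tgt_doc)   # copies members not already present by id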
@@ -101,7 +92,7 @@ def add_all_to_document(nml_doc_src, nml_doc_tgt, verbose=False): and not memb[0].endswith('_'): for entry in memb[1]: if memb[0] != 'includes': - + added = False for c in getattr(nml_doc_tgt, memb[0]): if hasattr(c,'id') and c.id == entry.id: @@ -111,10 +102,11 @@ def add_all_to_document(nml_doc_src, nml_doc_tgt, verbose=False): # %(entry.id if hasattr(entry,'id') else entry.name, memb[0])) getattr(nml_doc_tgt, memb[0]).append(entry) added = True - + if not added: raise Exception("Could not add %s from %s to %s"%(entry, nml_doc_src, nml_doc_tgt)) - + + def append_to_element(parent, child): """Append a child element to a parent Component @@ -124,7 +116,6 @@ def append_to_element(parent, child): :type child: Object :raises Exception: when the child could not be added to the parent """ - import inspect membs = inspect.getmembers(parent) #print("Adding %s to element %s"%(child, parent)) mappings = {} @@ -142,10 +133,11 @@ def append_to_element(parent, child): getattr(parent, memb[0]).append(child) #print("Adding %s to %s in %s?"%(child.__class__.__name__, memb[0], parent.__class__.__name__)) added = True - + if not added: raise Exception("Could not add %s to %s"%(child, parent)) - + + def has_segment_fraction_info(connections): """Check if connections include fraction information @@ -165,14 +157,14 @@ def has_segment_fraction_info(connections): #print("Checked connections: [%s,...], no_seg_fract_info: %s"%(connections[0],no_seg_fract_info)) return not no_seg_fract_info - + def main(): if len(sys.argv)!=2: print("Please specify the name of the NeuroML2 file...") exit(1) - + print_summary(sys.argv[1]) + if __name__ == '__main__': main() - From fbd378461a2d0c69420c9ce2f2ff53986b5fa35d Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Tue, 24 Aug 2021 12:07:08 +0100 Subject: [PATCH 028/136] chore(nml.py): regenerate to incorporate new helper function docs --- neuroml/nml/nml.py | 163 ++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 161 insertions(+), 2 deletions(-) diff --git a/neuroml/nml/nml.py b/neuroml/nml/nml.py index 9f8a7cc7..45494f14 100644 --- a/neuroml/nml/nml.py +++ b/neuroml/nml/nml.py @@ -2,7 +2,7 @@ # -*- coding: utf-8 -*- # -# Generated Fri Jun 25 16:58:24 2021 by generateDS.py version 2.30.11. +# Generated Tue Aug 24 12:06:28 2021 by generateDS.py version 2.30.11. # Python 2.7.18 (default, May 19 2021, 00:00:00) [GCC 11.1.1 20210428 (Red Hat 11.1.1-1)] # # Command line options: @@ -3496,6 +3496,13 @@ def __repr__(self): return str(self) def distance_to(self, other_3d_point): + """Find the distance between this point and another. + + :param other_3d_point: other 3D point to calculate distance to + :type other_3d_point: Point3DWithDiam + :returns: distance between the two points + :rtype: float + """ a_x = self.x a_y = self.y a_z = self.z @@ -5980,7 +5987,6 @@ def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): self.location = obj_ obj_.original_tagname_ = 'location' - def __str__(self): return "Instance "+str(self.id)+ (" at location: "+str(self.location) if self.location else "") @@ -6333,14 +6339,22 @@ def _get_cell_id(self, id_string): return int(id_string.split('/')[2]) def get_target_cell_id(self): + """Get ID of target cell. """ return self._get_cell_id(self.target) def get_segment_id(self): + """Get the ID of the segment. + Returns 0 if segment_id was not set. + """ return int(self.segment_id) if self.segment_id else 0 def get_fraction_along(self): + """Get fraction along. 
+ + Returns 0.5 is fraction_along was not set. + """ return float(self.fraction_along) if self.fraction_along else 0.5 @@ -6529,14 +6543,22 @@ def _get_cell_id(self, id_string): return int(id_string.split('/')[2]) def get_target_cell_id(self): + """Get ID of target cell. """ return self._get_cell_id(self.target) def get_segment_id(self): + """Get the ID of the segment. + Returns 0 if segment_id was not set. + """ return int(self.segment_id) if self.segment_id else 0 def get_fraction_along(self): + """Get fraction along. + + Returns 0.5 is fraction_along was not set. + """ return float(self.fraction_along) if self.fraction_along else 0.5 @@ -6627,6 +6649,10 @@ def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): pass def get_weight(self): + """Get weight. + + If weight is not set, the default value of 1.0 is returned. + """ return float(self.weight) if self.weight!=None else 1.0 @@ -7295,6 +7321,7 @@ def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(InputList, self).buildChildren(child_, node, nodeName_, True) def exportHdf5(self, h5file, h5Group): + """Export to HDF5 file. """ #print("Exporting InputList: "+str(self.id)+" as HDF5") @@ -7785,6 +7812,7 @@ def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(Population, self).buildChildren(child_, node, nodeName_, True) def exportHdf5(self, h5file, h5Group): + """Export to HDF5 file. """ #print("Exporting Population: "+str(self.id)+" as HDF5") @@ -8284,6 +8312,12 @@ def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): warn_count = 0 def get_by_id(self,id): + """Get a component by its ID + + :param id: ID of component to find + :type id: str + :returns: component with specified ID or None if no component with specified ID found + """ all_ids = [] for ms in self.member_data_items_: mlist = self.__getattribute__(ms.name) @@ -8308,6 +8342,7 @@ def __str__(self): def exportHdf5(self, h5file, h5Group): + """Export to HDF5 file. """ #print("Exporting Network: "+str(self.id)+" as HDF5") @@ -12390,6 +12425,11 @@ def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(Segment, self).buildChildren(child_, node, nodeName_, True) @property def length(self): + """Get the length of the segment. + + :returns: length of the segment + :rtype: float + """ if self.proximal==None: raise Exception('Cannot get length of segment '+str(self.id)+' using the length property, since no proximal point is set on it (the proximal point comes from the parent segment). Use the method get_segment_length(segment_id) on the cell instead.') @@ -12416,6 +12456,11 @@ def __repr__(self): @property def volume(self): + """Get the volume of the segment. + + :returns: volume of segment + :rtype: float + """ from math import pi if self.proximal==None: @@ -12439,6 +12484,11 @@ def volume(self): @property def surface_area(self): + """Get the surface area of the segment. + + :returns: surface area of segment + :rtype: float + """ from math import pi from math import sqrt @@ -12560,6 +12610,11 @@ def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(Morphology, self).buildChildren(child_, node, nodeName_, True) @property def num_segments(self): + """Get the number of segments included in this cell morphology. 
+ + :returns: number of segments + :rtype: int + """ return len(self.segments) # end class Morphology @@ -16099,6 +16154,11 @@ def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): def summary(self, show_includes=True, show_non_network=True): + """Get a pretty-printed summary of the complete NeuroMLDocument. + + This includes information on the various Components included in the + NeuroMLDocument: networks, cells, projections, synapses, and so on. + """ import inspect @@ -16225,6 +16285,12 @@ def summary(self, show_includes=True, show_non_network=True): warn_count = 0 def get_by_id(self,id): + """Get a component by specifying its ID. + + :param id: id of Component to get + :type id: str + :returns: Component with given ID or None if no Component with provided ID was found + """ if len(id)==0: import inspect callframe = inspect.getouterframes(inspect.currentframe(), 2) @@ -16249,6 +16315,11 @@ def get_by_id(self,id): return None def append(self,element): + """Append an element + + :param element: element to append + :type element: Object + """ from neuroml.utils import append_to_element append_to_element(self,element) @@ -16591,6 +16662,7 @@ def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(ContinuousProjection, self).buildChildren(child_, node, nodeName_, True) def exportHdf5(self, h5file, h5Group): + """Export to HDF5 file. """ #print("Exporting ContinuousProjection: "+str(self.id)+" as HDF5") @@ -16784,6 +16856,7 @@ def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(ElectricalProjection, self).buildChildren(child_, node, nodeName_, True) def exportHdf5(self, h5file, h5Group): + """Export to HDF5 file. """ #print("Exporting ElectricalProjection: "+str(self.id)+" as HDF5") @@ -17301,6 +17374,7 @@ def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(Projection, self).buildChildren(child_, node, nodeName_, True) def exportHdf5(self, h5file, h5Group): + """Export to HDF5 file. """ #print("Exporting Projection: "+str(self.id)+" as HDF5") @@ -20714,27 +20788,41 @@ def get_post_cell_id(self): return self._get_cell_id(self.post_cell) def get_pre_segment_id(self): + """Get the ID of the pre-synpatic segment + + :returns: ID of pre-synaptic segment. + :rtype: str + """ return int(self.pre_segment) def get_post_segment_id(self): + """Get the ID of the post-synpatic segment + + :returns: ID of post-synaptic segment. 
+ :rtype: str + """ return int(self.post_segment) def get_pre_fraction_along(self): + """Get pre-synaptic fraction along information""" return float(self.pre_fraction_along) def get_post_fraction_along(self): + """Get post-synaptic fraction along information""" return float(self.post_fraction_along) def get_pre_info(self): + """Get pre-synaptic information summary""" return str(self.get_pre_cell_id())+(':'+str(self.get_pre_segment_id())+'('+ '%.5f'%self.get_pre_fraction_along()+')' if self.get_pre_segment_id()!=0 or self.get_pre_fraction_along()!=0.5 else '') def get_post_info(self): + """Get post-synaptic information summary""" return str(self.get_post_cell_id())+(':'+str(self.get_post_segment_id())+'('+ '%.5f'%self.get_post_fraction_along()+')' if self.get_post_segment_id()!=0 or self.get_post_fraction_along()!=0.5 else '') @@ -20843,10 +20931,20 @@ def _get_cell_id(self, id_string): return int(float(id_string)) def get_pre_cell_id(self): + """Get the ID of the pre-synaptic cell + + :returns: ID of pre-synaptic cell + :rtype: str + """ return self._get_cell_id(self.pre_cell) def get_post_cell_id(self): + """Get the ID of the post-synaptic cell + + :returns: ID of post-synaptic cell + :rtype: str + """ return self._get_cell_id(self.post_cell) @@ -20986,35 +21084,59 @@ def _get_cell_id(self, id_string): return int(id_string.split('/')[2]) def get_pre_cell_id(self): + """Get the ID of the pre-synaptic cell + + :returns: ID of pre-synaptic cell + :rtype: str + """ return self._get_cell_id(self.pre_cell_id) def get_post_cell_id(self): + """Get the ID of the post-synaptic cell + + :returns: ID of post-synaptic cell + :rtype: str + """ return self._get_cell_id(self.post_cell_id) def get_pre_segment_id(self): + """Get the ID of the pre-synpatic segment + + :returns: ID of pre-synaptic segment. + :rtype: str + """ return int(self.pre_segment_id) def get_post_segment_id(self): + """Get the ID of the post-synpatic segment + + :returns: ID of post-synaptic segment. 
+ :rtype: str + """ return int(self.post_segment_id) def get_pre_fraction_along(self): + """Get pre-synaptic fraction along information""" return float(self.pre_fraction_along) def get_post_fraction_along(self): + """Get post-synaptic fraction along information""" return float(self.post_fraction_along) def get_pre_info(self): + """Get pre-synaptic information summary""" return str(self.get_pre_cell_id())+(':'+str(self.get_pre_segment_id())+'('+ '%.5f'%self.get_pre_fraction_along()+')' if self.get_pre_segment_id()!=0 or self.get_pre_fraction_along()!=0.5 else '') def get_post_info(self): + """Get post-synaptic information summary""" return str(self.get_post_cell_id())+(':'+str(self.get_post_segment_id())+'('+ '%.5f'%self.get_post_fraction_along()+')' if self.get_post_segment_id()!=0 or self.get_post_fraction_along()!=0.5 else '') @@ -21028,6 +21150,11 @@ def __str__(self): return "Connection "+str(self.id)+": "+str(self.get_pre_info())+" -> "+str(self.get_post_info())+ ", weight: "+'%f' % (float(self.weight))+", delay: "+'%.5f' % (self.get_delay_in_ms())+" ms" def get_delay_in_ms(self): + """Get connection delay in milli seconds + + :returns: connection delay in milli seconds + :rtype: float + """ if 'ms' in self.delay: return float(self.delay[:-2].strip()) elif 's' in self.delay: @@ -21110,35 +21237,59 @@ def _get_cell_id(self, id_string): return int(id_string.split('/')[2]) def get_pre_cell_id(self): + """Get the ID of the pre-synaptic cell + + :returns: ID of pre-synaptic cell + :rtype: str + """ return self._get_cell_id(self.pre_cell_id) def get_post_cell_id(self): + """Get the ID of the post-synaptic cell + + :returns: ID of post-synaptic cell + :rtype: str + """ return self._get_cell_id(self.post_cell_id) def get_pre_segment_id(self): + """Get the ID of the pre-synpatic segment + + :returns: ID of pre-synaptic segment. + :rtype: str + """ return int(self.pre_segment_id) def get_post_segment_id(self): + """Get the ID of the post-synpatic segment + + :returns: ID of post-synaptic segment. + :rtype: str + """ return int(self.post_segment_id) def get_pre_fraction_along(self): + """Get pre-synaptic fraction along information""" return float(self.pre_fraction_along) def get_post_fraction_along(self): + """Get post-synaptic fraction along information""" return float(self.post_fraction_along) def get_pre_info(self): + """Get pre-synaptic information summary""" return str(self.get_pre_cell_id())+(':'+str(self.get_pre_segment_id())+'('+ '%.5f'%self.get_pre_fraction_along()+')' if self.get_pre_segment_id()!=0 or self.get_pre_fraction_along()!=0.5 else '') def get_post_info(self): + """Get post-synaptic information summary""" return str(self.get_post_cell_id())+(':'+str(self.get_post_segment_id())+'('+ '%.5f'%self.get_post_fraction_along()+')' if self.get_post_segment_id()!=0 or self.get_post_fraction_along()!=0.5 else '') @@ -23616,6 +23767,14 @@ def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): pass def get_weight(self): + """Get the weight of the connection + + If a weight is not set (or is set to None), returns the default value + of 1.0. 
+ + :returns: weight of connection or 1.0 if not set + :rtype: float + """ return float(self.weight) if self.weight!=None else 1.0 From f4a2a0bfde5adacf763b057c43ee3f3516063162 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Tue, 24 Aug 2021 12:09:35 +0100 Subject: [PATCH 029/136] fix: correct docstring indentation --- neuroml/nml/helper_methods.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/neuroml/nml/helper_methods.py b/neuroml/nml/helper_methods.py index c4e48c9a..3a409b60 100644 --- a/neuroml/nml/helper_methods.py +++ b/neuroml/nml/helper_methods.py @@ -645,7 +645,7 @@ def _get_cell_id(self, id_string): return int(id_string.split('/')[2]) def get_target_cell_id(self): - """Get ID of target cell. """ + """Get ID of target cell. """ return self._get_cell_id(self.target) From 878c3a2a90f84a8c5b75040c94b2e9446af561cf Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Tue, 24 Aug 2021 12:10:01 +0100 Subject: [PATCH 030/136] chore(nml.py): regenerate --- neuroml/nml/nml.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/neuroml/nml/nml.py b/neuroml/nml/nml.py index 45494f14..71282dad 100644 --- a/neuroml/nml/nml.py +++ b/neuroml/nml/nml.py @@ -2,7 +2,7 @@ # -*- coding: utf-8 -*- # -# Generated Tue Aug 24 12:06:28 2021 by generateDS.py version 2.30.11. +# Generated Tue Aug 24 12:09:50 2021 by generateDS.py version 2.30.11. # Python 2.7.18 (default, May 19 2021, 00:00:00) [GCC 11.1.1 20210428 (Red Hat 11.1.1-1)] # # Command line options: @@ -6339,7 +6339,7 @@ def _get_cell_id(self, id_string): return int(id_string.split('/')[2]) def get_target_cell_id(self): - """Get ID of target cell. """ + """Get ID of target cell. """ return self._get_cell_id(self.target) @@ -6543,7 +6543,7 @@ def _get_cell_id(self, id_string): return int(id_string.split('/')[2]) def get_target_cell_id(self): - """Get ID of target cell. """ + """Get ID of target cell. """ return self._get_cell_id(self.target) From 3795f15303b3495adfd28544f84ec529f6823a88 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Tue, 24 Aug 2021 12:15:49 +0100 Subject: [PATCH 031/136] docs(helper_methods): add docstrings to remaining methods --- neuroml/nml/helper_methods.py | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/neuroml/nml/helper_methods.py b/neuroml/nml/helper_methods.py index 3a409b60..3a4dead0 100644 --- a/neuroml/nml/helper_methods.py +++ b/neuroml/nml/helper_methods.py @@ -446,27 +446,41 @@ def get_post_cell_id(self): return self._get_cell_id(self.post_cell) def get_pre_segment_id(self): + """Get the ID of the pre-synpatic segment + + :returns: ID of pre-synaptic segment. + :rtype: str + """ return int(self.pre_segment) def get_post_segment_id(self): + """Get the ID of the post-synpatic segment + + :returns: ID of post-synaptic segment. 
+ :rtype: str + """ return int(self.post_segment) def get_pre_fraction_along(self): + """Get pre-synaptic fraction along information""" return float(self.pre_fraction_along) def get_post_fraction_along(self): + """Get post-synaptic fraction along information""" return float(self.post_fraction_along) def get_pre_info(self): + """Get pre-synaptic information summary""" return str(self.get_pre_cell_id())+(':'+str(self.get_pre_segment_id())+'('+ 'PERCENTAGE.5f'PERCENTAGEself.get_pre_fraction_along()+')' if self.get_pre_segment_id()!=0 or self.get_pre_fraction_along()!=0.5 else '') def get_post_info(self): + """Get post-synaptic information summary""" return str(self.get_post_cell_id())+(':'+str(self.get_post_segment_id())+'('+ 'PERCENTAGE.5f'PERCENTAGEself.get_post_fraction_along()+')' if self.get_post_segment_id()!=0 or self.get_post_fraction_along()!=0.5 else '') @@ -509,6 +523,10 @@ def __str__(self): source='''\ def get_weight(self): + """Get weight. + + If weight is not set, the default value of 1.0 is returned. + """ return float(self.weight) if self.weight!=None else 1.0 @@ -532,10 +550,20 @@ def _get_cell_id(self, id_string): def get_pre_cell_id(self): + """Get the ID of the pre-synaptic cell + + :returns: ID of pre-synaptic cell + :rtype: str + """ return self._get_cell_id(self.pre_cell) def get_post_cell_id(self): + """Get the ID of the post-synaptic cell + + :returns: ID of post-synaptic cell + :rtype: str + """ return self._get_cell_id(self.post_cell) From 9b30882f4d64bda6a3cd0c7f86bfc3b72fcb0f61 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Tue, 24 Aug 2021 12:16:16 +0100 Subject: [PATCH 032/136] chore(nml.py): regenerate --- neuroml/nml/nml.py | 30 +++++++++++++++++++++++++++++- 1 file changed, 29 insertions(+), 1 deletion(-) diff --git a/neuroml/nml/nml.py b/neuroml/nml/nml.py index 71282dad..a51fdf5e 100644 --- a/neuroml/nml/nml.py +++ b/neuroml/nml/nml.py @@ -2,7 +2,7 @@ # -*- coding: utf-8 -*- # -# Generated Tue Aug 24 12:09:50 2021 by generateDS.py version 2.30.11. +# Generated Tue Aug 24 12:16:12 2021 by generateDS.py version 2.30.11. # Python 2.7.18 (default, May 19 2021, 00:00:00) [GCC 11.1.1 20210428 (Red Hat 11.1.1-1)] # # Command line options: @@ -20780,10 +20780,20 @@ def _get_cell_id(self, id_string): def get_pre_cell_id(self): + """Get the ID of the pre-synaptic cell + + :returns: ID of pre-synaptic cell + :rtype: str + """ return self._get_cell_id(self.pre_cell) def get_post_cell_id(self): + """Get the ID of the post-synaptic cell + + :returns: ID of post-synaptic cell + :rtype: str + """ return self._get_cell_id(self.post_cell) @@ -20949,27 +20959,41 @@ def get_post_cell_id(self): return self._get_cell_id(self.post_cell) def get_pre_segment_id(self): + """Get the ID of the pre-synpatic segment + + :returns: ID of pre-synaptic segment. + :rtype: str + """ return int(self.pre_segment) def get_post_segment_id(self): + """Get the ID of the post-synpatic segment + + :returns: ID of post-synaptic segment. 
+ :rtype: str + """ return int(self.post_segment) def get_pre_fraction_along(self): + """Get pre-synaptic fraction along information""" return float(self.pre_fraction_along) def get_post_fraction_along(self): + """Get post-synaptic fraction along information""" return float(self.post_fraction_along) def get_pre_info(self): + """Get pre-synaptic information summary""" return str(self.get_pre_cell_id())+(':'+str(self.get_pre_segment_id())+'('+ '%.5f'%self.get_pre_fraction_along()+')' if self.get_pre_segment_id()!=0 or self.get_pre_fraction_along()!=0.5 else '') def get_post_info(self): + """Get post-synaptic information summary""" return str(self.get_post_cell_id())+(':'+str(self.get_post_segment_id())+'('+ '%.5f'%self.get_post_fraction_along()+')' if self.get_post_segment_id()!=0 or self.get_post_fraction_along()!=0.5 else '') @@ -23675,6 +23699,10 @@ def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): pass def get_weight(self): + """Get weight. + + If weight is not set, the default value of 1.0 is returned. + """ return float(self.weight) if self.weight!=None else 1.0 From 95b709366ef61322926e57cd1a6f9888f0ef960b Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Wed, 25 Aug 2021 15:28:35 +0100 Subject: [PATCH 033/136] feat: use pydata theme https://sphinx-themes.org/sample-sites/pydata-sphinx-theme/ --- doc/conf.py | 2 +- doc/requirements.txt | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/doc/conf.py b/doc/conf.py index da0c6f79..5908f30b 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -106,7 +106,7 @@ # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. -html_theme = 'nature' +html_theme = 'pydata_sphinx_theme' # Theme options are theme-specific and customize the look and feel of a theme # further. 
For a list of options available for each theme, see the diff --git a/doc/requirements.txt b/doc/requirements.txt index ef36addc..2a8df123 100644 --- a/doc/requirements.txt +++ b/doc/requirements.txt @@ -1 +1,2 @@ sphinxcontrib-bibtex +pydata-sphinx-theme From e74b09ea122ef94d34a8d9988ba4aabdd7b29b45 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Wed, 25 Aug 2021 17:24:09 +0100 Subject: [PATCH 034/136] feat: switch to pydata theme and generate nml.py docs manually --- doc/api.rst | 13 - doc/conf.py | 25 +- .../2012_06_26_neuroml_with_pymoose.pdf | Bin doc/{ => devdocs}/how_to_contribute.rst | 0 .../implementation_of_bindings.rst | 0 doc/devdocs/index.rst | 12 + doc/{ => devdocs}/meeting_june_2012.rst | 0 doc/{ => devdocs}/nodes_segments_sections.rst | 0 doc/devdocs/regenerate_docs.rst | 8 + doc/helpers/nml-core-docs.py | 48 + doc/index.rst | 37 +- doc/userdocs/api.rst | 16 + doc/{ => userdocs}/arraymorph.rst | 0 doc/{ => userdocs}/bibliography.rst | 0 doc/{ => userdocs}/coreclasses.rst | 12 +- doc/userdocs/coreclasses_list.txt | 1702 +++++++++++++++++ doc/{ => userdocs}/examples.rst | 22 +- doc/userdocs/get-nml-core-docs.sh | 16 + doc/userdocs/index.rst | 12 + doc/{ => userdocs}/install.rst | 0 doc/{ => userdocs}/introduction.rst | 0 doc/{ => userdocs}/loaders.rst | 0 doc/{ => userdocs}/utils.rst | 0 doc/{ => userdocs}/writers.rst | 0 24 files changed, 1854 insertions(+), 69 deletions(-) delete mode 100644 doc/api.rst rename doc/{ => devdocs}/2012_06_26_neuroml_with_pymoose.pdf (100%) rename doc/{ => devdocs}/how_to_contribute.rst (100%) rename doc/{ => devdocs}/implementation_of_bindings.rst (100%) create mode 100644 doc/devdocs/index.rst rename doc/{ => devdocs}/meeting_june_2012.rst (100%) rename doc/{ => devdocs}/nodes_segments_sections.rst (100%) create mode 100644 doc/devdocs/regenerate_docs.rst create mode 100644 doc/helpers/nml-core-docs.py create mode 100644 doc/userdocs/api.rst rename doc/{ => userdocs}/arraymorph.rst (100%) rename doc/{ => userdocs}/bibliography.rst (100%) rename doc/{ => userdocs}/coreclasses.rst (56%) create mode 100644 doc/userdocs/coreclasses_list.txt rename doc/{ => userdocs}/examples.rst (58%) create mode 100644 doc/userdocs/get-nml-core-docs.sh create mode 100644 doc/userdocs/index.rst rename doc/{ => userdocs}/install.rst (100%) rename doc/{ => userdocs}/introduction.rst (100%) rename doc/{ => userdocs}/loaders.rst (100%) rename doc/{ => userdocs}/utils.rst (100%) rename doc/{ => userdocs}/writers.rst (100%) diff --git a/doc/api.rst b/doc/api.rst deleted file mode 100644 index 54487358..00000000 --- a/doc/api.rst +++ /dev/null @@ -1,13 +0,0 @@ -API documentation -================= - -.. toctree:: - :maxdepth: 2 - - coreclasses - loaders - writers - utils - arraymorph - - diff --git a/doc/conf.py b/doc/conf.py index 5908f30b..be4584dc 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -67,6 +67,14 @@ # The full version, including alpha/beta/rc tags. 
release = version +# autodoc_class_signature = "separated" +autoclass_content = "both" + +# gds specific members to exclude from nml.py doc +autodoc_default_options = { + "exclude-members": "build, buildAttributes, buildChildren, exportAttributes, export, exportChildren, factory, hasContent_, member_data_items_, subclass, superclass, warn_count, validate_allowedSpaces, validate_BlockTypes, validate_channelTypes, validate_DoubleGreaterThanZero, validate_gateTypes, validate_MetaId, validate_MetaId_patterns_, validate_Metric, validate_networkTypes, validate_NeuroLexId, validate_NeuroLexId_patterns_, validate_Nml2Quantity, validate_Nml2Quantity_capacitance, validate_Nml2Quantity_capacitance_patterns_, validate_Nml2Quantity_concentration, validate_Nml2Quantity_concentration_patterns_, validate_Nml2Quantity_conductance, validate_Nml2Quantity_conductanceDensity, validate_Nml2Quantity_conductanceDensity_patterns_, validate_Nml2Quantity_conductance_patterns_, validate_Nml2Quantity_conductancePerVoltage, validate_Nml2Quantity_conductancePerVoltage_patterns_, validate_Nml2Quantity_current, validate_Nml2Quantity_currentDensity, validate_Nml2Quantity_currentDensity_patterns_, validate_Nml2Quantity_current_patterns_, validate_Nml2Quantity_length, validate_Nml2Quantity_length_patterns_, validate_Nml2Quantity_none, validate_Nml2Quantity_none_patterns_, validate_Nml2Quantity_patterns_, validate_Nml2Quantity_permeability, validate_Nml2Quantity_permeability_patterns_, validate_Nml2Quantity_pertime, validate_Nml2Quantity_pertime_patterns_, validate_Nml2Quantity_resistance, validate_Nml2Quantity_resistance_patterns_, validate_Nml2Quantity_rhoFactor, validate_Nml2Quantity_rhoFactor_patterns_, validate_Nml2Quantity_specificCapacitance, validate_Nml2Quantity_specificCapacitance_patterns_, validate_Nml2Quantity_temperature, validate_Nml2Quantity_temperature_patterns_, validate_Nml2Quantity_time, validate_Nml2Quantity_time_patterns_, validate_Nml2Quantity_voltage, validate_Nml2Quantity_voltage_patterns_, validate_NmlId, validate_NmlId_patterns_, validate_NonNegativeInteger, validate_Notes, validate_PlasticityTypes, validate_populationTypes, validate_PositiveInteger, validate_ZeroOrOne, validate_ZeroToOne" +} + # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. #language = None @@ -111,7 +119,22 @@ # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. -#html_theme_options = {} +html_theme_options = { + "github_url": "https://github.com/NeuralEnsemble/libNeuroML", + "use_edit_page_button": True, + "show_toc_level": 1, + "external_links": [ + { + "name": "NeuroML Documentation", "url": "https://docs.neuroml.org" + }, + ] +} + +html_context = { + "github_user": "NeuralEnsemble", + "github_repo": "libNeuroML", + "github_version": "development", +} # Add any paths that contain custom themes here, relative to this directory. 
#html_theme_path = [] diff --git a/doc/2012_06_26_neuroml_with_pymoose.pdf b/doc/devdocs/2012_06_26_neuroml_with_pymoose.pdf similarity index 100% rename from doc/2012_06_26_neuroml_with_pymoose.pdf rename to doc/devdocs/2012_06_26_neuroml_with_pymoose.pdf diff --git a/doc/how_to_contribute.rst b/doc/devdocs/how_to_contribute.rst similarity index 100% rename from doc/how_to_contribute.rst rename to doc/devdocs/how_to_contribute.rst diff --git a/doc/implementation_of_bindings.rst b/doc/devdocs/implementation_of_bindings.rst similarity index 100% rename from doc/implementation_of_bindings.rst rename to doc/devdocs/implementation_of_bindings.rst diff --git a/doc/devdocs/index.rst b/doc/devdocs/index.rst new file mode 100644 index 00000000..7d373357 --- /dev/null +++ b/doc/devdocs/index.rst @@ -0,0 +1,12 @@ +Contributing +************ + +.. toctree:: + :maxdepth: 2 + + how_to_contribute + regenerate_docs + implementation_of_bindings + meeting_june_2012 + nodes_segments_sections + diff --git a/doc/meeting_june_2012.rst b/doc/devdocs/meeting_june_2012.rst similarity index 100% rename from doc/meeting_june_2012.rst rename to doc/devdocs/meeting_june_2012.rst diff --git a/doc/nodes_segments_sections.rst b/doc/devdocs/nodes_segments_sections.rst similarity index 100% rename from doc/nodes_segments_sections.rst rename to doc/devdocs/nodes_segments_sections.rst diff --git a/doc/devdocs/regenerate_docs.rst b/doc/devdocs/regenerate_docs.rst new file mode 100644 index 00000000..66e98283 --- /dev/null +++ b/doc/devdocs/regenerate_docs.rst @@ -0,0 +1,8 @@ +Regenerating documentation +========================== + +Please create a virtual environment and use the `requirements.txt` file to install the necessary bits. + +In most cases, running `make html` should be sufficient to regenerate the documentation. +However, if any changes to `nml.py` have been made, the `nml-core-docs.py` file in the `helpers` directory will also need to be run. +This script manually adds each class from `nml.py` to the documentation as a sub-section using the `autoclass` sphinx directive instead of the `automodule` directive which does not allow us to do this. diff --git a/doc/helpers/nml-core-docs.py b/doc/helpers/nml-core-docs.py new file mode 100644 index 00000000..a59aa3c4 --- /dev/null +++ b/doc/helpers/nml-core-docs.py @@ -0,0 +1,48 @@ +#!/usr/bin/env python3 +""" +Enter one line description here. + +File: + +Copyright 2021 Ankur Sinha +Author: Ankur Sinha +""" + + +import textwrap + + +excluded_classes = ['GDSParseError', 'MixedContainer', 'MemberSpec_', + 'BlockTypes', 'Metric', 'PlasticityTypes', 'ZeroOrOne', 'allowedSpaces', + 'channelTypes', 'gateTypes', 'networkTypes', 'populationTypes'] +classes = [] +with open("../../neuroml/nml/nml.py", 'r') as file: + lines = file.readlines() + for line in lines: + if line.startswith("class"): + # get the class signature + aclass = line[len("class "):] + aclass = aclass.split('(')[0].split(':')[0] + if aclass not in excluded_classes: + classes.append(aclass) + + +classes.sort() + +with open("../userdocs/coreclasses_list.txt", 'w') as fwrite: + print(".. Generated using nml-core-docs.py", file=fwrite) + for aclass in classes: + towrite = textwrap.dedent( + """ + {} + {} + + .. 
autoclass:: neuroml.nml.nml.{} + :members: + :undoc-members: + + """.format( + aclass, "#" * len(aclass), aclass, + ) + ) + print(towrite, file=fwrite) diff --git a/doc/index.rst b/doc/index.rst index 2086e01a..b4899bb8 100644 --- a/doc/index.rst +++ b/doc/index.rst @@ -6,44 +6,11 @@ Here you will find information on installing, using, and contributing to libNeur For more information on NeuroML standard, other tools in the NeuroML eco-system, the NeuroML community and how to get in touch with us, please see the documentation at https://docs.neuroml.org. -User documentation -************************ - -.. toctree:: - :maxdepth: 2 - - introduction - install - api - examples - bibliography - - -Developer documentation -************************ - .. toctree:: :maxdepth: 2 - how_to_contribute - implementation_of_bindings - -Events and meetings -************************ - -.. toctree:: - :maxdepth: 1 - - meeting_june_2012 - -Miscellaneous -************************ - -.. toctree:: - :maxdepth: 1 - - nodes_segments_sections - + userdocs/index + devdocs/index Indices and tables ******************* diff --git a/doc/userdocs/api.rst b/doc/userdocs/api.rst new file mode 100644 index 00000000..aa2f6688 --- /dev/null +++ b/doc/userdocs/api.rst @@ -0,0 +1,16 @@ +API documentation +================= + +The libNeuroML API includes the core NeuroML classes and various utilities. +You can find information on these in the pages below. + +.. toctree:: + :maxdepth: 2 + + coreclasses + loaders + writers + utils + arraymorph + + diff --git a/doc/arraymorph.rst b/doc/userdocs/arraymorph.rst similarity index 100% rename from doc/arraymorph.rst rename to doc/userdocs/arraymorph.rst diff --git a/doc/bibliography.rst b/doc/userdocs/bibliography.rst similarity index 100% rename from doc/bibliography.rst rename to doc/userdocs/bibliography.rst diff --git a/doc/coreclasses.rst b/doc/userdocs/coreclasses.rst similarity index 56% rename from doc/coreclasses.rst rename to doc/userdocs/coreclasses.rst index 1e1cadd4..c077feb0 100644 --- a/doc/coreclasses.rst +++ b/doc/userdocs/coreclasses.rst @@ -35,13 +35,7 @@ Please also note that this module is also included in the top level of the `neur -Class documentation -~~~~~~~~~~~~~~~~~~~~ - -.. 
automodule:: neuroml.nml.nml - :members: - :undoc-members: - :exclude-members: build, buildAttributes, buildChildren, exportAttributes, export, exportChildren, factory, hasContent_, member_data_items_, subclass, superclass, warn_count, validate_allowedSpaces,validate_BlockTypes,validate_channelTypes,validate_DoubleGreaterThanZero,validate_gateTypes,validate_MetaId,validate_MetaId_patterns_,validate_Metric,validate_networkTypes,validate_NeuroLexId,validate_NeuroLexId_patterns_,validate_Nml2Quantity,validate_Nml2Quantity_capacitance,validate_Nml2Quantity_capacitance_patterns_,validate_Nml2Quantity_concentration,validate_Nml2Quantity_concentration_patterns_,validate_Nml2Quantity_conductance,validate_Nml2Quantity_conductanceDensity,validate_Nml2Quantity_conductanceDensity_patterns_,validate_Nml2Quantity_conductance_patterns_,validate_Nml2Quantity_conductancePerVoltage,validate_Nml2Quantity_conductancePerVoltage_patterns_,validate_Nml2Quantity_current,validate_Nml2Quantity_currentDensity,validate_Nml2Quantity_currentDensity_patterns_,validate_Nml2Quantity_current_patterns_,validate_Nml2Quantity_length,validate_Nml2Quantity_length_patterns_,validate_Nml2Quantity_none,validate_Nml2Quantity_none_patterns_,validate_Nml2Quantity_patterns_,validate_Nml2Quantity_permeability,validate_Nml2Quantity_permeability_patterns_,validate_Nml2Quantity_pertime,validate_Nml2Quantity_pertime_patterns_,validate_Nml2Quantity_resistance,validate_Nml2Quantity_resistance_patterns_,validate_Nml2Quantity_rhoFactor,validate_Nml2Quantity_rhoFactor_patterns_,validate_Nml2Quantity_specificCapacitance,validate_Nml2Quantity_specificCapacitance_patterns_,validate_Nml2Quantity_temperature,validate_Nml2Quantity_temperature_patterns_,validate_Nml2Quantity_time,validate_Nml2Quantity_time_patterns_,validate_Nml2Quantity_voltage,validate_Nml2Quantity_voltage_patterns_,validate_NmlId,validate_NmlId_patterns_,validate_NonNegativeInteger,validate_Notes,validate_PlasticityTypes,validate_populationTypes,validate_PositiveInteger,validate_ZeroOrOne,validate_ZeroToOne - - +List of Component classes +~~~~~~~~~~~~~~~~~~~~~~~~~~ +.. include:: coreclasses_list.txt diff --git a/doc/userdocs/coreclasses_list.txt b/doc/userdocs/coreclasses_list.txt new file mode 100644 index 00000000..f7f17054 --- /dev/null +++ b/doc/userdocs/coreclasses_list.txt @@ -0,0 +1,1702 @@ +.. Generated using nml-core-docs.py + +AdExIaFCell +########### + +.. autoclass:: neuroml.nml.nml.AdExIaFCell + :members: + :undoc-members: + + + +AlphaCondSynapse +################ + +.. autoclass:: neuroml.nml.nml.AlphaCondSynapse + :members: + :undoc-members: + + + +AlphaCurrSynapse +################ + +.. autoclass:: neuroml.nml.nml.AlphaCurrSynapse + :members: + :undoc-members: + + + +AlphaCurrentSynapse +################### + +.. autoclass:: neuroml.nml.nml.AlphaCurrentSynapse + :members: + :undoc-members: + + + +AlphaSynapse +############ + +.. autoclass:: neuroml.nml.nml.AlphaSynapse + :members: + :undoc-members: + + + +Annotation +########## + +.. autoclass:: neuroml.nml.nml.Annotation + :members: + :undoc-members: + + + +Base +#### + +.. autoclass:: neuroml.nml.nml.Base + :members: + :undoc-members: + + + +BaseCell +######## + +.. autoclass:: neuroml.nml.nml.BaseCell + :members: + :undoc-members: + + + +BaseCellMembPotCap +################## + +.. autoclass:: neuroml.nml.nml.BaseCellMembPotCap + :members: + :undoc-members: + + + +BaseConductanceBasedSynapse +########################### + +.. 
autoclass:: neuroml.nml.nml.BaseConductanceBasedSynapse + :members: + :undoc-members: + + + +BaseConductanceBasedSynapseTwo +############################## + +.. autoclass:: neuroml.nml.nml.BaseConductanceBasedSynapseTwo + :members: + :undoc-members: + + + +BaseConnection +############## + +.. autoclass:: neuroml.nml.nml.BaseConnection + :members: + :undoc-members: + + + +BaseConnectionNewFormat +####################### + +.. autoclass:: neuroml.nml.nml.BaseConnectionNewFormat + :members: + :undoc-members: + + + +BaseConnectionOldFormat +####################### + +.. autoclass:: neuroml.nml.nml.BaseConnectionOldFormat + :members: + :undoc-members: + + + +BaseCurrentBasedSynapse +####################### + +.. autoclass:: neuroml.nml.nml.BaseCurrentBasedSynapse + :members: + :undoc-members: + + + +BaseNonNegativeIntegerId +######################## + +.. autoclass:: neuroml.nml.nml.BaseNonNegativeIntegerId + :members: + :undoc-members: + + + +BaseProjection +############## + +.. autoclass:: neuroml.nml.nml.BaseProjection + :members: + :undoc-members: + + + +BasePynnSynapse +############### + +.. autoclass:: neuroml.nml.nml.BasePynnSynapse + :members: + :undoc-members: + + + +BaseSynapse +########### + +.. autoclass:: neuroml.nml.nml.BaseSynapse + :members: + :undoc-members: + + + +BaseVoltageDepSynapse +##################### + +.. autoclass:: neuroml.nml.nml.BaseVoltageDepSynapse + :members: + :undoc-members: + + + +BaseWithoutId +############# + +.. autoclass:: neuroml.nml.nml.BaseWithoutId + :members: + :undoc-members: + + + +BiophysicalProperties +##################### + +.. autoclass:: neuroml.nml.nml.BiophysicalProperties + :members: + :undoc-members: + + + +BiophysicalProperties2CaPools +############################# + +.. autoclass:: neuroml.nml.nml.BiophysicalProperties2CaPools + :members: + :undoc-members: + + + +BlockMechanism +############## + +.. autoclass:: neuroml.nml.nml.BlockMechanism + :members: + :undoc-members: + + + +BlockingPlasticSynapse +###################### + +.. autoclass:: neuroml.nml.nml.BlockingPlasticSynapse + :members: + :undoc-members: + + + +Case +#### + +.. autoclass:: neuroml.nml.nml.Case + :members: + :undoc-members: + + + +Cell +#### + +.. autoclass:: neuroml.nml.nml.Cell + :members: + :undoc-members: + + + +Cell2CaPools +############ + +.. autoclass:: neuroml.nml.nml.Cell2CaPools + :members: + :undoc-members: + + + +CellSet +####### + +.. autoclass:: neuroml.nml.nml.CellSet + :members: + :undoc-members: + + + +ChannelDensity +############## + +.. autoclass:: neuroml.nml.nml.ChannelDensity + :members: + :undoc-members: + + + +ChannelDensityGHK +################# + +.. autoclass:: neuroml.nml.nml.ChannelDensityGHK + :members: + :undoc-members: + + + +ChannelDensityGHK2 +################## + +.. autoclass:: neuroml.nml.nml.ChannelDensityGHK2 + :members: + :undoc-members: + + + +ChannelDensityNernst +#################### + +.. autoclass:: neuroml.nml.nml.ChannelDensityNernst + :members: + :undoc-members: + + + +ChannelDensityNernstCa2 +####################### + +.. autoclass:: neuroml.nml.nml.ChannelDensityNernstCa2 + :members: + :undoc-members: + + + +ChannelDensityNonUniform +######################## + +.. autoclass:: neuroml.nml.nml.ChannelDensityNonUniform + :members: + :undoc-members: + + + +ChannelDensityNonUniformGHK +########################### + +.. autoclass:: neuroml.nml.nml.ChannelDensityNonUniformGHK + :members: + :undoc-members: + + + +ChannelDensityNonUniformNernst +############################## + +.. 
autoclass:: neuroml.nml.nml.ChannelDensityNonUniformNernst + :members: + :undoc-members: + + + +ChannelDensityVShift +#################### + +.. autoclass:: neuroml.nml.nml.ChannelDensityVShift + :members: + :undoc-members: + + + +ChannelPopulation +################# + +.. autoclass:: neuroml.nml.nml.ChannelPopulation + :members: + :undoc-members: + + + +ClosedState +########### + +.. autoclass:: neuroml.nml.nml.ClosedState + :members: + :undoc-members: + + + +ComponentType +############# + +.. autoclass:: neuroml.nml.nml.ComponentType + :members: + :undoc-members: + + + +CompoundInput +############# + +.. autoclass:: neuroml.nml.nml.CompoundInput + :members: + :undoc-members: + + + +CompoundInputDL +############### + +.. autoclass:: neuroml.nml.nml.CompoundInputDL + :members: + :undoc-members: + + + +ConcentrationModel_D +#################### + +.. autoclass:: neuroml.nml.nml.ConcentrationModel_D + :members: + :undoc-members: + + + +ConditionalDerivedVariable +########################## + +.. autoclass:: neuroml.nml.nml.ConditionalDerivedVariable + :members: + :undoc-members: + + + +Connection +########## + +.. autoclass:: neuroml.nml.nml.Connection + :members: + :undoc-members: + + + +ConnectionWD +############ + +.. autoclass:: neuroml.nml.nml.ConnectionWD + :members: + :undoc-members: + + + +Constant +######## + +.. autoclass:: neuroml.nml.nml.Constant + :members: + :undoc-members: + + + +ContinuousConnection +#################### + +.. autoclass:: neuroml.nml.nml.ContinuousConnection + :members: + :undoc-members: + + + +ContinuousConnectionInstance +############################ + +.. autoclass:: neuroml.nml.nml.ContinuousConnectionInstance + :members: + :undoc-members: + + + +ContinuousConnectionInstanceW +############################# + +.. autoclass:: neuroml.nml.nml.ContinuousConnectionInstanceW + :members: + :undoc-members: + + + +ContinuousProjection +#################### + +.. autoclass:: neuroml.nml.nml.ContinuousProjection + :members: + :undoc-members: + + + +DecayingPoolConcentrationModel +############################## + +.. autoclass:: neuroml.nml.nml.DecayingPoolConcentrationModel + :members: + :undoc-members: + + + +DerivedVariable +############### + +.. autoclass:: neuroml.nml.nml.DerivedVariable + :members: + :undoc-members: + + + +DistalDetails +############# + +.. autoclass:: neuroml.nml.nml.DistalDetails + :members: + :undoc-members: + + + +DoubleSynapse +############# + +.. autoclass:: neuroml.nml.nml.DoubleSynapse + :members: + :undoc-members: + + + +Dynamics +######## + +.. autoclass:: neuroml.nml.nml.Dynamics + :members: + :undoc-members: + + + +EIF_cond_alpha_isfa_ista +######################## + +.. autoclass:: neuroml.nml.nml.EIF_cond_alpha_isfa_ista + :members: + :undoc-members: + + + +EIF_cond_exp_isfa_ista +###################### + +.. autoclass:: neuroml.nml.nml.EIF_cond_exp_isfa_ista + :members: + :undoc-members: + + + +ElectricalConnection +#################### + +.. autoclass:: neuroml.nml.nml.ElectricalConnection + :members: + :undoc-members: + + + +ElectricalConnectionInstance +############################ + +.. autoclass:: neuroml.nml.nml.ElectricalConnectionInstance + :members: + :undoc-members: + + + +ElectricalConnectionInstanceW +############################# + +.. autoclass:: neuroml.nml.nml.ElectricalConnectionInstanceW + :members: + :undoc-members: + + + +ElectricalProjection +#################### + +.. autoclass:: neuroml.nml.nml.ElectricalProjection + :members: + :undoc-members: + + + +ExpCondSynapse +############## + +.. 
autoclass:: neuroml.nml.nml.ExpCondSynapse + :members: + :undoc-members: + + + +ExpCurrSynapse +############## + +.. autoclass:: neuroml.nml.nml.ExpCurrSynapse + :members: + :undoc-members: + + + +ExpOneSynapse +############# + +.. autoclass:: neuroml.nml.nml.ExpOneSynapse + :members: + :undoc-members: + + + +ExpThreeSynapse +############### + +.. autoclass:: neuroml.nml.nml.ExpThreeSynapse + :members: + :undoc-members: + + + +ExpTwoSynapse +############# + +.. autoclass:: neuroml.nml.nml.ExpTwoSynapse + :members: + :undoc-members: + + + +ExplicitInput +############# + +.. autoclass:: neuroml.nml.nml.ExplicitInput + :members: + :undoc-members: + + + +Exposure +######## + +.. autoclass:: neuroml.nml.nml.Exposure + :members: + :undoc-members: + + + +ExtracellularProperties +####################### + +.. autoclass:: neuroml.nml.nml.ExtracellularProperties + :members: + :undoc-members: + + + +ExtracellularPropertiesLocal +############################ + +.. autoclass:: neuroml.nml.nml.ExtracellularPropertiesLocal + :members: + :undoc-members: + + + +FitzHughNagumo1969Cell +###################### + +.. autoclass:: neuroml.nml.nml.FitzHughNagumo1969Cell + :members: + :undoc-members: + + + +FitzHughNagumoCell +################## + +.. autoclass:: neuroml.nml.nml.FitzHughNagumoCell + :members: + :undoc-members: + + + +FixedFactorConcentrationModel +############################# + +.. autoclass:: neuroml.nml.nml.FixedFactorConcentrationModel + :members: + :undoc-members: + + + +ForwardTransition +################# + +.. autoclass:: neuroml.nml.nml.ForwardTransition + :members: + :undoc-members: + + + +GapJunction +########### + +.. autoclass:: neuroml.nml.nml.GapJunction + :members: + :undoc-members: + + + +GateFractional +############## + +.. autoclass:: neuroml.nml.nml.GateFractional + :members: + :undoc-members: + + + +GateFractionalSubgate +##################### + +.. autoclass:: neuroml.nml.nml.GateFractionalSubgate + :members: + :undoc-members: + + + +GateHHInstantaneous +################### + +.. autoclass:: neuroml.nml.nml.GateHHInstantaneous + :members: + :undoc-members: + + + +GateHHRates +########### + +.. autoclass:: neuroml.nml.nml.GateHHRates + :members: + :undoc-members: + + + +GateHHRatesInf +############## + +.. autoclass:: neuroml.nml.nml.GateHHRatesInf + :members: + :undoc-members: + + + +GateHHRatesTau +############## + +.. autoclass:: neuroml.nml.nml.GateHHRatesTau + :members: + :undoc-members: + + + +GateHHRatesTauInf +################# + +.. autoclass:: neuroml.nml.nml.GateHHRatesTauInf + :members: + :undoc-members: + + + +GateHHTauInf +############ + +.. autoclass:: neuroml.nml.nml.GateHHTauInf + :members: + :undoc-members: + + + +GateHHUndetermined +################## + +.. autoclass:: neuroml.nml.nml.GateHHUndetermined + :members: + :undoc-members: + + + +GateKS +###### + +.. autoclass:: neuroml.nml.nml.GateKS + :members: + :undoc-members: + + + +GradedSynapse +############# + +.. autoclass:: neuroml.nml.nml.GradedSynapse + :members: + :undoc-members: + + + +GridLayout +########## + +.. autoclass:: neuroml.nml.nml.GridLayout + :members: + :undoc-members: + + + +HHRate +###### + +.. autoclass:: neuroml.nml.nml.HHRate + :members: + :undoc-members: + + + +HHTime +###### + +.. autoclass:: neuroml.nml.nml.HHTime + :members: + :undoc-members: + + + +HHVariable +########## + +.. autoclass:: neuroml.nml.nml.HHVariable + :members: + :undoc-members: + + + +HH_cond_exp +########### + +.. 
autoclass:: neuroml.nml.nml.HH_cond_exp + :members: + :undoc-members: + + + +IF_cond_alpha +############# + +.. autoclass:: neuroml.nml.nml.IF_cond_alpha + :members: + :undoc-members: + + + +IF_cond_exp +########### + +.. autoclass:: neuroml.nml.nml.IF_cond_exp + :members: + :undoc-members: + + + +IF_curr_alpha +############# + +.. autoclass:: neuroml.nml.nml.IF_curr_alpha + :members: + :undoc-members: + + + +IF_curr_exp +########### + +.. autoclass:: neuroml.nml.nml.IF_curr_exp + :members: + :undoc-members: + + + +IafCell +####### + +.. autoclass:: neuroml.nml.nml.IafCell + :members: + :undoc-members: + + + +IafRefCell +########## + +.. autoclass:: neuroml.nml.nml.IafRefCell + :members: + :undoc-members: + + + +IafTauCell +########## + +.. autoclass:: neuroml.nml.nml.IafTauCell + :members: + :undoc-members: + + + +IafTauRefCell +############# + +.. autoclass:: neuroml.nml.nml.IafTauRefCell + :members: + :undoc-members: + + + +Include +####### + +.. autoclass:: neuroml.nml.nml.Include + :members: + :undoc-members: + + + +IncludeType +########### + +.. autoclass:: neuroml.nml.nml.IncludeType + :members: + :undoc-members: + + + +InhomogeneousParameter +###################### + +.. autoclass:: neuroml.nml.nml.InhomogeneousParameter + :members: + :undoc-members: + + + +InhomogeneousValue +################## + +.. autoclass:: neuroml.nml.nml.InhomogeneousValue + :members: + :undoc-members: + + + +InitMembPotential +################# + +.. autoclass:: neuroml.nml.nml.InitMembPotential + :members: + :undoc-members: + + + +Input +##### + +.. autoclass:: neuroml.nml.nml.Input + :members: + :undoc-members: + + + +InputList +######### + +.. autoclass:: neuroml.nml.nml.InputList + :members: + :undoc-members: + + + +InputW +###### + +.. autoclass:: neuroml.nml.nml.InputW + :members: + :undoc-members: + + + +Instance +######## + +.. autoclass:: neuroml.nml.nml.Instance + :members: + :undoc-members: + + + +InstanceRequirement +################### + +.. autoclass:: neuroml.nml.nml.InstanceRequirement + :members: + :undoc-members: + + + +IntracellularProperties +####################### + +.. autoclass:: neuroml.nml.nml.IntracellularProperties + :members: + :undoc-members: + + + +IntracellularProperties2CaPools +############################### + +.. autoclass:: neuroml.nml.nml.IntracellularProperties2CaPools + :members: + :undoc-members: + + + +IonChannel +########## + +.. autoclass:: neuroml.nml.nml.IonChannel + :members: + :undoc-members: + + + +IonChannelHH +############ + +.. autoclass:: neuroml.nml.nml.IonChannelHH + :members: + :undoc-members: + + + +IonChannelKS +############ + +.. autoclass:: neuroml.nml.nml.IonChannelKS + :members: + :undoc-members: + + + +IonChannelScalable +################## + +.. autoclass:: neuroml.nml.nml.IonChannelScalable + :members: + :undoc-members: + + + +IonChannelVShift +################ + +.. autoclass:: neuroml.nml.nml.IonChannelVShift + :members: + :undoc-members: + + + +Izhikevich2007Cell +################## + +.. autoclass:: neuroml.nml.nml.Izhikevich2007Cell + :members: + :undoc-members: + + + +IzhikevichCell +############## + +.. autoclass:: neuroml.nml.nml.IzhikevichCell + :members: + :undoc-members: + + + +LEMS_Property +############# + +.. autoclass:: neuroml.nml.nml.LEMS_Property + :members: + :undoc-members: + + + +Layout +###### + +.. autoclass:: neuroml.nml.nml.Layout + :members: + :undoc-members: + + + +LinearGradedSynapse +################### + +.. 
autoclass:: neuroml.nml.nml.LinearGradedSynapse + :members: + :undoc-members: + + + +Location +######## + +.. autoclass:: neuroml.nml.nml.Location + :members: + :undoc-members: + + + +Member +###### + +.. autoclass:: neuroml.nml.nml.Member + :members: + :undoc-members: + + + +MembraneProperties +################## + +.. autoclass:: neuroml.nml.nml.MembraneProperties + :members: + :undoc-members: + + + +MembraneProperties2CaPools +########################## + +.. autoclass:: neuroml.nml.nml.MembraneProperties2CaPools + :members: + :undoc-members: + + + +Morphology +########## + +.. autoclass:: neuroml.nml.nml.Morphology + :members: + :undoc-members: + + + +NamedDimensionalType +#################### + +.. autoclass:: neuroml.nml.nml.NamedDimensionalType + :members: + :undoc-members: + + + +NamedDimensionalVariable +######################## + +.. autoclass:: neuroml.nml.nml.NamedDimensionalVariable + :members: + :undoc-members: + + + +Network +####### + +.. autoclass:: neuroml.nml.nml.Network + :members: + :undoc-members: + + + +NeuroMLDocument +############### + +.. autoclass:: neuroml.nml.nml.NeuroMLDocument + :members: + :undoc-members: + + + +OpenState +######### + +.. autoclass:: neuroml.nml.nml.OpenState + :members: + :undoc-members: + + + +Parameter +######### + +.. autoclass:: neuroml.nml.nml.Parameter + :members: + :undoc-members: + + + +Path +#### + +.. autoclass:: neuroml.nml.nml.Path + :members: + :undoc-members: + + + +PinskyRinzelCA3Cell +################### + +.. autoclass:: neuroml.nml.nml.PinskyRinzelCA3Cell + :members: + :undoc-members: + + + +PlasticityMechanism +################### + +.. autoclass:: neuroml.nml.nml.PlasticityMechanism + :members: + :undoc-members: + + + +Point3DWithDiam +############### + +.. autoclass:: neuroml.nml.nml.Point3DWithDiam + :members: + :undoc-members: + + + +PoissonFiringSynapse +#################### + +.. autoclass:: neuroml.nml.nml.PoissonFiringSynapse + :members: + :undoc-members: + + + +Population +########## + +.. autoclass:: neuroml.nml.nml.Population + :members: + :undoc-members: + + + +Projection +########## + +.. autoclass:: neuroml.nml.nml.Projection + :members: + :undoc-members: + + + +Property +######## + +.. autoclass:: neuroml.nml.nml.Property + :members: + :undoc-members: + + + +ProximalDetails +############### + +.. autoclass:: neuroml.nml.nml.ProximalDetails + :members: + :undoc-members: + + + +PulseGenerator +############## + +.. autoclass:: neuroml.nml.nml.PulseGenerator + :members: + :undoc-members: + + + +PulseGeneratorDL +################ + +.. autoclass:: neuroml.nml.nml.PulseGeneratorDL + :members: + :undoc-members: + + + +Q10ConductanceScaling +##################### + +.. autoclass:: neuroml.nml.nml.Q10ConductanceScaling + :members: + :undoc-members: + + + +Q10Settings +########### + +.. autoclass:: neuroml.nml.nml.Q10Settings + :members: + :undoc-members: + + + +RampGenerator +############# + +.. autoclass:: neuroml.nml.nml.RampGenerator + :members: + :undoc-members: + + + +RampGeneratorDL +############### + +.. autoclass:: neuroml.nml.nml.RampGeneratorDL + :members: + :undoc-members: + + + +RandomLayout +############ + +.. autoclass:: neuroml.nml.nml.RandomLayout + :members: + :undoc-members: + + + +ReactionScheme +############## + +.. autoclass:: neuroml.nml.nml.ReactionScheme + :members: + :undoc-members: + + + +Region +###### + +.. autoclass:: neuroml.nml.nml.Region + :members: + :undoc-members: + + + +Requirement +########### + +.. 
autoclass:: neuroml.nml.nml.Requirement + :members: + :undoc-members: + + + +Resistivity +########### + +.. autoclass:: neuroml.nml.nml.Resistivity + :members: + :undoc-members: + + + +ReverseTransition +################# + +.. autoclass:: neuroml.nml.nml.ReverseTransition + :members: + :undoc-members: + + + +Segment +####### + +.. autoclass:: neuroml.nml.nml.Segment + :members: + :undoc-members: + + + +SegmentEndPoint +############### + +.. autoclass:: neuroml.nml.nml.SegmentEndPoint + :members: + :undoc-members: + + + +SegmentGroup +############ + +.. autoclass:: neuroml.nml.nml.SegmentGroup + :members: + :undoc-members: + + + +SegmentParent +############# + +.. autoclass:: neuroml.nml.nml.SegmentParent + :members: + :undoc-members: + + + +SilentSynapse +############# + +.. autoclass:: neuroml.nml.nml.SilentSynapse + :members: + :undoc-members: + + + +SineGenerator +############# + +.. autoclass:: neuroml.nml.nml.SineGenerator + :members: + :undoc-members: + + + +SineGeneratorDL +############### + +.. autoclass:: neuroml.nml.nml.SineGeneratorDL + :members: + :undoc-members: + + + +Space +##### + +.. autoclass:: neuroml.nml.nml.Space + :members: + :undoc-members: + + + +SpaceStructure +############## + +.. autoclass:: neuroml.nml.nml.SpaceStructure + :members: + :undoc-members: + + + +Species +####### + +.. autoclass:: neuroml.nml.nml.Species + :members: + :undoc-members: + + + +SpecificCapacitance +################### + +.. autoclass:: neuroml.nml.nml.SpecificCapacitance + :members: + :undoc-members: + + + +Spike +##### + +.. autoclass:: neuroml.nml.nml.Spike + :members: + :undoc-members: + + + +SpikeArray +########## + +.. autoclass:: neuroml.nml.nml.SpikeArray + :members: + :undoc-members: + + + +SpikeGenerator +############## + +.. autoclass:: neuroml.nml.nml.SpikeGenerator + :members: + :undoc-members: + + + +SpikeGeneratorPoisson +##################### + +.. autoclass:: neuroml.nml.nml.SpikeGeneratorPoisson + :members: + :undoc-members: + + + +SpikeGeneratorRandom +#################### + +.. autoclass:: neuroml.nml.nml.SpikeGeneratorRandom + :members: + :undoc-members: + + + +SpikeGeneratorRefPoisson +######################## + +.. autoclass:: neuroml.nml.nml.SpikeGeneratorRefPoisson + :members: + :undoc-members: + + + +SpikeSourcePoisson +################## + +.. autoclass:: neuroml.nml.nml.SpikeSourcePoisson + :members: + :undoc-members: + + + +SpikeThresh +########### + +.. autoclass:: neuroml.nml.nml.SpikeThresh + :members: + :undoc-members: + + + +Standalone +########## + +.. autoclass:: neuroml.nml.nml.Standalone + :members: + :undoc-members: + + + +StateVariable +############# + +.. autoclass:: neuroml.nml.nml.StateVariable + :members: + :undoc-members: + + + +SubTree +####### + +.. autoclass:: neuroml.nml.nml.SubTree + :members: + :undoc-members: + + + +SynapticConnection +################## + +.. autoclass:: neuroml.nml.nml.SynapticConnection + :members: + :undoc-members: + + + +TauInfTransition +################ + +.. autoclass:: neuroml.nml.nml.TauInfTransition + :members: + :undoc-members: + + + +TimeDerivative +############## + +.. autoclass:: neuroml.nml.nml.TimeDerivative + :members: + :undoc-members: + + + +TimedSynapticInput +################## + +.. autoclass:: neuroml.nml.nml.TimedSynapticInput + :members: + :undoc-members: + + + +TransientPoissonFiringSynapse +############################# + +.. autoclass:: neuroml.nml.nml.TransientPoissonFiringSynapse + :members: + :undoc-members: + + + +UnstructuredLayout +################## + +.. 
autoclass:: neuroml.nml.nml.UnstructuredLayout + :members: + :undoc-members: + + + +VariableParameter +################# + +.. autoclass:: neuroml.nml.nml.VariableParameter + :members: + :undoc-members: + + + +VoltageClamp +############ + +.. autoclass:: neuroml.nml.nml.VoltageClamp + :members: + :undoc-members: + + + +VoltageClampTriple +################## + +.. autoclass:: neuroml.nml.nml.VoltageClampTriple + :members: + :undoc-members: + + + +basePyNNCell +############ + +.. autoclass:: neuroml.nml.nml.basePyNNCell + :members: + :undoc-members: + + + +basePyNNIaFCell +############### + +.. autoclass:: neuroml.nml.nml.basePyNNIaFCell + :members: + :undoc-members: + + + +basePyNNIaFCondCell +################### + +.. autoclass:: neuroml.nml.nml.basePyNNIaFCondCell + :members: + :undoc-members: + + diff --git a/doc/examples.rst b/doc/userdocs/examples.rst similarity index 58% rename from doc/examples.rst rename to doc/userdocs/examples.rst index f6bf23b5..2b7e1407 100644 --- a/doc/examples.rst +++ b/doc/userdocs/examples.rst @@ -13,38 +13,38 @@ be tested to confirm they work by running the run_all.py script. Creating a NeuroML morphology ----------------------------- -.. literalinclude:: ../neuroml/examples/morphology_generation.py +.. literalinclude:: ../../neuroml/examples/morphology_generation.py Loading and modifying a file ---------------------------- -.. literalinclude:: ../neuroml/examples/loading_modifying_writing.py +.. literalinclude:: ../../neuroml/examples/loading_modifying_writing.py Building a network ------------------ -.. literalinclude:: ../neuroml/examples/build_network.py +.. literalinclude:: ../../neuroml/examples/build_network.py Building a 3D network --------------------- -.. literalinclude:: ../neuroml/examples/build_3D_network.py +.. literalinclude:: ../../neuroml/examples/build_3D_network.py Ion channels ------------ -.. literalinclude:: ../neuroml/examples/ion_channel_generation.py +.. literalinclude:: ../../neuroml/examples/ion_channel_generation.py PyNN models ----------- -.. literalinclude:: ../neuroml/examples/write_pynn.py +.. literalinclude:: ../../neuroml/examples/write_pynn.py Synapses -------- -.. literalinclude:: ../neuroml/examples/write_syns.py +.. literalinclude:: ../../neuroml/examples/write_syns.py Working with JSON serialization ------------------------------- @@ -53,18 +53,18 @@ One thing to note is that the JSONWriter, unlike NeuroMLWriter, will serializing using array-based (Arraymorph) representation if this has been used. -.. literalinclude:: ../neuroml/examples/json_serialization.py +.. literalinclude:: ../../neuroml/examples/json_serialization.py Working with arraymorphs ------------------------ -.. literalinclude:: ../neuroml/examples/arraymorph_generation.py +.. literalinclude:: ../../neuroml/examples/arraymorph_generation.py Working with Izhikevich Cells ----------------------------- These examples were kindly contributed by Steve Marsh -.. literalinclude:: ../neuroml/examples/single_izhikevich_reader.py -.. literalinclude:: ../neuroml/examples/single_izhikevich_writer.py +.. literalinclude:: ../../neuroml/examples/single_izhikevich_reader.py +.. 
literalinclude:: ../../neuroml/examples/single_izhikevich_writer.py diff --git a/doc/userdocs/get-nml-core-docs.sh b/doc/userdocs/get-nml-core-docs.sh new file mode 100644 index 00000000..5f37b6c6 --- /dev/null +++ b/doc/userdocs/get-nml-core-docs.sh @@ -0,0 +1,16 @@ +#!/bin/bash + +# Copyright 2021 Ankur Sinha +# Author: Ankur Sinha +# File : get-nml-core-docs.sh +# +# Since automodule does not allow us to list each class as a sub-section, the +# NeuroML core class page becomes rather heavy and not easy to navigate. +# The only way around this is to not use automodule, and instead, use autoclass +# individually for each class. + +# This script will generate the rst file for use by sphinx. + +grep -E '^class.*' ../neuroml/nml/nml.py | sed -e 's/^class //' -e 's/(.*)://' > classlist.txt + + diff --git a/doc/userdocs/index.rst b/doc/userdocs/index.rst new file mode 100644 index 00000000..f12f4592 --- /dev/null +++ b/doc/userdocs/index.rst @@ -0,0 +1,12 @@ +User guide +********** + +.. toctree:: + :maxdepth: 2 + + introduction + install + api + examples + bibliography + diff --git a/doc/install.rst b/doc/userdocs/install.rst similarity index 100% rename from doc/install.rst rename to doc/userdocs/install.rst diff --git a/doc/introduction.rst b/doc/userdocs/introduction.rst similarity index 100% rename from doc/introduction.rst rename to doc/userdocs/introduction.rst diff --git a/doc/loaders.rst b/doc/userdocs/loaders.rst similarity index 100% rename from doc/loaders.rst rename to doc/userdocs/loaders.rst diff --git a/doc/utils.rst b/doc/userdocs/utils.rst similarity index 100% rename from doc/utils.rst rename to doc/userdocs/utils.rst diff --git a/doc/writers.rst b/doc/userdocs/writers.rst similarity index 100% rename from doc/writers.rst rename to doc/userdocs/writers.rst From 5a0a4b5808af36fd0ec6d120b79f204d9e6929e8 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Wed, 25 Aug 2021 18:15:10 +0100 Subject: [PATCH 035/136] fix: make pytest ignore python files in doc folder --- pytest.ini | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 pytest.ini diff --git a/pytest.ini b/pytest.ini new file mode 100644 index 00000000..7e1b9b7c --- /dev/null +++ b/pytest.ini @@ -0,0 +1,2 @@ +[pytest] +addopts = --ignore-glob=doc/* From 0ac797d356c5885069bfaf1164b2f9edd09581ea Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Wed, 25 Aug 2021 18:18:33 +0100 Subject: [PATCH 036/136] fix: make flake8 ignore docs folder --- setup.cfg | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.cfg b/setup.cfg index 34ad2ac1..bfeeeedd 100644 --- a/setup.cfg +++ b/setup.cfg @@ -3,4 +3,4 @@ # spacing around operators, comment blocks, in argument lists # lines too long ignore = E501,E502,F403,F405,E231,E228,E225,E226,E265,E261 -exclude = neuroml/nml/nml.py +exclude = neuroml/nml/nml.py,doc From 4115bd2791569f937502eb9083cd280cdfd59f6b Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Thu, 26 Aug 2021 16:57:22 +0100 Subject: [PATCH 037/136] feat: update for py39 --- neuroml/nml/generateds_config.py | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/neuroml/nml/generateds_config.py b/neuroml/nml/generateds_config.py index 83fc889d..25b1c5c4 100644 --- a/neuroml/nml/generateds_config.py +++ b/neuroml/nml/generateds_config.py @@ -11,13 +11,16 @@ sys.setrecursionlimit(10000) + def remove_curlies(string): return re.sub("{.*}","",string) + def to_lowercase_with_underscores(string): s1 = 
re.sub('(.)([A-Z][a-z]+)', r'\1_\2', string) return re.sub('([a-z0-9])([A-Z])', r'\1_\2', s1).lower() + def to_camelback(string): string_list = list(string) i = 0 @@ -30,6 +33,7 @@ def to_camelback(string): return str(string) + def traverse_doc(queue,rename): """Recursive function to traverse the nodes of a tree in breadth-first order. @@ -71,10 +75,10 @@ def _node_to_python(node): filename = variables['schema_name'] -import StringIO +import io import process_includes -outfile = StringIO.StringIO() +outfile = io.StringIO() infile = open(filename, 'r') process_includes.process_include_files(infile, outfile, @@ -140,12 +144,12 @@ def _node_to_python(node): print(NameTable) print("Saving NameTable to csv file") -writer = csv.writer(open('name_table.csv', 'wb')) -for key, value in NameTable.items(): - writer.writerow([key, value]) +writer = csv.writer(open('name_table.csv', 'w')) +for key, value in list(NameTable.items()): + writer.writerow([key, value]) -print ("Saving name changes table to csv file") -changes_writer = csv.writer(open('changed_names.csv','wb')) +print("Saving name changes table to csv file") +changes_writer = csv.writer(open('changed_names.csv','w')) for key in NameTable: value = NameTable[key] if key != value: From a03334a4294e22b24eda3add214d9bc646fd25fe Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Thu, 26 Aug 2021 17:23:32 +0100 Subject: [PATCH 038/136] feat: update nml.py with new generateds on Py3.9 --- neuroml/nml/changed_names.csv | 416 +- neuroml/nml/name_table.csv | 666 +- neuroml/nml/nml.py | 20680 +++++++++++++++++++++++--------- 3 files changed, 15357 insertions(+), 6405 deletions(-) diff --git a/neuroml/nml/changed_names.csv b/neuroml/nml/changed_names.csv index 57ff6704..f16984f0 100644 --- a/neuroml/nml/changed_names.csv +++ b/neuroml/nml/changed_names.csv @@ -1,233 +1,233 @@ +http://www.w3.org/2001/XMLSchema http://www.w3.org/2001/XMLSchema.xsd,http://www.w3.org/2001/xml_schema http://www.w3.org/2001/xml_schema.xsd +property,properties +unbounded,unboundeds +neuroLexId,neuro_lex_id +fractionAlong,fraction_along +segmentId,segment_id +population,populations +inputW,input_ws +postComponent,post_component +preComponent,pre_component +continuousConnectionInstanceW,continuous_connection_instance_ws +continuousConnectionInstance,continuous_connection_instances +continuousConnection,continuous_connections +electricalConnectionInstanceW,electrical_connection_instance_ws +electricalConnectionInstance,electrical_connection_instances +electricalConnection,electrical_connections +postFractionAlong,post_fraction_along +postSegment,post_segment +postCell,post_cell +preFractionAlong,pre_fraction_along +preSegment,pre_segment +preCell,pre_cell +postSegmentId,post_segment_id +postCellId,post_cell_id +preSegmentId,pre_segment_id +preCellId,pre_cell_id +connectionWD,connection_wds +connection,connections +postsynapticPopulation,postsynaptic_population +presynapticPopulation,presynaptic_population skip,skips -gateHHtauInf,gate_hh_tau_infs -sineGenerator,sine_generators -sineGeneratorDL,sine_generator_dls -cell2CaPools,cell2_ca_poolses -xSpacing,x_spacing -plasticityMechanism,plasticity_mechanism -alphaCondSynapse,alpha_cond_synapses -channelDensityNonUniform,channel_density_non_uniforms +xs:nonNegativeInteger,xs:non_negative_integer +zSize,z_size +ySize,y_size +xSize,x_size +region,regions +space,spaces +populationTypes,population_types +populationList,population_list +extracellularProperties,extracellular_properties +instance,instances 
+allowedSpaces,allowed_spaces zStart,z_start -membraneProperties,membrane_properties -inhomogeneousParameter,inhomogeneous_parameters -delT,del_t +yStart,y_start +xStart,x_start +zSpacing,z_spacing +ySpacing,y_spacing +xSpacing,x_spacing +basedOn,based_on +networkTypes,network_types +networkWithTemperature,network_with_temperature +network,networks +inputList,input_lists +explicitInput,explicit_inputs +continuousProjection,continuous_projections +electricalProjection,electrical_projections +projection,projections +synapticConnection,synaptic_connections +cellSet,cell_sets +spikeTarget,spike_target +averageRate,average_rate +minimumISI,minimum_isi +minISI,min_isi +maxISI,max_isi +spike,spikes +simpleSeriesResistance,simple_series_resistance +returnVoltage,return_voltage +testingVoltage,testing_voltage +conditioningVoltage,conditioning_voltage targetVoltage,target_voltage -xs:anyURI,xs:any_uri -gLd,g_ld -biophysicalProperties,biophysical_properties -presynapticPopulation,presynaptic_population +rampGeneratorDL,ramp_generator_dls +sineGeneratorDL,sine_generator_dls +pulseGeneratorDL,pulse_generator_dls +rampGenerator,ramp_generators +sineGenerator,sine_generators +pulseGenerator,pulse_generators +baselineAmplitude,baseline_amplitude finishAmplitude,finish_amplitude -gLs,g_ls +startAmplitude,start_amplitude +resistivity,resistivities +decayingPoolConcentrationModel,decaying_pool_concentration_models segmentGroup,segment_groups -alphaCurrentSynapse,alpha_current_synapses -poissonFiringSynapse,poisson_firing_synapses -cell,cells -path,paths -alphaCurrSynapse,alpha_curr_synapses -postsynapticPopulation,postsynaptic_population -gKdr,g_kdr -preFractionAlong,pre_fraction_along -projection,projections -forwardTransition,forward_transition -averageRate,average_rate -cellSet,cell_sets +initialExtConcentration,initial_ext_concentration +initialConcentration,initial_concentration +concentrationModel,concentration_model +inhomogeneousParameter,inhomogeneous_parameters +inhomogeneousValue,inhomogeneous_value +segment,segments +condDensity,cond_density +ionChannel,ion_channel +variableParameter,variable_parameters +vShift,v_shift channelDensityNernstCa2,channel_density_nernst_ca2s -postSegmentId,post_segment_id -spike,spikes -fitzHughNagumoCell,fitz_hugh_nagumo_cells -channelPopulation,channel_populations -continuousProjection,continuous_projections -include,includes +initMembPotential,init_memb_potentials specificCapacitance,specific_capacitances -allowedSpaces,allowed_spaces -xSize,x_size -explicitInput,explicit_inputs -xs:nonNegativeInteger,xs:non_negative_integer -concentrationModel,concentration_model -ionChannelHH,ion_channel_hhs +spikeThresh,spike_threshes +channelDensityNonUniformGHK,channel_density_non_uniform_ghks +channelDensityNonUniformNernst,channel_density_non_uniform_nernsts +channelDensityNonUniform,channel_density_non_uniforms +channelDensityGHK2,channel_density_ghk2s channelDensityGHK,channel_density_ghks -intracellularProperties2CaPools,intracellular_properties2_ca_pools -linearGradedSynapse,linear_graded_synapses -iafCell,iaf_cells -silentSynapse,silent_synapses -spikeGeneratorRandom,spike_generator_randoms -gate,gates -steadyState,steady_state -channelDensity,channel_densities -expCondSynapse,exp_cond_synapses -continuousConnectionInstanceW,continuous_connection_instance_ws -expTwoSynapse,exp_two_synapses -gateKS,gate_kses -izhikevich2007Cell,izhikevich2007_cells -decayConstant,decay_constant -expOneSynapse,exp_one_synapses -baseCell,base_cells -tauDecay,tau_decay -iDend,i_dend 
-tsodyksMarkramDepFacMechanism,tsodyks_markram_dep_fac_mechanism -reverseRate,reverse_rate channelDensityNernst,channel_density_nernsts -reverseTransition,reverse_transition -preComponent,pre_component -minISI,min_isi -gCa,g_ca -eNa,e_na -transientPoissonFiringSynapse,transient_poisson_firing_synapses -preCell,pre_cell -blockConcentration,block_concentration -inputList,input_lists -tauRec,tau_rec -networkTypes,network_types -inhomogeneousValue,inhomogeneous_value +channelDensityVShift,channel_density_v_shifts +channelDensity,channel_densities +channelPopulation,channel_populations +intracellularProperties2CaPools,intracellular_properties2_ca_pools +membraneProperties2CaPools,membrane_properties2_ca_pools +intracellularProperties,intracellular_properties +membraneProperties,membrane_properties +normalizationEnd,normalization_end +translationStart,translation_start +subTree,sub_trees +path,paths +include,includes +member,members +biophysicalProperties2CaPools,biophysical_properties2_ca_pools +biophysicalProperties,biophysical_properties eL,e_l -network,networks eK,e_k -space,spaces -forwardRate,forward_rate -gKC,g_kc -rampGeneratorDL,ramp_generator_dls -neuroLexId,neuro_lex_id -returnVoltage,return_voltage -intracellularProperties,intracellular_properties -q10Settings,q10_settings -initialConcentration,initial_concentration -decayingPoolConcentrationModel,decaying_pool_concentration_models -voltageClampTriple,voltage_clamp_triples -alphaSynapse,alpha_synapses -preCellId,pre_cell_id -pulseGeneratorDL,pulse_generator_dls -gateHHrates,gate_hh_rates -population,populations eCa,e_ca -shellThickness,shell_thickness -expCurrSynapse,exp_curr_synapses -resistivity,resistivities -ySpacing,y_spacing -ySize,y_size -gateFractional,gate_fractionals -blockMechanism,block_mechanism -instance,instances -connectionWD,connection_wds -gradedSynapse,graded_synapses -gapJunction,gap_junctions -baselineAmplitude,baseline_amplitude -segment,segments -postFractionAlong,post_fraction_along -ionChannelVShift,ion_channel_v_shifts -spikeGeneratorRefPoisson,spike_generator_ref_poissons -channelTypes,channel_types -xStart,x_start -voltageClamp,voltage_clamps -testingVoltage,testing_voltage -adExIaFCell,ad_ex_ia_f_cells -ionChannelKS,ion_channel_kses -preSegmentId,pre_segment_id -spikeThresh,spike_threshes -spikeGeneratorPoisson,spike_generator_poissons -vShift,v_shift -ionChannelPassive,ion_channel_passive -subGate,sub_gates -gL,g_l -tauRise,tau_rise -izhikevichCell,izhikevich_cells -postCellId,post_cell_id -condDensity,cond_density -gateHHratesTauInf,gate_h_hrates_tau_infs -fixedFactorConcentrationModel,fixed_factor_concentration_models -fixedQ10,fixed_q10 -region,regions -extracellularProperties,extracellular_properties -connection,connections -http://www.w3.org/2001/XMLSchema http://www.w3.org/2001/XMLSchema.xsd,http://www.w3.org/2001/xml_schema http://www.w3.org/2001/xml_schema.xsd -electricalConnectionInstance,electrical_connection_instances -translationStart,translation_start -closedState,closed_states -gNa,g_na +eNa,e_na +gAmpa,g_ampa +gNmda,g_nmda +gKC,g_kc gKahp,g_kahp -electricalConnectionInstanceW,electrical_connection_instance_ws -spikeTarget,spike_target -yStart,y_start +gCa,g_ca +gKdr,g_kdr +gNa,g_na +gLd,g_ld +gLs,g_ls +iDend,i_dend +iSoma,i_soma +delT,del_t +gL,g_l +leakConductance,leak_conductance +leakReversal,leak_reversal tauFac,tau_fac -q10Factor,q10_factor -populationList,population_list -zSpacing,z_spacing +tauRec,tau_rec +initReleaseProb,init_release_prob 
+tsodyksMarkramDepFacMechanism,tsodyks_markram_dep_fac_mechanism +tsodyksMarkramDepMechanism,tsodyks_markram_dep_mechanism +scalingVolt,scaling_volt +scalingConc,scaling_conc +blockConcentration,block_concentration +voltageConcDepBlockMechanism,voltage_conc_dep_block_mechanism +blockMechanism,block_mechanism +plasticityMechanism,plasticity_mechanism synapse2Path,synapse2_path +synapse1Path,synapse1_path +tauRise,tau_rise +tauDecay2,tau_decay2 +tauDecay1,tau_decay1 +tauDecay,tau_decay +decayConstant,decay_constant +restingConc,resting_conc +shellThickness,shell_thickness experimentalTemp,experimental_temp -openState,open_states -property,properties -zSize,z_size -compoundInput,compound_inputs -ionChannel,ion_channel -iafTauRefCell,iaf_tau_ref_cells -scalingConc,scaling_conc -pinskyRinzelCA3Cell,pinsky_rinzel_ca3_cells +q10Factor,q10_factor +fixedQ10,fixed_q10 +fractionalConductance,fractional_conductance +timeCourse,time_course +steadyState,steady_state +q10Settings,q10_settings +subGate,sub_gates +reverseRate,reverse_rate +forwardRate,forward_rate +gateTypes,gate_types tauInfTransition,tau_inf_transition -member,members +reverseTransition,reverse_transition +forwardTransition,forward_transition +openState,open_states +closedState,closed_states +gateFractional,gate_fractionals +gateKS,gate_kses gateHHInstantaneous,gate_hh_instantaneouses -populationTypes,population_types -conditioningVoltage,conditioning_voltage -variableParameter,variable_parameters -iSoma,i_soma -channelDensityVShift,channel_density_v_shifts -biophysicalProperties2CaPools,biophysical_properties2_ca_pools -fractionalConductance,fractional_conductance -tsodyksMarkramDepMechanism,tsodyks_markram_dep_mechanism -fitzHughNagumo1969Cell,fitz_hugh_nagumo1969_cells -segmentId,segment_id +gateHHratesTauInf,gate_h_hrates_tau_infs +gateHHratesInf,gate_h_hrates_infs +gateHHtauInf,gate_hh_tau_infs +gateHHratesTau,gate_h_hrates_taus +gateHHrates,gate_hh_rates +channelTypes,channel_types +ionChannelHH,ion_channel_hhs +ionChannelPassive,ion_channel_passive +gate,gates q10ConductanceScaling,q10_conductance_scalings -rampGenerator,ramp_generators -voltageConcDepBlockMechanism,voltage_conc_dep_block_mechanism -postComponent,post_component -initReleaseProb,init_release_prob -maxISI,max_isi -doubleSynapse,double_synapses -timeCourse,time_course +fixedFactorConcentrationModel,fixed_factor_concentration_models +transientPoissonFiringSynapse,transient_poisson_firing_synapses +poissonFiringSynapse,poisson_firing_synapses +spikeGeneratorRefPoisson,spike_generator_ref_poissons +spikeGeneratorPoisson,spike_generator_poissons +spikeGeneratorRandom,spike_generator_randoms spikeGenerator,spike_generators timedSynapticInput,timed_synaptic_inputs -gateHHratesTau,gate_h_hrates_taus -continuousConnection,continuous_connections -networkWithTemperature,network_with_temperature -fractionAlong,fraction_along -normalizationEnd,normalization_end -synapse1Path,synapse1_path -electricalProjection,electrical_projections -gateHHratesInf,gate_h_hrates_infs -simpleSeriesResistance,simple_series_resistance -channelDensityGHK2,channel_density_ghk2s -preSegment,pre_segment -gNmda,g_nmda -scalingVolt,scaling_volt -initialExtConcentration,initial_ext_concentration -expThreeSynapse,exp_three_synapses -startAmplitude,start_amplitude -unbounded,unboundeds +spikeArray,spike_arrays +voltageClampTriple,voltage_clamp_triples +voltageClamp,voltage_clamps compoundInputDL,compound_input_dls -xs:positiveInteger,xs:positive_integer 
-membraneProperties2CaPools,membrane_properties2_ca_pools +compoundInput,compound_inputs +alphaCurrSynapse,alpha_curr_synapses +expCurrSynapse,exp_curr_synapses +alphaCondSynapse,alpha_cond_synapses +expCondSynapse,exp_cond_synapses +gradedSynapse,graded_synapses +linearGradedSynapse,linear_graded_synapses +silentSynapse,silent_synapses +gapJunction,gap_junctions +doubleSynapse,double_synapses +blockingPlasticSynapse,blocking_plastic_synapses +expThreeSynapse,exp_three_synapses +expTwoSynapse,exp_two_synapses +expOneSynapse,exp_one_synapses +alphaSynapse,alpha_synapses +alphaCurrentSynapse,alpha_current_synapses +pinskyRinzelCA3Cell,pinsky_rinzel_ca3_cells +fitzHughNagumo1969Cell,fitz_hugh_nagumo1969_cells +fitzHughNagumoCell,fitz_hugh_nagumo_cells +adExIaFCell,ad_ex_ia_f_cells +izhikevich2007Cell,izhikevich2007_cells +izhikevichCell,izhikevich_cells +iafRefCell,iaf_ref_cells +iafCell,iaf_cells +iafTauRefCell,iaf_tau_ref_cells +iafTauCell,iaf_tau_cells +baseCell,base_cells +cell2CaPools,cell2_ca_poolses +cell,cells +xs:anyURI,xs:any_uri +ionChannelKS,ion_channel_kses +ionChannelVShift,ion_channel_v_shifts defaultValue,default_value -inputW,input_ws -continuousConnectionInstance,continuous_connection_instances -tauDecay1,tau_decay1 -leakConductance,leak_conductance -tauDecay2,tau_decay2 -subTree,sub_trees +xs:positiveInteger,xs:positive_integer javax.xml.bind.DatatypeConverter.parseInt,javax.xml.bind._datatype_converter.parse_int -iafTauCell,iaf_tau_cells -pulseGenerator,pulse_generators -iafRefCell,iaf_ref_cells -postSegment,post_segment javax.xml.bind.DatatypeConverter.printInt,javax.xml.bind._datatype_converter.print_int -gAmpa,g_ampa -electricalConnection,electrical_connections -synapticConnection,synaptic_connections -gateTypes,gate_types -channelDensityNonUniformNernst,channel_density_non_uniform_nernsts -restingConc,resting_conc -basedOn,based_on -leakReversal,leak_reversal -channelDensityNonUniformGHK,channel_density_non_uniform_ghks -initMembPotential,init_memb_potentials -postCell,post_cell -spikeArray,spike_arrays -blockingPlasticSynapse,blocking_plastic_synapses -minimumISI,minimum_isi diff --git a/neuroml/nml/name_table.csv b/neuroml/nml/name_table.csv index d7bf4692..5ae54378 100644 --- a/neuroml/nml/name_table.csv +++ b/neuroml/nml/name_table.csv @@ -1,365 +1,365 @@ -all,all -skip,skips -gateHHtauInf,gate_hh_tau_infs -gc,gc -morphology,morphology -gbase2,gbase2 -gbase1,gbase1 -sineGenerator,sine_generators -sineGeneratorDL,sine_generator_dls -cell2CaPools,cell2_ca_poolses -to,to -xSpacing,x_spacing -neuroml,neuroml -plasticityMechanism,plasticity_mechanism -unstructured,unstructured -alphaCondSynapse,alpha_cond_synapses -channelDensityNonUniform,channel_density_non_uniforms -synapse2,synapse2 -synapse1,synapse1 -condition,condition +http://www.neuroml.org/schema/neuroml2,http://www.neuroml.org/schema/neuroml2 +http://www.w3.org/2001/XMLSchema http://www.w3.org/2001/XMLSchema.xsd,http://www.w3.org/2001/xml_schema http://www.w3.org/2001/xml_schema.xsd +qualified,qualified +unqualified,unqualified +metaid,metaid +optional,optional +annotation,annotation +property,properties +unbounded,unboundeds notes,notes -zStart,z_start -membraneProperties,membrane_properties -inhomogeneousParameter,inhomogeneous_parameters -delT,del_t -targetVoltage,target_voltage -xs:anyURI,xs:any_uri -gLd,g_ld +id,id +required,required +neuroLexId,neuro_lex_id rate,rate -biophysicalProperties,biophysical_properties -presynapticPopulation,presynaptic_population -finishAmplitude,finish_amplitude -gLs,g_ls 
-segmentGroup,segment_groups -alphaCurrentSynapse,alpha_current_synapses -poissonFiringSynapse,poisson_firing_synapses -cell,cells -gbase,gbase +duration,duration +start,start +e_rev,e_rev +xs:float,xs:float +tau_syn,tau_syn +basePyNNCell,basePyNNCell +gbar_Na,gbar_Na +gbar_K,gbar_K +g_leak,g_leak +e_rev_leak,e_rev_leak +e_rev_Na,e_rev_Na +e_rev_K,e_rev_K +e_rev_I,e_rev_I +e_rev_E,e_rev_E +v_offset,v_offset +basePyNNIaFCondCell,basePyNNIaFCondCell +v_spike,v_spike +tau_w,tau_w +delta_T,delta_T +b,b +a,a +basePyNNIaFCell,basePyNNIaFCell +v_thresh,v_thresh +v_rest,v_rest v_reset,v_reset +tau_refrac,tau_refrac +tau_m,tau_m +v_init,v_init +tau_syn_I,tau_syn_I +tau_syn_E,tau_syn_E +i_offset,i_offset +cm,cm +weight,weight +fractionAlong,fraction_along +segmentId,segment_id +destination,destination +target,target +xs:string,xs:string component,component -active,active -path,paths -alphaCurrSynapse,alpha_curr_synapses -postsynapticPopulation,postsynaptic_population -gKdr,g_kdr -diameter,diameter +population,populations +inputW,input_ws +input,input +postComponent,post_component +preComponent,pre_component +continuousConnectionInstanceW,continuous_connection_instance_ws +continuousConnectionInstance,continuous_connection_instances +continuousConnection,continuous_connections +synapse,synapse +electricalConnectionInstanceW,electrical_connection_instance_ws +electricalConnectionInstance,electrical_connection_instances +electricalConnection,electrical_connections +delay,delay +postFractionAlong,post_fraction_along +postSegment,post_segment +postCell,post_cell preFractionAlong,pre_fraction_along -projection,projections -href,href -qd0,qd0 -select,select -forwardTransition,forward_transition -averageRate,average_rate -cellSet,cell_sets -channelDensityNernstCa2,channel_density_nernst_ca2s +preSegment,pre_segment +preCell,pre_cell postSegmentId,post_segment_id -spike,spikes +postCellId,post_cell_id +preSegmentId,pre_segment_id +preCellId,pre_cell_id +connectionWD,connection_wds +connection,connections +postsynapticPopulation,postsynaptic_population +presynapticPopulation,presynaptic_population +to,to +select,select +skip,skips +z,z +y,y +x,x +k,k +xs:nonNegativeInteger,xs:non_negative_integer +j,j +i,i +location,location +zSize,z_size +ySize,y_size +xSize,x_size +region,regions +number,number +space,spaces +unstructured,unstructured +grid,grid +random,random +populationTypes,population_types +populationList,population_list +extracellularProperties,extracellular_properties type,type -fitzHughNagumoCell,fitz_hugh_nagumo_cells -channelPopulation,channel_populations -continuousProjection,continuous_projections -include,includes -i_offset,i_offset -specificCapacitance,specific_capacitances +size,size +instance,instances +layout,layout allowedSpaces,allowed_spaces -delta_T,delta_T -tauw,tauw -xSize,x_size -explicitInput,explicit_inputs -tau,tau -xs:nonNegativeInteger,xs:non_negative_integer -concentrationModel,concentration_model -ionChannelHH,ion_channel_hhs -channelDensityGHK,channel_density_ghks -intracellularProperties2CaPools,intracellular_properties2_ca_pools -tag,tag -v_rest,v_rest -v_spike,v_spike -linearGradedSynapse,linear_graded_synapses -iafCell,iaf_cells -species,species -silentSynapse,silent_synapses -spikeGeneratorRandom,spike_generator_randoms -gate,gates -steadyState,steady_state -parameter,parameter -y,y -e_rev_leak,e_rev_leak -channelDensity,channel_densities -expCondSynapse,exp_cond_synapses -continuousConnectionInstanceW,continuous_connection_instance_ws -expTwoSynapse,exp_two_synapses 
-annotation,annotation -a,a -gateKS,gate_kses -gbar_Na,gbar_Na -izhikevich2007Cell,izhikevich2007_cells -IF_curr_alpha,IF_curr_alpha -decayConstant,decay_constant -expOneSynapse,exp_one_synapses -baseCell,base_cells -tauDecay,tau_decay -iDend,i_dend -phi,phi -tsodyksMarkramDepFacMechanism,tsodyks_markram_dep_fac_mechanism -delay,delay -reverseRate,reverse_rate -channelDensityNernst,channel_density_nernsts -reverseTransition,reverse_transition -preComponent,pre_component -basePyNNIaFCondCell,basePyNNIaFCondCell -minISI,min_isi -gCa,g_ca -eNa,e_na -transientPoissonFiringSynapse,transient_poisson_firing_synapses -qualified,qualified -name,name -preCell,pre_cell -blockConcentration,block_concentration -inputList,input_lists -tauRec,tau_rec -weight,weight +zStart,z_start +yStart,y_start +xStart,x_start +zSpacing,z_spacing +ySpacing,y_spacing +xSpacing,x_spacing +basedOn,based_on +structure,structure networkTypes,network_types -inhomogeneousValue,inhomogeneous_value -conductance,conductance -description,description -eL,e_l +networkWithTemperature,network_with_temperature network,networks -eK,e_k -space,spaces -forwardRate,forward_rate -midpoint,midpoint -tau_syn_E,tau_syn_E -tau_syn_I,tau_syn_I -gKC,g_kc -rampGeneratorDL,ramp_generator_dls -vpeak,vpeak -alphac,alphac -neuroLexId,neuro_lex_id -returnVoltage,return_voltage -intracellularProperties,intracellular_properties -q10Settings,q10_settings -distal,distal -number,number -initialConcentration,initial_concentration -instances,instances -xs:string,xs:string -size,size temperature,temperature -decayingPoolConcentrationModel,decaying_pool_concentration_models -voltageClampTriple,voltage_clamp_triples -alphaSynapse,alpha_synapses -preCellId,pre_cell_id +inputList,input_lists +explicitInput,explicit_inputs +continuousProjection,continuous_projections +electricalProjection,electrical_projections +projection,projections +synapticConnection,synaptic_connections +cellSet,cell_sets +spikeTarget,spike_target +averageRate,average_rate +minimumISI,minimum_isi +minISI,min_isi +maxISI,max_isi +period,period +spike,spikes +time,time +simpleSeriesResistance,simple_series_resistance +returnVoltage,return_voltage +testingVoltage,testing_voltage +conditioningVoltage,conditioning_voltage +active,active +targetVoltage,target_voltage +rampGeneratorDL,ramp_generator_dls +sineGeneratorDL,sine_generator_dls pulseGeneratorDL,pulse_generator_dls -thresh,thresh -gateHHrates,gate_hh_rates -population,populations -b,b -target,target -eCa,e_ca -shellThickness,shell_thickness -expCurrSynapse,exp_curr_synapses -amplitude,amplitude -resistivity,resistivities -ySpacing,y_spacing -ySize,y_size -gateFractional,gate_fractionals -blockMechanism,block_mechanism -instance,instances -connectionWD,connection_wds -gradedSynapse,graded_synapses -gapJunction,gap_junctions +rampGenerator,ramp_generators +sineGenerator,sine_generators +pulseGenerator,pulse_generators baselineAmplitude,baseline_amplitude -rho,rho +finishAmplitude,finish_amplitude +startAmplitude,start_amplitude +amplitude,amplitude phase,phase +source,source +species,species +resistivity,resistivities +decayingPoolConcentrationModel,decaying_pool_concentration_models +segmentGroup,segment_groups +all,all +initialExtConcentration,initial_ext_concentration +initialConcentration,initial_concentration +ion,ion +concentrationModel,concentration_model +value,value +inhomogeneousParameter,inhomogeneous_parameters +parameter,parameter +inhomogeneousValue,inhomogeneous_value segment,segments -refract,refract 
-postFractionAlong,post_fraction_along -ionChannelVShift,ion_channel_v_shifts -spikeGeneratorRefPoisson,spike_generator_ref_poissons -random,random -channelTypes,channel_types -xStart,x_start -xs:float,xs:float -tau_m,tau_m -layout,layout -basePyNNIaFCell,basePyNNIaFCell -voltageClamp,voltage_clamps -tau_w,tau_w -testingVoltage,testing_voltage -adExIaFCell,ad_ex_ia_f_cells -ionChannelKS,ion_channel_kses -preSegmentId,pre_segment_id -spikeThresh,spike_threshes -synapse,synapse -spikeGeneratorPoisson,spike_generator_poissons +condDensity,cond_density +ionChannel,ion_channel +permeability,permeability +variableParameter,variable_parameters vShift,v_shift -x,x -e_rev,e_rev -v_offset,v_offset -ionChannelPassive,ion_channel_passive -subGate,sub_gates erev,erev -parent,parent -gL,g_l -tauRise,tau_rise -izhikevichCell,izhikevich_cells -postCellId,post_cell_id -condDensity,cond_density -gateHHratesTauInf,gate_h_hrates_tau_infs -c,c -fixedFactorConcentrationModel,fixed_factor_concentration_models -fixedQ10,fixed_q10 -region,regions -extracellularProperties,extracellular_properties -connection,connections -http://www.w3.org/2001/XMLSchema http://www.w3.org/2001/XMLSchema.xsd,http://www.w3.org/2001/xml_schema http://www.w3.org/2001/xml_schema.xsd -electricalConnectionInstance,electrical_connection_instances -cm,cm +channelDensityNernstCa2,channel_density_nernst_ca2s +initMembPotential,init_memb_potentials +specificCapacitance,specific_capacitances +spikeThresh,spike_threshes +channelDensityNonUniformGHK,channel_density_non_uniform_ghks +channelDensityNonUniformNernst,channel_density_non_uniform_nernsts +channelDensityNonUniform,channel_density_non_uniforms +channelDensityGHK2,channel_density_ghk2s +channelDensityGHK,channel_density_ghks +channelDensityNernst,channel_density_nernsts +channelDensityVShift,channel_density_v_shifts +channelDensity,channel_densities +channelPopulation,channel_populations +intracellularProperties2CaPools,intracellular_properties2_ca_pools +membraneProperties2CaPools,membrane_properties2_ca_pools +intracellularProperties,intracellular_properties +membraneProperties,membrane_properties +normalizationEnd,normalization_end +xs:double,xs:double translationStart,translation_start -closedState,closed_states -period,period -gNa,g_na -gKahp,g_kahp +metric,metric +variable,variable +distal,distal +proximal,proximal +subTree,sub_trees +path,paths +include,includes +member,members +diameter,diameter +name,name +parent,parent +biophysicalProperties2CaPools,biophysical_properties2_ca_pools +biophysicalProperties,biophysical_properties +morphology,morphology +betac,betac +alphac,alphac pp,pp -electricalConnectionInstanceW,electrical_connection_instance_ws -spikeTarget,spike_target -optional,optional -yStart,y_start +qd0,qd0 +eL,e_l +eK,e_k +eCa,e_ca +eNa,e_na +gAmpa,g_ampa +gNmda,g_nmda +gKC,g_kc +gKahp,g_kahp +gCa,g_ca +gKdr,g_kdr +gNa,g_na +gLd,g_ld +gLs,g_ls +gc,gc +iDend,i_dend +iSoma,i_soma +phi,phi +refract,refract +tauw,tauw +delT,del_t +thresh,thresh +reset,reset +gL,g_l +d,d +c,c +vpeak,vpeak +vt,vt +vr,vr +v0,v0 +leakConductance,leak_conductance +leakReversal,leak_reversal +tau,tau tauFac,tau_fac -q10Factor,q10_factor -populationList,population_list -zSpacing,z_spacing +tauRec,tau_rec +initReleaseProb,init_release_prob +tsodyksMarkramDepFacMechanism,tsodyks_markram_dep_fac_mechanism +tsodyksMarkramDepMechanism,tsodyks_markram_dep_mechanism +scalingVolt,scaling_volt +scalingConc,scaling_conc +blockConcentration,block_concentration 
+voltageConcDepBlockMechanism,voltage_conc_dep_block_mechanism +blockMechanism,block_mechanism +plasticityMechanism,plasticity_mechanism synapse2Path,synapse2_path -betac,betac +synapse1Path,synapse1_path +synapse2,synapse2 +synapse1,synapse1 +tauRise,tau_rise +tauDecay2,tau_decay2 +tauDecay1,tau_decay1 +tauDecay,tau_decay +ibase,ibase +delta,delta +conductance,conductance +gbase2,gbase2 +gbase1,gbase1 +gbase,gbase +rho,rho +decayConstant,decay_constant +restingConc,resting_conc +shellThickness,shell_thickness +scale,scale +midpoint,midpoint experimentalTemp,experimental_temp -value,value -openState,open_states -property,properties -k,k -zSize,z_size -tau_refrac,tau_refrac -metric,metric -compoundInput,compound_inputs -ionChannel,ion_channel -vr,vr -vt,vt -iafTauRefCell,iaf_tau_ref_cells -id,id -basePyNNCell,basePyNNCell -scalingConc,scaling_conc -pinskyRinzelCA3Cell,pinsky_rinzel_ca3_cells +q10Factor,q10_factor +fixedQ10,fixed_q10 +fractionalConductance,fractional_conductance +timeCourse,time_course +steadyState,steady_state +q10Settings,q10_settings +instances,instances +subGate,sub_gates +reverseRate,reverse_rate +forwardRate,forward_rate +gateTypes,gate_types tauInfTransition,tau_inf_transition -member,members +reverseTransition,reverse_transition +forwardTransition,forward_transition +openState,open_states +closedState,closed_states +gateFractional,gate_fractionals +gateKS,gate_kses gateHHInstantaneous,gate_hh_instantaneouses -populationTypes,population_types -conditioningVoltage,conditioning_voltage -ibase,ibase -v0,v0 -ion,ion -variableParameter,variable_parameters -unqualified,unqualified -iSoma,i_soma -g_leak,g_leak -i,i -channelDensityVShift,channel_density_v_shifts -biophysicalProperties2CaPools,biophysical_properties2_ca_pools -fractionalConductance,fractional_conductance -dimension,dimension -http://www.neuroml.org/schema/neuroml2,http://www.neuroml.org/schema/neuroml2 -tsodyksMarkramDepMechanism,tsodyks_markram_dep_mechanism -proximal,proximal -fitzHughNagumo1969Cell,fitz_hugh_nagumo1969_cells -segmentId,segment_id +gateHHratesTauInf,gate_h_hrates_tau_infs +gateHHratesInf,gate_h_hrates_infs +gateHHtauInf,gate_hh_tau_infs +gateHHratesTau,gate_h_hrates_taus +gateHHrates,gate_hh_rates +channelTypes,channel_types +ionChannelHH,ion_channel_hhs +ionChannelPassive,ion_channel_passive +gate,gates q10ConductanceScaling,q10_conductance_scalings -rampGenerator,ramp_generators -source,source -extends,extends -location,location -input,input -voltageConcDepBlockMechanism,voltage_conc_dep_block_mechanism -postComponent,post_component -initReleaseProb,init_release_prob -maxISI,max_isi -doubleSynapse,double_synapses -grid,grid -IF_cond_alpha,IF_cond_alpha -d,d -v_thresh,v_thresh -timeCourse,time_course +fixedFactorConcentrationModel,fixed_factor_concentration_models +transientPoissonFiringSynapse,transient_poisson_firing_synapses +poissonFiringSynapse,poisson_firing_synapses +spikeGeneratorRefPoisson,spike_generator_ref_poissons +spikeGeneratorPoisson,spike_generator_poissons +spikeGeneratorRandom,spike_generator_randoms spikeGenerator,spike_generators timedSynapticInput,timed_synaptic_inputs -gateHHratesTau,gate_h_hrates_taus -continuousConnection,continuous_connections -duration,duration -scale,scale -metaid,metaid -networkWithTemperature,network_with_temperature -fractionAlong,fraction_along -normalizationEnd,normalization_end -synapse1Path,synapse1_path -electricalProjection,electrical_projections -IF_curr_exp,IF_curr_exp -gateHHratesInf,gate_h_hrates_infs 
-simpleSeriesResistance,simple_series_resistance -e_rev_Na,e_rev_Na -IF_cond_exp,IF_cond_exp -tau_syn,tau_syn -channelDensityGHK2,channel_density_ghk2s -e_rev_E,e_rev_E -e_rev_I,e_rev_I -e_rev_K,e_rev_K -preSegment,pre_segment -v_init,v_init -gNmda,g_nmda -scalingVolt,scaling_volt -initialExtConcentration,initial_ext_concentration -expThreeSynapse,exp_three_synapses -destination,destination -startAmplitude,start_amplitude -unbounded,unboundeds +spikeArray,spike_arrays +voltageClampTriple,voltage_clamp_triples +voltageClamp,voltage_clamps compoundInputDL,compound_input_dls -xs:positiveInteger,xs:positive_integer -membraneProperties2CaPools,membrane_properties2_ca_pools +compoundInput,compound_inputs +alphaCurrSynapse,alpha_curr_synapses +expCurrSynapse,exp_curr_synapses +alphaCondSynapse,alpha_cond_synapses +expCondSynapse,exp_cond_synapses +gradedSynapse,graded_synapses +linearGradedSynapse,linear_graded_synapses +silentSynapse,silent_synapses +gapJunction,gap_junctions +doubleSynapse,double_synapses +blockingPlasticSynapse,blocking_plastic_synapses +expThreeSynapse,exp_three_synapses +expTwoSynapse,exp_two_synapses +expOneSynapse,exp_one_synapses +alphaSynapse,alpha_synapses +alphaCurrentSynapse,alpha_current_synapses +pinskyRinzelCA3Cell,pinsky_rinzel_ca3_cells +fitzHughNagumo1969Cell,fitz_hugh_nagumo1969_cells +fitzHughNagumoCell,fitz_hugh_nagumo_cells +adExIaFCell,ad_ex_ia_f_cells +izhikevich2007Cell,izhikevich2007_cells +izhikevichCell,izhikevich_cells +iafRefCell,iaf_ref_cells +iafCell,iaf_cells +iafTauRefCell,iaf_tau_ref_cells +iafTauCell,iaf_tau_cells +baseCell,base_cells +cell2CaPools,cell2_ca_poolses +cell,cells +href,href +xs:anyURI,xs:any_uri +ionChannelKS,ion_channel_kses +ionChannelVShift,ion_channel_v_shifts +neuroml,neuroml +condition,condition defaultValue,default_value -inputW,input_ws -continuousConnectionInstance,continuous_connection_instances -start,start -tauDecay1,tau_decay1 -leakConductance,leak_conductance -tauDecay2,tau_decay2 exposure,exposure -subTree,sub_trees +description,description +dimension,dimension +extends,extends +tag,tag +xs:positiveInteger,xs:positive_integer +int,int javax.xml.bind.DatatypeConverter.parseInt,javax.xml.bind._datatype_converter.parse_int -delta,delta -iafTauCell,iaf_tau_cells -pulseGenerator,pulse_generators -iafRefCell,iaf_ref_cells -reset,reset -j,j -postSegment,post_segment javax.xml.bind.DatatypeConverter.printInt,javax.xml.bind._datatype_converter.print_int -z,z -xs:double,xs:double -required,required -int,int -gAmpa,g_ampa -electricalConnection,electrical_connections -synapticConnection,synaptic_connections -gateTypes,gate_types -channelDensityNonUniformNernst,channel_density_non_uniform_nernsts -permeability,permeability -restingConc,resting_conc -basedOn,based_on -leakReversal,leak_reversal -channelDensityNonUniformGHK,channel_density_non_uniform_ghks -gbar_K,gbar_K -initMembPotential,init_memb_potentials -postCell,post_cell -variable,variable -structure,structure -spikeArray,spike_arrays -blockingPlasticSynapse,blocking_plastic_synapses -minimumISI,minimum_isi -time,time +IF_curr_alpha,IF_curr_alpha +IF_curr_exp,IF_curr_exp +IF_cond_alpha,IF_cond_alpha +IF_cond_exp,IF_cond_exp diff --git a/neuroml/nml/nml.py b/neuroml/nml/nml.py index a51fdf5e..37b37a13 100644 --- a/neuroml/nml/nml.py +++ b/neuroml/nml/nml.py @@ -2,37 +2,42 @@ # -*- coding: utf-8 -*- # -# Generated Tue Aug 24 12:16:12 2021 by generateDS.py version 2.30.11. 
-# Python 2.7.18 (default, May 19 2021, 00:00:00) [GCC 11.1.1 20210428 (Red Hat 11.1.1-1)] +# Generated Thu Aug 26 17:22:09 2021 by generateDS.py version 2.39.9. +# Python 3.9.6 (default, Jul 16 2021, 00:00:00) [GCC 11.1.1 20210531 (Red Hat 11.1.1-3)] # # Command line options: # ('-o', 'nml.py') -# ('--use-getter-setter', 'none') +# ('--use-getter-setter', 'new') +# ('--no-warnings', '') # ('--silence', '') -# ('--user-methods', 'helper_methods') +# ('--user-methods', './helper_methods.py') # # Command line arguments: # NeuroML_v2.2.xsd # # Command line: -# /home/asinha/.local/share/virtualenvs/generateds-2.7/bin/generateDS.py -o "nml.py" --use-getter-setter="none" --silence --user-methods="helper_methods" NeuroML_v2.2.xsd +# /home/asinha/.virtualenvs/generateds-39/bin/generateDS.py -o "nml.py" --use-getter-setter="new" --no-warnings --silence --user-methods="./helper_methods.py" NeuroML_v2.2.xsd # # Current working directory (os.getcwd()): # nml # import sys +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os import re as re_ import base64 import datetime as datetime_ -import warnings as warnings_ -try: - from lxml import etree as etree_ -except ImportError: - from xml.etree import ElementTree as etree_ +import decimal as decimal_ +from lxml import etree as etree_ Validate_simpletypes_ = True +SaveElementTreeNode = True if sys.version_info.major == 2: BaseStrType_ = basestring else: @@ -48,6 +53,11 @@ def parsexml_(infile, parser=None, **kwargs): except AttributeError: # fallback to xml.etree parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass doc = etree_.parse(infile, parser=parser, **kwargs) return doc @@ -72,7 +82,7 @@ def parsexmlstring_(instring, parser=None, **kwargs): # definitions. The export method for any class for which there is # a namespace prefix definition, will export that definition in the # XML representation of that element. See the export method of -# any generated element type class for a example of the use of this +# any generated element type class for an example of the use of this # table. # A sample table is: # @@ -83,11 +93,75 @@ def parsexmlstring_(instring, parser=None, **kwargs): # "ElementtypeB": "http://www.xxx.com/namespaceB", # } # +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. +# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# try: from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ -except ImportError: +except ModulenotfoundExp_ : GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ModulenotfoundExp_ : + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". See the default class definition below for +# clues about the possible content of that class. 
+# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_ : + + class GdsCollector_(object): + + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_ : + Enum = object # # The root super-class for element type classes @@ -98,9 +172,15 @@ def parsexmlstring_(instring, parser=None, **kwargs): try: from generatedssuper import GeneratedsSuper -except ImportError as exp: +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + class GeneratedsSuperSuper(object): + pass - class GeneratedsSuper(object): + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') class _FixedOffsetTZ(datetime_.tzinfo): def __init__(self, offset, name): @@ -114,6 +194,8 @@ def dst(self, dt): return None def gds_format_string(self, input_data, input_name=''): return input_data + def gds_parse_string(self, input_data, node=None, input_name=''): + return input_data def gds_validate_string(self, input_data, node=None, input_name=''): if not input_data: return '' @@ -125,9 +207,21 @@ def gds_validate_base64(self, input_data, node=None, input_name=''): return input_data def gds_format_integer(self, input_data, input_name=''): return '%d' % input_data + def gds_parse_integer(self, input_data, node=None, input_name=''): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires integer value: %s' % exp) + return ival def gds_validate_integer(self, input_data, node=None, input_name=''): - return input_data + try: + value = int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires integer value') + return value def gds_format_integer_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] return '%s' % ' '.join(input_data) def gds_validate_integer_list( self, input_data, node=None, input_name=''): @@ -136,13 +230,25 @@ def gds_validate_integer_list( try: int(value) except (TypeError, ValueError): - raise_parse_error(node, 'Requires sequence of integers') + raise_parse_error(node, 'Requires sequence of integer values') return values def gds_format_float(self, input_data, input_name=''): return ('%.15f' % input_data).rstrip('0') + def gds_parse_float(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires float or double value: %s' % exp) + return fval_ def gds_validate_float(self, input_data, node=None, input_name=''): - return input_data + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires float value') + return value def gds_format_float_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) 
for s in input_data] return '%s' % ' '.join(input_data) def gds_validate_float_list( self, input_data, node=None, input_name=''): @@ -151,13 +257,57 @@ def gds_validate_float_list( try: float(value) except (TypeError, ValueError): - raise_parse_error(node, 'Requires sequence of floats') + raise_parse_error(node, 'Requires sequence of float values') + return values + def gds_format_decimal(self, input_data, input_name=''): + return_value = '%s' % input_data + if '.' in return_value: + return_value = return_value.rstrip('0') + if return_value.endswith('.'): + return_value = return_value.rstrip('.') + return return_value + def gds_parse_decimal(self, input_data, node=None, input_name=''): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return decimal_value + def gds_validate_decimal(self, input_data, node=None, input_name=''): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires decimal value') + return value + def gds_format_decimal_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return ' '.join([self.gds_format_decimal(item) for item in input_data]) + def gds_validate_decimal_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of decimal values') return values def gds_format_double(self, input_data, input_name=''): - return '%e' % input_data + return '%s' % input_data + def gds_parse_double(self, input_data, node=None, input_name=''): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, 'Requires double or float value: %s' % exp) + return fval_ def gds_validate_double(self, input_data, node=None, input_name=''): - return input_data + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires double or float value') + return value def gds_format_double_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] return '%s' % ' '.join(input_data) def gds_validate_double_list( self, input_data, node=None, input_name=''): @@ -166,23 +316,40 @@ def gds_validate_double_list( try: float(value) except (TypeError, ValueError): - raise_parse_error(node, 'Requires sequence of doubles') + raise_parse_error( + node, 'Requires sequence of double or float values') return values def gds_format_boolean(self, input_data, input_name=''): return ('%s' % input_data).lower() + def gds_parse_boolean(self, input_data, node=None, input_name=''): + if input_data in ('true', '1'): + bval = True + elif input_data in ('false', '0'): + bval = False + else: + raise_parse_error(node, 'Requires boolean value') + return bval def gds_validate_boolean(self, input_data, node=None, input_name=''): + if input_data not in (True, 1, False, 0, ): + raise_parse_error( + node, + 'Requires boolean value ' + '(one of True, 1, False, 0)') return input_data def gds_format_boolean_list(self, input_data, input_name=''): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] return '%s' % ' '.join(input_data) def gds_validate_boolean_list( self, input_data, 
node=None, input_name=''): values = input_data.split() for value in values: - if value not in ('true', '1', 'false', '0', ): + value = self.gds_parse_boolean(value, node, input_name) + if value not in (True, 1, False, 0, ): raise_parse_error( node, - 'Requires sequence of booleans ' - '("true", "1", "false", "0")') + 'Requires sequence of boolean values ' + '(one of True, 1, False, 0)') return values def gds_validate_datetime(self, input_data, node=None, input_name=''): return input_data @@ -367,6 +534,50 @@ def gds_parse_time(cls, input_data): dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') dt = dt.replace(tzinfo=tz) return dt.time() + def gds_check_cardinality_( + self, value, input_name, + min_occurs=0, max_occurs=1, required=None): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None : + if required and length < 1: + self.gds_collector_.add_message( + "Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_())) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + min_occurs, length)) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), + max_occurs, length)) + def gds_validate_builtin_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_validate_defined_ST_( + self, validator, value, input_name, + min_occurs=None, max_occurs=None, required=None): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) def gds_str_lower(self, instring): return instring.lower() def get_path_(self, node): @@ -396,7 +607,10 @@ def get_class_obj_(self, node, default_class=None): class_obj1 = class_obj2 return class_obj1 def gds_build_any(self, node, type_name=None): - return None + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content @classmethod def gds_reverse_node_mapping(cls, mapping): return dict(((v, k) for k, v in mapping.items())) @@ -420,11 +634,34 @@ def convert_unicode(instring): result = GeneratedsSuper.gds_encode(str(instring)) return result def __eq__(self, other): + def excl_select_objs_(obj): + return (obj[0] != 'parent_object_' and + obj[0] != 'gds_collector_') if type(self) != type(other): return False - return self.__dict__ == other.__dict__ + return all(x == y for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()))) def __ne__(self, other): return not self.__eq__(other) + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + # SQLAlchemy ETL transform hooks. 
+ def gds_sqa_etl_transform(self): + return 0, None + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + def gds_get_node_lineno_(self): + if (hasattr(self, "gds_elementtree_node_") and + self.gds_elementtree_node_ is not None): + return ' near line {}'.format( + self.gds_elementtree_node_.sourceline) + else: + return "" + def getSubclassFromModule_(module, class_): '''Get the subclass of a class from a specific module.''' @@ -455,6 +692,10 @@ def getSubclassFromModule_(module, class_): # ExternalEncoding = '' +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} Tag_pattern_ = re_.compile(r'({.*})?(.*)') String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') @@ -550,18 +791,26 @@ def find_attr_value_(attr_name, node): value = attrs.get(attr_name) elif len(attr_parts) == 2: prefix, name = attr_parts - namespace = node.nsmap.get(prefix) + if prefix == 'xml': + namespace = 'http://www.w3.org/XML/1998/namespace' + else: + namespace = node.nsmap.get(prefix) if namespace is not None: value = attrs.get('{%s}%s' % (namespace, name, )) return value +def encode_str_2_3(instr): + return instr + + class GDSParseError(Exception): pass def raise_parse_error(node, msg): - msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + if node is not None: + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) raise GDSParseError(msg) @@ -626,7 +875,7 @@ def exportSimple(self, outfile, level, name): self.name, base64.b64encode(self.value), self.name)) - def to_etree(self, element): + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): if self.category == MixedContainer.CategoryText: # Prevent exporting empty content as empty lines. 
if self.value.strip(): @@ -646,7 +895,7 @@ def to_etree(self, element): subelement.text = self.to_etree_simple() else: # category == MixedContainer.CategoryComplex self.value.to_etree(element) - def to_etree_simple(self): + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): if self.content_type == MixedContainer.TypeString: text = self.value elif (self.content_type == MixedContainer.TypeInteger or @@ -724,25 +973,31 @@ def _cast(typ, value): # -class BlockTypes(object): +class BlockTypes(str, Enum): VOLTAGE_CONC_DEP_BLOCK_MECHANISM='voltageConcDepBlockMechanism' -class Metric(object): +class Metric(str, Enum): + """Metric -- Allowed metrics for InhomogeneousParam + + """ PATH_LENGTHFROMROOT='Path Length from root' -class PlasticityTypes(object): +class PlasticityTypes(str, Enum): TSODYKS_MARKRAM_DEP_MECHANISM='tsodyksMarkramDepMechanism' TSODYKS_MARKRAM_DEP_FAC_MECHANISM='tsodyksMarkramDepFacMechanism' -class ZeroOrOne(object): +class ZeroOrOne(str, Enum): + """ZeroOrOne -- Value which is either 0 or 1 + + """ _0='0' _1='1' -class allowedSpaces(object): +class allowedSpaces(str, Enum): EUCLIDEAN__1_D='Euclidean_1D' EUCLIDEAN__2_D='Euclidean_2D' EUCLIDEAN__3_D='Euclidean_3D' @@ -751,12 +1006,12 @@ class allowedSpaces(object): GRID__3_D='Grid_3D' -class channelTypes(object): +class channelTypes(str, Enum): ION_CHANNEL_PASSIVE='ionChannelPassive' ION_CHANNEL_HH='ionChannelHH' -class gateTypes(object): +class gateTypes(str, Enum): GATE_H_HRATES='gateHHrates' GATE_H_HRATES_TAU='gateHHratesTau' GATE_H_HTAU_INF='gateHHtauInf' @@ -767,29 +1022,37 @@ class gateTypes(object): GATE_FRACTIONAL='gateFractional' -class networkTypes(object): +class networkTypes(str, Enum): NETWORK='network' NETWORK_WITH_TEMPERATURE='networkWithTemperature' -class populationTypes(object): +class populationTypes(str, Enum): POPULATION='population' POPULATION_LIST='populationList' class Property(GeneratedsSuper): - """Generic property with a tag and value""" + """Property -- Generic property with a tag and value + + """ + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('tag', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('value', 'xs:string', 0, 0, {'use': u'required'}), + MemberSpec_('tag', 'xs:string', 0, 0, {'use': 'required', 'name': 'tag'}), + MemberSpec_('value', 'xs:string', 0, 0, {'use': 'required', 'name': 'value'}), ] subclass = None superclass = None - def __init__(self, tag=None, value=None, **kwargs_): + def __init__(self, tag=None, value=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None self.tag = _cast(None, tag) + self.tag_nsprefix_ = None self.value = _cast(None, value) + self.value_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -801,7 +1064,19 @@ def factory(*args_, **kwargs_): else: return Property(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_tag(self): + return self.tag + def set_tag(self, tag): + self.tag = tag + def get_value(self): + return self.value + def set_value(self, value): + self.value = value + def _hasContent(self): if ( ): @@ -816,35 +1091,41 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='P 
eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'Property': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Property') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Property') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Property', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Property', pretty_print=pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Property'): + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Property'): if self.tag is not None and 'tag' not in already_processed: already_processed.add('tag') outfile.write(' tag=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.tag), input_name='tag')), )) if self.value is not None and 'value' not in already_processed: already_processed.add('value') outfile.write(' value=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.value), input_name='value')), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Property', fromsubclass_=False, pretty_print=True): + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Property', fromsubclass_=False, pretty_print=True): pass - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('tag', node) if value is not None and 'tag' not in already_processed: already_processed.add('tag') @@ -853,21 +1134,27 @@ def buildAttributes(self, node, attrs, already_processed): if value is not None and 'value' not in already_processed: already_processed.add('value') self.value = value - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): pass # end class Property class Annotation(GeneratedsSuper): - """Placeholder for MIRIAM related metadata, among others.""" + """Annotation -- Placeholder for MIRIAM related metadata, among others. 
+ + """ + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('__ANY__', '__ANY__', 1, 1, {u'maxOccurs': u'unbounded', u'processContents': u'skip', u'minOccurs': u'0'}, None), + MemberSpec_('__ANY__', '__ANY__', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'processContents': 'skip'}, None), ] subclass = None superclass = None - def __init__(self, anytypeobjs_=None, **kwargs_): + def __init__(self, anytypeobjs_=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None if anytypeobjs_ is None: self.anytypeobjs_ = [] else: @@ -883,14 +1170,22 @@ def factory(*args_, **kwargs_): else: return Annotation(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_anytypeobjs_(self): return self.anytypeobjs_ + def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_ + def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value) + def insert_anytypeobjs_(self, index, value): self._anytypeobjs_[index] = value + def _hasContent(self): if ( self.anytypeobjs_ ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Annotation', pretty_print=True): + def export(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='Annotation', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('Annotation') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ @@ -898,95 +1193,118 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='A eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'Annotation': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Annotation') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Annotation') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Annotation', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Annotation', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Annotation'): + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Annotation'): pass - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Annotation', fromsubclass_=False, pretty_print=True): + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='Annotation', fromsubclass_=False, pretty_print=True): if pretty_print: eol_ = '\n' else: eol_ = '' - for 
obj_ in self.anytypeobjs_: - obj_.export(outfile, level, namespaceprefix_, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + if not fromsubclass_: + for obj_ in self.anytypeobjs_: + showIndent(outfile, level, pretty_print) + outfile.write(obj_) + outfile.write('\n') + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): pass - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - obj_ = self.gds_build_any(child_, 'Annotation') - if obj_ is not None: - self.add_anytypeobjs_(obj_) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + content_ = self.gds_build_any(child_, 'Annotation') + self.add_anytypeobjs_(content_) # end class Annotation class ComponentType(GeneratedsSuper): - """Contains an extension to NeuroML by creating custom LEMS - ComponentType.""" + """ComponentType -- Contains an extension to NeuroML by creating custom LEMS ComponentType. + + """ + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('name', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('extends', 'xs:string', 0, 1, {'use': u'optional'}), - MemberSpec_('description', 'xs:string', 0, 1, {'use': u'optional'}), - MemberSpec_('Property', 'Property', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'LEMS_Property', u'name': u'Property', u'minOccurs': u'0'}, None), - MemberSpec_('Parameter', 'Parameter', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Parameter', u'name': u'Parameter', u'minOccurs': u'0'}, None), - MemberSpec_('Constant', 'Constant', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Constant', u'name': u'Constant', u'minOccurs': u'0'}, None), - MemberSpec_('Exposure', 'Exposure', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Exposure', u'name': u'Exposure', u'minOccurs': u'0'}, None), - MemberSpec_('Requirement', 'Requirement', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Requirement', u'name': u'Requirement', u'minOccurs': u'0'}, None), - MemberSpec_('InstanceRequirement', 'InstanceRequirement', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'InstanceRequirement', u'name': u'InstanceRequirement', u'minOccurs': u'0'}, None), - MemberSpec_('Dynamics', 'Dynamics', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Dynamics', u'name': u'Dynamics', u'minOccurs': u'0'}, None), + MemberSpec_('name', 'xs:string', 0, 0, {'use': 'required', 'name': 'name'}), + MemberSpec_('extends', 'xs:string', 0, 1, {'use': 'optional', 'name': 'extends'}), + MemberSpec_('description', 'xs:string', 0, 1, {'use': 'optional', 'name': 'description'}), + MemberSpec_('Property', 'Property', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'Property', 'type': 'LEMS_Property'}, None), + MemberSpec_('Parameter', 'Parameter', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'Parameter', 'type': 'Parameter'}, None), + MemberSpec_('Constant', 'Constant', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 
'Constant', 'type': 'Constant'}, None), + MemberSpec_('Exposure', 'Exposure', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'Exposure', 'type': 'Exposure'}, None), + MemberSpec_('Requirement', 'Requirement', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'Requirement', 'type': 'Requirement'}, None), + MemberSpec_('InstanceRequirement', 'InstanceRequirement', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'InstanceRequirement', 'type': 'InstanceRequirement'}, None), + MemberSpec_('Dynamics', 'Dynamics', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'Dynamics', 'type': 'Dynamics'}, None), ] subclass = None superclass = None - def __init__(self, name=None, extends=None, description=None, Property=None, Parameter=None, Constant=None, Exposure=None, Requirement=None, InstanceRequirement=None, Dynamics=None, **kwargs_): + def __init__(self, name=None, extends=None, description=None, Property=None, Parameter=None, Constant=None, Exposure=None, Requirement=None, InstanceRequirement=None, Dynamics=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None self.name = _cast(None, name) + self.name_nsprefix_ = None self.extends = _cast(None, extends) + self.extends_nsprefix_ = None self.description = _cast(None, description) + self.description_nsprefix_ = None if Property is None: self.Property = [] else: self.Property = Property + self.Property_nsprefix_ = None if Parameter is None: self.Parameter = [] else: self.Parameter = Parameter + self.Parameter_nsprefix_ = None if Constant is None: self.Constant = [] else: self.Constant = Constant + self.Constant_nsprefix_ = None if Exposure is None: self.Exposure = [] else: self.Exposure = Exposure + self.Exposure_nsprefix_ = None if Requirement is None: self.Requirement = [] else: self.Requirement = Requirement + self.Requirement_nsprefix_ = None if InstanceRequirement is None: self.InstanceRequirement = [] else: self.InstanceRequirement = InstanceRequirement + self.InstanceRequirement_nsprefix_ = None if Dynamics is None: self.Dynamics = [] else: self.Dynamics = Dynamics + self.Dynamics_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -998,7 +1316,93 @@ def factory(*args_, **kwargs_): else: return ComponentType(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Property(self): + return self.Property + def set_Property(self, Property): + self.Property = Property + def add_Property(self, value): + self.Property.append(value) + def insert_Property_at(self, index, value): + self.Property.insert(index, value) + def replace_Property_at(self, index, value): + self.Property[index] = value + def get_Parameter(self): + return self.Parameter + def set_Parameter(self, Parameter): + self.Parameter = Parameter + def add_Parameter(self, value): + self.Parameter.append(value) + def insert_Parameter_at(self, index, value): + self.Parameter.insert(index, value) + def replace_Parameter_at(self, index, value): + self.Parameter[index] = value + def get_Constant(self): + return self.Constant + def set_Constant(self, Constant): + self.Constant = Constant + def add_Constant(self, value): + self.Constant.append(value) + def 
insert_Constant_at(self, index, value): + self.Constant.insert(index, value) + def replace_Constant_at(self, index, value): + self.Constant[index] = value + def get_Exposure(self): + return self.Exposure + def set_Exposure(self, Exposure): + self.Exposure = Exposure + def add_Exposure(self, value): + self.Exposure.append(value) + def insert_Exposure_at(self, index, value): + self.Exposure.insert(index, value) + def replace_Exposure_at(self, index, value): + self.Exposure[index] = value + def get_Requirement(self): + return self.Requirement + def set_Requirement(self, Requirement): + self.Requirement = Requirement + def add_Requirement(self, value): + self.Requirement.append(value) + def insert_Requirement_at(self, index, value): + self.Requirement.insert(index, value) + def replace_Requirement_at(self, index, value): + self.Requirement[index] = value + def get_InstanceRequirement(self): + return self.InstanceRequirement + def set_InstanceRequirement(self, InstanceRequirement): + self.InstanceRequirement = InstanceRequirement + def add_InstanceRequirement(self, value): + self.InstanceRequirement.append(value) + def insert_InstanceRequirement_at(self, index, value): + self.InstanceRequirement.insert(index, value) + def replace_InstanceRequirement_at(self, index, value): + self.InstanceRequirement[index] = value + def get_Dynamics(self): + return self.Dynamics + def set_Dynamics(self, Dynamics): + self.Dynamics = Dynamics + def add_Dynamics(self, value): + self.Dynamics.append(value) + def insert_Dynamics_at(self, index, value): + self.Dynamics.insert(index, value) + def replace_Dynamics_at(self, index, value): + self.Dynamics[index] = value + def get_name(self): + return self.name + def set_name(self, name): + self.name = name + def get_extends(self): + return self.extends + def set_extends(self, extends): + self.extends = extends + def get_description(self): + return self.description + def set_description(self, description): + self.description = description + def _hasContent(self): if ( self.Property or self.Parameter or @@ -1011,7 +1415,7 @@ def hasContent_(self): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ComponentType', pretty_print=True): + def export(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='ComponentType', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('ComponentType') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ @@ -1019,20 +1423,22 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='C eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'ComponentType': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ComponentType') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ComponentType') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ComponentType', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, 
namespacedef_, name_='ComponentType', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ComponentType'): + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ComponentType'): if self.name is not None and 'name' not in already_processed: already_processed.add('name') outfile.write(' name=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.name), input_name='name')), )) @@ -1042,33 +1448,44 @@ def exportAttributes(self, outfile, level, already_processed, namespaceprefix_=' if self.description is not None and 'description' not in already_processed: already_processed.add('description') outfile.write(' description=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.description), input_name='description')), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ComponentType', fromsubclass_=False, pretty_print=True): + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='ComponentType', fromsubclass_=False, pretty_print=True): if pretty_print: eol_ = '\n' else: eol_ = '' for Property_ in self.Property: + namespaceprefix_ = self.Property_nsprefix_ + ':' if (UseCapturedNS_ and self.Property_nsprefix_) else '' Property_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Property', pretty_print=pretty_print) for Parameter_ in self.Parameter: + namespaceprefix_ = self.Parameter_nsprefix_ + ':' if (UseCapturedNS_ and self.Parameter_nsprefix_) else '' Parameter_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Parameter', pretty_print=pretty_print) for Constant_ in self.Constant: + namespaceprefix_ = self.Constant_nsprefix_ + ':' if (UseCapturedNS_ and self.Constant_nsprefix_) else '' Constant_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Constant', pretty_print=pretty_print) for Exposure_ in self.Exposure: + namespaceprefix_ = self.Exposure_nsprefix_ + ':' if (UseCapturedNS_ and self.Exposure_nsprefix_) else '' Exposure_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Exposure', pretty_print=pretty_print) for Requirement_ in self.Requirement: + namespaceprefix_ = self.Requirement_nsprefix_ + ':' if (UseCapturedNS_ and self.Requirement_nsprefix_) else '' Requirement_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Requirement', pretty_print=pretty_print) for InstanceRequirement_ in self.InstanceRequirement: + namespaceprefix_ = self.InstanceRequirement_nsprefix_ + ':' if (UseCapturedNS_ and self.InstanceRequirement_nsprefix_) else '' InstanceRequirement_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='InstanceRequirement', pretty_print=pretty_print) for Dynamics_ in self.Dynamics: + namespaceprefix_ = self.Dynamics_nsprefix_ + ':' if (UseCapturedNS_ and self.Dynamics_nsprefix_) else '' Dynamics_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Dynamics', pretty_print=pretty_print) - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, 
node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('name', node) if value is not None and 'name' not in already_processed: already_processed.add('name') @@ -1081,62 +1498,72 @@ def buildAttributes(self, node, attrs, already_processed): if value is not None and 'description' not in already_processed: already_processed.add('description') self.description = value - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'Property': obj_ = LEMS_Property.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.Property.append(obj_) obj_.original_tagname_ = 'Property' elif nodeName_ == 'Parameter': obj_ = Parameter.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.Parameter.append(obj_) obj_.original_tagname_ = 'Parameter' elif nodeName_ == 'Constant': obj_ = Constant.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.Constant.append(obj_) obj_.original_tagname_ = 'Constant' elif nodeName_ == 'Exposure': obj_ = Exposure.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.Exposure.append(obj_) obj_.original_tagname_ = 'Exposure' elif nodeName_ == 'Requirement': obj_ = Requirement.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.Requirement.append(obj_) obj_.original_tagname_ = 'Requirement' elif nodeName_ == 'InstanceRequirement': obj_ = InstanceRequirement.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.InstanceRequirement.append(obj_) obj_.original_tagname_ = 'InstanceRequirement' elif nodeName_ == 'Dynamics': obj_ = Dynamics.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.Dynamics.append(obj_) obj_.original_tagname_ = 'Dynamics' # end class ComponentType class Constant(GeneratedsSuper): - """LEMS ComponentType for Constant.""" + """Constant -- LEMS ComponentType for Constant. 
+ + """ + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('name', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('dimension', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('value', 'Nml2Quantity', 0, 0, {'use': u'required'}), - MemberSpec_('description', 'xs:string', 0, 1, {'use': u'optional'}), + MemberSpec_('name', 'xs:string', 0, 0, {'use': 'required', 'name': 'name'}), + MemberSpec_('dimension', 'xs:string', 0, 0, {'use': 'required', 'name': 'dimension'}), + MemberSpec_('value', 'Nml2Quantity', 0, 0, {'use': 'required', 'name': 'value'}), + MemberSpec_('description', 'xs:string', 0, 1, {'use': 'optional', 'name': 'description'}), ] subclass = None superclass = None - def __init__(self, name=None, dimension=None, value=None, description=None, **kwargs_): + def __init__(self, name=None, dimension=None, value=None, description=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None self.name = _cast(None, name) + self.name_nsprefix_ = None self.dimension = _cast(None, dimension) + self.dimension_nsprefix_ = None self.value = _cast(None, value) + self.value_nsprefix_ = None self.description = _cast(None, description) + self.description_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -1148,14 +1575,38 @@ def factory(*args_, **kwargs_): else: return Constant(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_name(self): + return self.name + def set_name(self, name): + self.name = name + def get_dimension(self): + return self.dimension + def set_dimension(self, dimension): + self.dimension = dimension + def get_value(self): + return self.value + def set_value(self, value): + self.value = value + def get_description(self): + return self.description + def set_description(self, description): + self.description = description def validate_Nml2Quantity(self, value): # Validate type Nml2Quantity, a restriction on xs:string. 
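As a quick illustration of what strings the regenerated Nml2Quantity check accepts, the snippet below applies the regular expression copied from validate_Nml2Quantity_patterns_ in the hunk that follows; the sample values are purely illustrative:

    import re

    # Pattern copied from validate_Nml2Quantity_patterns_ below: optional sign,
    # digits with optional decimal part and exponent, optional whitespace,
    # then an optional alphanumeric/underscore unit string.
    nml2_quantity = r'^(-?([0-9]*(\.[0-9]+)?)([eE]-?[0-9]+)?[\s]*([_a-zA-Z0-9])*)$'

    for value in ('-60.0mV', '5nA', '1.5e-3 S_per_cm2', 'not a quantity!'):
        print(value, bool(re.match(nml2_quantity, value)))
    # Expected: the first three print True, the last prints False.
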
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_patterns_, )) - validate_Nml2Quantity_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*([_a-zA-Z0-9])*$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_patterns_, )) + validate_Nml2Quantity_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*([_a-zA-Z0-9])*)$']] + def _hasContent(self): if ( ): @@ -1170,19 +1621,21 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='C eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'Constant': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Constant') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Constant') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Constant', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Constant', pretty_print=pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Constant'): + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Constant'): if self.name is not None and 'name' not in already_processed: already_processed.add('name') outfile.write(' name=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.name), input_name='name')), )) @@ -1191,20 +1644,24 @@ def exportAttributes(self, outfile, level, already_processed, namespaceprefix_=' outfile.write(' dimension=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.dimension), input_name='dimension')), )) if self.value is not None and 'value' not in already_processed: already_processed.add('value') - outfile.write(' value=%s' % (quote_attrib(self.value), )) + outfile.write(' value=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.value), input_name='value')), )) if self.description is not None and 'description' not in already_processed: already_processed.add('description') outfile.write(' description=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.description), input_name='description')), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Constant', fromsubclass_=False, pretty_print=True): + def _exportChildren(self, outfile, level, namespaceprefix_='', 
namespacedef_='', name_='Constant', fromsubclass_=False, pretty_print=True): pass - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('name', node) if value is not None and 'name' not in already_processed: already_processed.add('name') @@ -1222,26 +1679,35 @@ def buildAttributes(self, node, attrs, already_processed): if value is not None and 'description' not in already_processed: already_processed.add('description') self.description = value - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): pass # end class Constant class Exposure(GeneratedsSuper): - """LEMS Exposure (ComponentType property)""" + """Exposure -- LEMS Exposure (ComponentType property) + + """ + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('name', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('dimension', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('description', 'xs:string', 0, 1, {'use': u'optional'}), + MemberSpec_('name', 'xs:string', 0, 0, {'use': 'required', 'name': 'name'}), + MemberSpec_('dimension', 'xs:string', 0, 0, {'use': 'required', 'name': 'dimension'}), + MemberSpec_('description', 'xs:string', 0, 1, {'use': 'optional', 'name': 'description'}), ] subclass = None superclass = None - def __init__(self, name=None, dimension=None, description=None, **kwargs_): + def __init__(self, name=None, dimension=None, description=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None self.name = _cast(None, name) + self.name_nsprefix_ = None self.dimension = _cast(None, dimension) + self.dimension_nsprefix_ = None self.description = _cast(None, description) + self.description_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -1253,7 +1719,23 @@ def factory(*args_, **kwargs_): else: return Exposure(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_name(self): + return self.name + def set_name(self, name): + self.name = name + def get_dimension(self): + return self.dimension + def set_dimension(self, dimension): + self.dimension = dimension + def get_description(self): + return self.description + def set_description(self, description): + self.description = description + def _hasContent(self): if ( ): @@ -1268,19 +1750,21 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='E eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 
'Exposure': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Exposure') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Exposure') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Exposure', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Exposure', pretty_print=pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Exposure'): + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Exposure'): if self.name is not None and 'name' not in already_processed: already_processed.add('name') outfile.write(' name=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.name), input_name='name')), )) @@ -1290,16 +1774,20 @@ def exportAttributes(self, outfile, level, already_processed, namespaceprefix_=' if self.description is not None and 'description' not in already_processed: already_processed.add('description') outfile.write(' description=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.description), input_name='description')), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Exposure', fromsubclass_=False, pretty_print=True): + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Exposure', fromsubclass_=False, pretty_print=True): pass - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('name', node) if value is not None and 'name' not in already_processed: already_processed.add('name') @@ -1312,25 +1800,32 @@ def buildAttributes(self, node, attrs, already_processed): if value is not None and 'description' not in already_processed: already_processed.add('description') self.description = value - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): pass # end class Exposure class NamedDimensionalType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('name', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('dimension', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('description', 'xs:string', 0, 1, {'use': u'optional'}), + MemberSpec_('name', 'xs:string', 0, 0, {'use': 'required', 'name': 
'name'}), + MemberSpec_('dimension', 'xs:string', 0, 0, {'use': 'required', 'name': 'dimension'}), + MemberSpec_('description', 'xs:string', 0, 1, {'use': 'optional', 'name': 'description'}), ] subclass = None superclass = None - def __init__(self, name=None, dimension=None, description=None, extensiontype_=None, **kwargs_): + def __init__(self, name=None, dimension=None, description=None, extensiontype_=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None self.name = _cast(None, name) + self.name_nsprefix_ = None self.dimension = _cast(None, dimension) + self.dimension_nsprefix_ = None self.description = _cast(None, description) + self.description_nsprefix_ = None self.extensiontype_ = extensiontype_ def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: @@ -1343,7 +1838,25 @@ def factory(*args_, **kwargs_): else: return NamedDimensionalType(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_name(self): + return self.name + def set_name(self, name): + self.name = name + def get_dimension(self): + return self.dimension + def set_dimension(self, dimension): + self.dimension = dimension + def get_description(self): + return self.description + def set_description(self, description): + self.description = description + def get_extensiontype_(self): return self.extensiontype_ + def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ + def _hasContent(self): if ( ): @@ -1358,19 +1871,21 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='N eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'NamedDimensionalType': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='NamedDimensionalType') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='NamedDimensionalType') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='NamedDimensionalType', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='NamedDimensionalType', pretty_print=pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='NamedDimensionalType'): + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='NamedDimensionalType'): if self.name is not None and 'name' not in already_processed: already_processed.add('name') outfile.write(' name=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.name), input_name='name')), )) @@ -1383,17 +1898,25 @@ def exportAttributes(self, outfile, level, already_processed, namespaceprefix_=' if self.extensiontype_ is not None and 'xsi:type' not in 
already_processed: already_processed.add('xsi:type') outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='NamedDimensionalType', fromsubclass_=False, pretty_print=True): + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get(self.extensiontype_, '') + outfile.write(' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_)) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='NamedDimensionalType', fromsubclass_=False, pretty_print=True): pass - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('name', node) if value is not None and 'name' not in already_processed: already_processed.add('name') @@ -1410,27 +1933,35 @@ def buildAttributes(self, node, attrs, already_processed): if value is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') self.extensiontype_ = value - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): pass # end class NamedDimensionalType class NamedDimensionalVariable(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('name', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('dimension', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('description', 'xs:string', 0, 1, {'use': u'optional'}), - MemberSpec_('exposure', 'xs:string', 0, 1, {'use': u'optional'}), + MemberSpec_('name', 'xs:string', 0, 0, {'use': 'required', 'name': 'name'}), + MemberSpec_('dimension', 'xs:string', 0, 0, {'use': 'required', 'name': 'dimension'}), + MemberSpec_('description', 'xs:string', 0, 1, {'use': 'optional', 'name': 'description'}), + MemberSpec_('exposure', 'xs:string', 0, 1, {'use': 'optional', 'name': 'exposure'}), ] subclass = None superclass = None - def __init__(self, name=None, dimension=None, description=None, exposure=None, extensiontype_=None, **kwargs_): + def __init__(self, name=None, dimension=None, description=None, exposure=None, extensiontype_=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None self.name = _cast(None, name) + self.name_nsprefix_ = None self.dimension = _cast(None, dimension) + self.dimension_nsprefix_ = None self.description = _cast(None, description) + self.description_nsprefix_ = None self.exposure = _cast(None, exposure) + self.exposure_nsprefix_ = None self.extensiontype_ = extensiontype_ def factory(*args_, **kwargs_): if 
CurrentSubclassModule_ is not None: @@ -1443,7 +1974,29 @@ def factory(*args_, **kwargs_): else: return NamedDimensionalVariable(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_name(self): + return self.name + def set_name(self, name): + self.name = name + def get_dimension(self): + return self.dimension + def set_dimension(self, dimension): + self.dimension = dimension + def get_description(self): + return self.description + def set_description(self, description): + self.description = description + def get_exposure(self): + return self.exposure + def set_exposure(self, exposure): + self.exposure = exposure + def get_extensiontype_(self): return self.extensiontype_ + def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ + def _hasContent(self): if ( ): @@ -1458,19 +2011,21 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='N eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'NamedDimensionalVariable': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='NamedDimensionalVariable') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='NamedDimensionalVariable') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='NamedDimensionalVariable', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='NamedDimensionalVariable', pretty_print=pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='NamedDimensionalVariable'): + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='NamedDimensionalVariable'): if self.name is not None and 'name' not in already_processed: already_processed.add('name') outfile.write(' name=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.name), input_name='name')), )) @@ -1486,17 +2041,25 @@ def exportAttributes(self, outfile, level, already_processed, namespaceprefix_=' if self.extensiontype_ is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='NamedDimensionalVariable', fromsubclass_=False, pretty_print=True): + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get(self.extensiontype_, '') + outfile.write(' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_)) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='NamedDimensionalVariable', fromsubclass_=False, pretty_print=True): pass - 
def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('name', node) if value is not None and 'name' not in already_processed: already_processed.add('name') @@ -1517,20 +2080,24 @@ def buildAttributes(self, node, attrs, already_processed): if value is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') self.extensiontype_ = value - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): pass # end class NamedDimensionalVariable class Parameter(NamedDimensionalType): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ ] subclass = None superclass = NamedDimensionalType - def __init__(self, name=None, dimension=None, description=None, **kwargs_): + def __init__(self, name=None, dimension=None, description=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(Parameter, self).__init__(name, dimension, description, **kwargs_) + self.ns_prefix_ = None + super(globals().get("Parameter"), self).__init__(name, dimension, description, **kwargs_) def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -1542,9 +2109,13 @@ def factory(*args_, **kwargs_): else: return Parameter(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def _hasContent(self): if ( - super(Parameter, self).hasContent_() + super(Parameter, self)._hasContent() ): return True else: @@ -1557,49 +2128,60 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='P eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'Parameter': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Parameter') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Parameter') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Parameter', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Parameter', pretty_print=pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def 
exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Parameter'): - super(Parameter, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Parameter') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Parameter', fromsubclass_=False, pretty_print=True): - super(Parameter, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Parameter'): + super(Parameter, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Parameter') + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Parameter', fromsubclass_=False, pretty_print=True): + super(Parameter, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) pass - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - super(Parameter, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(Parameter, self).buildChildren(child_, node, nodeName_, True) + def _buildAttributes(self, node, attrs, already_processed): + super(Parameter, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(Parameter, self)._buildChildren(child_, node, nodeName_, True) pass # end class Parameter class LEMS_Property(NamedDimensionalType): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('default_value', 'xs:double', 0, 1, {'use': u'optional'}), + MemberSpec_('default_value', 'xs:double', 0, 1, {'use': 'optional', 'name': 'default_value'}), ] subclass = None superclass = NamedDimensionalType - def __init__(self, name=None, dimension=None, description=None, default_value=None, **kwargs_): + def __init__(self, name=None, dimension=None, description=None, default_value=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(LEMS_Property, self).__init__(name, dimension, description, **kwargs_) + self.ns_prefix_ = None + super(globals().get("LEMS_Property"), self).__init__(name, dimension, description, **kwargs_) self.default_value = _cast(float, default_value) + self.default_value_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -1611,9 +2193,17 @@ def factory(*args_, **kwargs_): else: return LEMS_Property(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_defaultValue(self): + return 
self.default_value + def set_defaultValue(self, default_value): + self.default_value = default_value + def _hasContent(self): if ( - super(LEMS_Property, self).hasContent_() + super(LEMS_Property, self)._hasContent() ): return True else: @@ -1626,57 +2216,65 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='L eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'LEMS_Property': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='LEMS_Property') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='LEMS_Property') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='LEMS_Property', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='LEMS_Property', pretty_print=pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='LEMS_Property'): - super(LEMS_Property, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='LEMS_Property') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='LEMS_Property'): + super(LEMS_Property, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='LEMS_Property') if self.default_value is not None and 'default_value' not in already_processed: already_processed.add('default_value') outfile.write(' defaultValue="%s"' % self.gds_format_double(self.default_value, input_name='defaultValue')) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='LEMS_Property', fromsubclass_=False, pretty_print=True): - super(LEMS_Property, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='LEMS_Property', fromsubclass_=False, pretty_print=True): + super(LEMS_Property, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) pass - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('defaultValue', node) if value is not None and 'defaultValue' not in already_processed: already_processed.add('defaultValue') - try: - self.default_value = float(value) - except ValueError as exp: - raise 
ValueError('Bad float/double attribute (defaultValue): %s' % exp) - super(LEMS_Property, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(LEMS_Property, self).buildChildren(child_, node, nodeName_, True) + value = self.gds_parse_double(value, node, 'defaultValue') + self.default_value = value + super(LEMS_Property, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(LEMS_Property, self)._buildChildren(child_, node, nodeName_, True) pass # end class LEMS_Property class Requirement(NamedDimensionalType): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ ] subclass = None superclass = NamedDimensionalType - def __init__(self, name=None, dimension=None, description=None, **kwargs_): + def __init__(self, name=None, dimension=None, description=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(Requirement, self).__init__(name, dimension, description, **kwargs_) + self.ns_prefix_ = None + super(globals().get("Requirement"), self).__init__(name, dimension, description, **kwargs_) def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -1688,9 +2286,13 @@ def factory(*args_, **kwargs_): else: return Requirement(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def _hasContent(self): if ( - super(Requirement, self).hasContent_() + super(Requirement, self)._hasContent() ): return True else: @@ -1703,50 +2305,62 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='R eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'Requirement': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Requirement') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Requirement') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Requirement', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Requirement', pretty_print=pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Requirement'): - super(Requirement, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Requirement') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Requirement', fromsubclass_=False, pretty_print=True): - super(Requirement, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + def _exportAttributes(self, outfile, level, already_processed, 
namespaceprefix_='', name_='Requirement'): + super(Requirement, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Requirement') + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Requirement', fromsubclass_=False, pretty_print=True): + super(Requirement, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) pass - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - super(Requirement, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(Requirement, self).buildChildren(child_, node, nodeName_, True) + def _buildAttributes(self, node, attrs, already_processed): + super(Requirement, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(Requirement, self)._buildChildren(child_, node, nodeName_, True) pass # end class Requirement class InstanceRequirement(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('name', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('type', 'xs:string', 0, 0, {'use': u'required'}), + MemberSpec_('name', 'xs:string', 0, 0, {'use': 'required', 'name': 'name'}), + MemberSpec_('type', 'xs:string', 0, 0, {'use': 'required', 'name': 'type'}), ] subclass = None superclass = None - def __init__(self, name=None, type=None, **kwargs_): + def __init__(self, name=None, type=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None self.name = _cast(None, name) + self.name_nsprefix_ = None self.type = _cast(None, type) + self.type_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -1758,7 +2372,19 @@ def factory(*args_, **kwargs_): else: return InstanceRequirement(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_name(self): + return self.name + def set_name(self, name): + self.name = name + def get_type(self): + return self.type + def set_type(self, type): + self.type = type + def _hasContent(self): if ( ): @@ -1773,35 +2399,41 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='I eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'InstanceRequirement': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ 
and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='InstanceRequirement') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='InstanceRequirement') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='InstanceRequirement', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='InstanceRequirement', pretty_print=pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='InstanceRequirement'): + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='InstanceRequirement'): if self.name is not None and 'name' not in already_processed: already_processed.add('name') outfile.write(' name=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.name), input_name='name')), )) if self.type is not None and 'type' not in already_processed: already_processed.add('type') outfile.write(' type=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.type), input_name='type')), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='InstanceRequirement', fromsubclass_=False, pretty_print=True): + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='InstanceRequirement', fromsubclass_=False, pretty_print=True): pass - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('name', node) if value is not None and 'name' not in already_processed: already_processed.add('name') @@ -1810,40 +2442,50 @@ def buildAttributes(self, node, attrs, already_processed): if value is not None and 'type' not in already_processed: already_processed.add('type') self.type = value - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): pass # end class InstanceRequirement class Dynamics(GeneratedsSuper): - """LEMS ComponentType for Dynamics""" + """Dynamics -- LEMS ComponentType for Dynamics + + """ + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('StateVariable', 'StateVariable', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'StateVariable', u'name': u'StateVariable', u'minOccurs': u'0'}, None), - MemberSpec_('DerivedVariable', 'DerivedVariable', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'DerivedVariable', u'name': u'DerivedVariable', u'minOccurs': u'0'}, None), - MemberSpec_('ConditionalDerivedVariable', 'ConditionalDerivedVariable', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ConditionalDerivedVariable', 
u'name': u'ConditionalDerivedVariable', u'minOccurs': u'0'}, None), - MemberSpec_('TimeDerivative', 'TimeDerivative', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'TimeDerivative', u'name': u'TimeDerivative', u'minOccurs': u'0'}, None), + MemberSpec_('StateVariable', 'StateVariable', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'StateVariable', 'type': 'StateVariable'}, None), + MemberSpec_('DerivedVariable', 'DerivedVariable', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'DerivedVariable', 'type': 'DerivedVariable'}, None), + MemberSpec_('ConditionalDerivedVariable', 'ConditionalDerivedVariable', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'ConditionalDerivedVariable', 'type': 'ConditionalDerivedVariable'}, None), + MemberSpec_('TimeDerivative', 'TimeDerivative', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'TimeDerivative', 'type': 'TimeDerivative'}, None), ] subclass = None superclass = None - def __init__(self, StateVariable=None, DerivedVariable=None, ConditionalDerivedVariable=None, TimeDerivative=None, **kwargs_): + def __init__(self, StateVariable=None, DerivedVariable=None, ConditionalDerivedVariable=None, TimeDerivative=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None if StateVariable is None: self.StateVariable = [] else: self.StateVariable = StateVariable + self.StateVariable_nsprefix_ = None if DerivedVariable is None: self.DerivedVariable = [] else: self.DerivedVariable = DerivedVariable + self.DerivedVariable_nsprefix_ = None if ConditionalDerivedVariable is None: self.ConditionalDerivedVariable = [] else: self.ConditionalDerivedVariable = ConditionalDerivedVariable + self.ConditionalDerivedVariable_nsprefix_ = None if TimeDerivative is None: self.TimeDerivative = [] else: self.TimeDerivative = TimeDerivative + self.TimeDerivative_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -1855,7 +2497,51 @@ def factory(*args_, **kwargs_): else: return Dynamics(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_StateVariable(self): + return self.StateVariable + def set_StateVariable(self, StateVariable): + self.StateVariable = StateVariable + def add_StateVariable(self, value): + self.StateVariable.append(value) + def insert_StateVariable_at(self, index, value): + self.StateVariable.insert(index, value) + def replace_StateVariable_at(self, index, value): + self.StateVariable[index] = value + def get_DerivedVariable(self): + return self.DerivedVariable + def set_DerivedVariable(self, DerivedVariable): + self.DerivedVariable = DerivedVariable + def add_DerivedVariable(self, value): + self.DerivedVariable.append(value) + def insert_DerivedVariable_at(self, index, value): + self.DerivedVariable.insert(index, value) + def replace_DerivedVariable_at(self, index, value): + self.DerivedVariable[index] = value + def get_ConditionalDerivedVariable(self): + return self.ConditionalDerivedVariable + def set_ConditionalDerivedVariable(self, ConditionalDerivedVariable): + self.ConditionalDerivedVariable = ConditionalDerivedVariable + def add_ConditionalDerivedVariable(self, value): + self.ConditionalDerivedVariable.append(value) + def 
insert_ConditionalDerivedVariable_at(self, index, value): + self.ConditionalDerivedVariable.insert(index, value) + def replace_ConditionalDerivedVariable_at(self, index, value): + self.ConditionalDerivedVariable[index] = value + def get_TimeDerivative(self): + return self.TimeDerivative + def set_TimeDerivative(self, TimeDerivative): + self.TimeDerivative = TimeDerivative + def add_TimeDerivative(self, value): + self.TimeDerivative.append(value) + def insert_TimeDerivative_at(self, index, value): + self.TimeDerivative.insert(index, value) + def replace_TimeDerivative_at(self, index, value): + self.TimeDerivative[index] = value + def _hasContent(self): if ( self.StateVariable or self.DerivedVariable or @@ -1865,7 +2551,7 @@ def hasContent_(self): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Dynamics', pretty_print=True): + def export(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='Dynamics', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('Dynamics') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ @@ -1873,81 +2559,99 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='D eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'Dynamics': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Dynamics') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Dynamics') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Dynamics', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Dynamics', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Dynamics'): + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Dynamics'): pass - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Dynamics', fromsubclass_=False, pretty_print=True): + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='Dynamics', fromsubclass_=False, pretty_print=True): if pretty_print: eol_ = '\n' else: eol_ = '' for StateVariable_ in self.StateVariable: + namespaceprefix_ = self.StateVariable_nsprefix_ + ':' if (UseCapturedNS_ and self.StateVariable_nsprefix_) else '' StateVariable_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='StateVariable', pretty_print=pretty_print) for DerivedVariable_ in self.DerivedVariable: + namespaceprefix_ = self.DerivedVariable_nsprefix_ + ':' if (UseCapturedNS_ and self.DerivedVariable_nsprefix_) else '' DerivedVariable_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='DerivedVariable', pretty_print=pretty_print) for 
ConditionalDerivedVariable_ in self.ConditionalDerivedVariable: + namespaceprefix_ = self.ConditionalDerivedVariable_nsprefix_ + ':' if (UseCapturedNS_ and self.ConditionalDerivedVariable_nsprefix_) else '' ConditionalDerivedVariable_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ConditionalDerivedVariable', pretty_print=pretty_print) for TimeDerivative_ in self.TimeDerivative: + namespaceprefix_ = self.TimeDerivative_nsprefix_ + ':' if (UseCapturedNS_ and self.TimeDerivative_nsprefix_) else '' TimeDerivative_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='TimeDerivative', pretty_print=pretty_print) - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): pass - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'StateVariable': obj_ = StateVariable.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.StateVariable.append(obj_) obj_.original_tagname_ = 'StateVariable' elif nodeName_ == 'DerivedVariable': obj_ = DerivedVariable.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.DerivedVariable.append(obj_) obj_.original_tagname_ = 'DerivedVariable' elif nodeName_ == 'ConditionalDerivedVariable': obj_ = ConditionalDerivedVariable.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.ConditionalDerivedVariable.append(obj_) obj_.original_tagname_ = 'ConditionalDerivedVariable' elif nodeName_ == 'TimeDerivative': obj_ = TimeDerivative.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.TimeDerivative.append(obj_) obj_.original_tagname_ = 'TimeDerivative' # end class Dynamics class DerivedVariable(NamedDimensionalVariable): - """LEMS ComponentType for DerivedVariable""" + """DerivedVariable -- LEMS ComponentType for DerivedVariable + + """ + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('value', 'xs:string', 0, 1, {'use': u'optional'}), - MemberSpec_('select', 'xs:string', 0, 1, {'use': u'optional'}), + MemberSpec_('value', 'xs:string', 0, 1, {'use': 'optional', 'name': 'value'}), + MemberSpec_('select', 'xs:string', 0, 1, {'use': 'optional', 'name': 'select'}), ] subclass = None superclass = NamedDimensionalVariable - def __init__(self, name=None, dimension=None, description=None, exposure=None, value=None, select=None, **kwargs_): + def __init__(self, name=None, dimension=None, description=None, exposure=None, value=None, select=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(DerivedVariable, self).__init__(name, 
dimension, description, exposure, **kwargs_) + self.ns_prefix_ = None + super(globals().get("DerivedVariable"), self).__init__(name, dimension, description, exposure, **kwargs_) self.value = _cast(None, value) + self.value_nsprefix_ = None self.select = _cast(None, select) + self.select_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -1959,9 +2663,21 @@ def factory(*args_, **kwargs_): else: return DerivedVariable(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_value(self): + return self.value + def set_value(self, value): + self.value = value + def get_select(self): + return self.select + def set_select(self, select): + self.select = select + def _hasContent(self): if ( - super(DerivedVariable, self).hasContent_() + super(DerivedVariable, self)._hasContent() ): return True else: @@ -1974,37 +2690,43 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='D eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'DerivedVariable': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='DerivedVariable') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='DerivedVariable') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='DerivedVariable', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='DerivedVariable', pretty_print=pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='DerivedVariable'): - super(DerivedVariable, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='DerivedVariable') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='DerivedVariable'): + super(DerivedVariable, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='DerivedVariable') if self.value is not None and 'value' not in already_processed: already_processed.add('value') outfile.write(' value=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.value), input_name='value')), )) if self.select is not None and 'select' not in already_processed: already_processed.add('select') outfile.write(' select=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.select), input_name='select')), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='DerivedVariable', fromsubclass_=False, pretty_print=True): - super(DerivedVariable, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='DerivedVariable', fromsubclass_=False, pretty_print=True): + super(DerivedVariable, 
self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) pass - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('value', node) if value is not None and 'value' not in already_processed: already_processed.add('value') @@ -2013,22 +2735,26 @@ def buildAttributes(self, node, attrs, already_processed): if value is not None and 'select' not in already_processed: already_processed.add('select') self.select = value - super(DerivedVariable, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(DerivedVariable, self).buildChildren(child_, node, nodeName_, True) + super(DerivedVariable, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(DerivedVariable, self)._buildChildren(child_, node, nodeName_, True) pass # end class DerivedVariable class StateVariable(NamedDimensionalVariable): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ ] subclass = None superclass = NamedDimensionalVariable - def __init__(self, name=None, dimension=None, description=None, exposure=None, **kwargs_): + def __init__(self, name=None, dimension=None, description=None, exposure=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(StateVariable, self).__init__(name, dimension, description, exposure, **kwargs_) + self.ns_prefix_ = None + super(globals().get("StateVariable"), self).__init__(name, dimension, description, exposure, **kwargs_) def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -2040,9 +2766,13 @@ def factory(*args_, **kwargs_): else: return StateVariable(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def _hasContent(self): if ( - super(StateVariable, self).hasContent_() + super(StateVariable, self)._hasContent() ): return True else: @@ -2055,53 +2785,66 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='S eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'StateVariable': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='StateVariable') - if 
self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='StateVariable') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='StateVariable', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='StateVariable', pretty_print=pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='StateVariable'): - super(StateVariable, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='StateVariable') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='StateVariable', fromsubclass_=False, pretty_print=True): - super(StateVariable, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='StateVariable'): + super(StateVariable, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='StateVariable') + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='StateVariable', fromsubclass_=False, pretty_print=True): + super(StateVariable, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) pass - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - super(StateVariable, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(StateVariable, self).buildChildren(child_, node, nodeName_, True) + def _buildAttributes(self, node, attrs, already_processed): + super(StateVariable, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(StateVariable, self)._buildChildren(child_, node, nodeName_, True) pass # end class StateVariable class ConditionalDerivedVariable(NamedDimensionalVariable): - """LEMS ComponentType for ConditionalDerivedVariable""" + """ConditionalDerivedVariable -- LEMS ComponentType for ConditionalDerivedVariable + + """ + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('Case', 'Case', 1, 0, {u'maxOccurs': u'unbounded', u'type': u'Case', u'name': u'Case', u'minOccurs': u'1'}, None), + MemberSpec_('Case', 'Case', 1, 0, {'maxOccurs': 'unbounded', 'minOccurs': '1', 'name': 'Case', 'type': 'Case'}, None), ] subclass = None superclass = NamedDimensionalVariable - def __init__(self, name=None, dimension=None, description=None, exposure=None, Case=None, **kwargs_): + def __init__(self, name=None, dimension=None, description=None, exposure=None, Case=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = 
gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(ConditionalDerivedVariable, self).__init__(name, dimension, description, exposure, **kwargs_) + self.ns_prefix_ = None + super(globals().get("ConditionalDerivedVariable"), self).__init__(name, dimension, description, exposure, **kwargs_) if Case is None: self.Case = [] else: self.Case = Case + self.Case_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -2113,15 +2856,29 @@ def factory(*args_, **kwargs_): else: return ConditionalDerivedVariable(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_Case(self): + return self.Case + def set_Case(self, Case): + self.Case = Case + def add_Case(self, value): + self.Case.append(value) + def insert_Case_at(self, index, value): + self.Case.insert(index, value) + def replace_Case_at(self, index, value): + self.Case[index] = value + def _hasContent(self): if ( self.Case or - super(ConditionalDerivedVariable, self).hasContent_() + super(ConditionalDerivedVariable, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ConditionalDerivedVariable', pretty_print=True): + def export(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='ConditionalDerivedVariable', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('ConditionalDerivedVariable') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ @@ -2129,60 +2886,73 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='C eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'ConditionalDerivedVariable': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ConditionalDerivedVariable') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ConditionalDerivedVariable') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ConditionalDerivedVariable', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ConditionalDerivedVariable', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ConditionalDerivedVariable'): - super(ConditionalDerivedVariable, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ConditionalDerivedVariable') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ConditionalDerivedVariable', fromsubclass_=False, pretty_print=True): - super(ConditionalDerivedVariable, self).exportChildren(outfile, 
level, namespaceprefix_, name_, True, pretty_print=pretty_print) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ConditionalDerivedVariable'): + super(ConditionalDerivedVariable, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ConditionalDerivedVariable') + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='ConditionalDerivedVariable', fromsubclass_=False, pretty_print=True): + super(ConditionalDerivedVariable, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) if pretty_print: eol_ = '\n' else: eol_ = '' for Case_ in self.Case: + namespaceprefix_ = self.Case_nsprefix_ + ':' if (UseCapturedNS_ and self.Case_nsprefix_) else '' Case_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Case', pretty_print=pretty_print) - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - super(ConditionalDerivedVariable, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + def _buildAttributes(self, node, attrs, already_processed): + super(ConditionalDerivedVariable, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'Case': obj_ = Case.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.Case.append(obj_) obj_.original_tagname_ = 'Case' - super(ConditionalDerivedVariable, self).buildChildren(child_, node, nodeName_, True) + super(ConditionalDerivedVariable, self)._buildChildren(child_, node, nodeName_, True) # end class ConditionalDerivedVariable class Case(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('condition', 'xs:string', 0, 1, {'use': u'optional'}), - MemberSpec_('value', 'xs:string', 0, 0, {'use': u'required'}), + MemberSpec_('condition', 'xs:string', 0, 1, {'use': 'optional', 'name': 'condition'}), + MemberSpec_('value', 'xs:string', 0, 0, {'use': 'required', 'name': 'value'}), ] subclass = None superclass = None - def __init__(self, condition=None, value=None, **kwargs_): + def __init__(self, condition=None, value=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None self.condition = _cast(None, condition) + self.condition_nsprefix_ = None self.value = _cast(None, value) + self.value_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -2194,7 +2964,19 @@ def factory(*args_, **kwargs_): else: return Case(*args_, **kwargs_) factory = staticmethod(factory) - def 
hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_condition(self): + return self.condition + def set_condition(self, condition): + self.condition = condition + def get_value(self): + return self.value + def set_value(self, value): + self.value = value + def _hasContent(self): if ( ): @@ -2209,35 +2991,41 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='C eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'Case': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Case') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Case') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Case', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Case', pretty_print=pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Case'): + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Case'): if self.condition is not None and 'condition' not in already_processed: already_processed.add('condition') outfile.write(' condition=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.condition), input_name='condition')), )) if self.value is not None and 'value' not in already_processed: already_processed.add('value') outfile.write(' value=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.value), input_name='value')), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Case', fromsubclass_=False, pretty_print=True): + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Case', fromsubclass_=False, pretty_print=True): pass - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('condition', node) if value is not None and 'condition' not in already_processed: already_processed.add('condition') @@ -2246,23 +3034,29 @@ def buildAttributes(self, node, attrs, already_processed): if value is not None and 'value' not in already_processed: already_processed.add('value') self.value = value - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + def _buildChildren(self, child_, 
node, nodeName_, fromsubclass_=False, gds_collector_=None): pass # end class Case class TimeDerivative(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('variable', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('value', 'xs:string', 0, 0, {'use': u'required'}), + MemberSpec_('variable', 'xs:string', 0, 0, {'use': 'required', 'name': 'variable'}), + MemberSpec_('value', 'xs:string', 0, 0, {'use': 'required', 'name': 'value'}), ] subclass = None superclass = None - def __init__(self, variable=None, value=None, **kwargs_): + def __init__(self, variable=None, value=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None self.variable = _cast(None, variable) + self.variable_nsprefix_ = None self.value = _cast(None, value) + self.value_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -2274,7 +3068,19 @@ def factory(*args_, **kwargs_): else: return TimeDerivative(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_variable(self): + return self.variable + def set_variable(self, variable): + self.variable = variable + def get_value(self): + return self.value + def set_value(self, value): + self.value = value + def _hasContent(self): if ( ): @@ -2289,35 +3095,41 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='T eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'TimeDerivative': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='TimeDerivative') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='TimeDerivative') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='TimeDerivative', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='TimeDerivative', pretty_print=pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='TimeDerivative'): + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='TimeDerivative'): if self.variable is not None and 'variable' not in already_processed: already_processed.add('variable') outfile.write(' variable=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.variable), input_name='variable')), )) if self.value is not None and 'value' not in already_processed: already_processed.add('value') outfile.write(' value=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.value), input_name='value')), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='TimeDerivative', fromsubclass_=False, 
pretty_print=True): + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='TimeDerivative', fromsubclass_=False, pretty_print=True): pass - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('variable', node) if value is not None and 'variable' not in already_processed: already_processed.add('variable') @@ -2326,21 +3138,26 @@ def buildAttributes(self, node, attrs, already_processed): if value is not None and 'value' not in already_processed: already_processed.add('value') self.value = value - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): pass # end class TimeDerivative class IncludeType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('href', 'xs:anyURI', 0, 0, {'use': u'required'}), + MemberSpec_('href', 'xs:anyURI', 0, 0, {'use': 'required', 'name': 'href'}), ] subclass = None superclass = None - def __init__(self, href=None, **kwargs_): + def __init__(self, href=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None self.href = _cast(None, href) + self.href_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -2352,7 +3169,15 @@ def factory(*args_, **kwargs_): else: return IncludeType(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_href(self): + return self.href + def set_href(self, href): + self.href = href + def _hasContent(self): if ( ): @@ -2367,53 +3192,65 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='I eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'IncludeType': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IncludeType') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IncludeType') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='IncludeType', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='IncludeType', 
pretty_print=pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='IncludeType'): + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='IncludeType'): if self.href is not None and 'href' not in already_processed: already_processed.add('href') outfile.write(' href=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.href), input_name='href')), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IncludeType', fromsubclass_=False, pretty_print=True): + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IncludeType', fromsubclass_=False, pretty_print=True): pass - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('href', node) if value is not None and 'href' not in already_processed: already_processed.add('href') self.href = value - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): pass # end class IncludeType class Q10ConductanceScaling(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('q10_factor', 'Nml2Quantity_none', 0, 0, {'use': u'required'}), - MemberSpec_('experimental_temp', 'Nml2Quantity_temperature', 0, 0, {'use': u'required'}), + MemberSpec_('q10_factor', 'Nml2Quantity_none', 0, 0, {'use': 'required', 'name': 'q10_factor'}), + MemberSpec_('experimental_temp', 'Nml2Quantity_temperature', 0, 0, {'use': 'required', 'name': 'experimental_temp'}), ] subclass = None superclass = None - def __init__(self, q10_factor=None, experimental_temp=None, **kwargs_): + def __init__(self, q10_factor=None, experimental_temp=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None self.q10_factor = _cast(None, q10_factor) + self.q10_factor_nsprefix_ = None self.experimental_temp = _cast(None, experimental_temp) + self.experimental_temp_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -2425,21 +3262,41 @@ def factory(*args_, **kwargs_): else: return Q10ConductanceScaling(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_q10Factor(self): + return self.q10_factor + def set_q10Factor(self, q10_factor): + self.q10_factor = q10_factor + def get_experimentalTemp(self): + return self.experimental_temp + def set_experimentalTemp(self, experimental_temp): + 
self.experimental_temp = experimental_temp def validate_Nml2Quantity_none(self, value): # Validate type Nml2Quantity_none, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_none_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_none_patterns_, )) - validate_Nml2Quantity_none_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?$']] + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_none_patterns_, )) + validate_Nml2Quantity_none_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?)$']] def validate_Nml2Quantity_temperature(self, value): # Validate type Nml2Quantity_temperature, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_temperature_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_temperature_patterns_, )) - validate_Nml2Quantity_temperature_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(degC)$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_temperature_patterns_, )) + validate_Nml2Quantity_temperature_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(degC))$']] + def _hasContent(self): if ( ): @@ -2454,35 +3311,41 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Q eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'Q10ConductanceScaling': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Q10ConductanceScaling') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Q10ConductanceScaling') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Q10ConductanceScaling', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Q10ConductanceScaling', pretty_print=pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, 
namespaceprefix_='', name_='Q10ConductanceScaling'): + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Q10ConductanceScaling'): if self.q10_factor is not None and 'q10_factor' not in already_processed: already_processed.add('q10_factor') - outfile.write(' q10Factor=%s' % (quote_attrib(self.q10_factor), )) + outfile.write(' q10Factor=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.q10_factor), input_name='q10Factor')), )) if self.experimental_temp is not None and 'experimental_temp' not in already_processed: already_processed.add('experimental_temp') - outfile.write(' experimentalTemp=%s' % (quote_attrib(self.experimental_temp), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Q10ConductanceScaling', fromsubclass_=False, pretty_print=True): + outfile.write(' experimentalTemp=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.experimental_temp), input_name='experimentalTemp')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Q10ConductanceScaling', fromsubclass_=False, pretty_print=True): pass - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('q10Factor', node) if value is not None and 'q10Factor' not in already_processed: already_processed.add('q10Factor') @@ -2493,27 +3356,35 @@ def buildAttributes(self, node, attrs, already_processed): already_processed.add('experimentalTemp') self.experimental_temp = value self.validate_Nml2Quantity_temperature(self.experimental_temp) # validate type Nml2Quantity_temperature - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): pass # end class Q10ConductanceScaling class Q10Settings(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('type', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('fixed_q10', 'Nml2Quantity_none', 0, 1, {'use': u'optional'}), - MemberSpec_('q10_factor', 'Nml2Quantity_none', 0, 1, {'use': u'optional'}), - MemberSpec_('experimental_temp', 'Nml2Quantity_temperature', 0, 1, {'use': u'optional'}), + MemberSpec_('type', 'NmlId', 0, 0, {'use': 'required', 'name': 'type'}), + MemberSpec_('fixed_q10', 'Nml2Quantity_none', 0, 1, {'use': 'optional', 'name': 'fixed_q10'}), + MemberSpec_('q10_factor', 'Nml2Quantity_none', 0, 1, {'use': 'optional', 'name': 'q10_factor'}), + MemberSpec_('experimental_temp', 'Nml2Quantity_temperature', 0, 1, {'use': 'optional', 'name': 'experimental_temp'}), ] subclass = None superclass = None - def __init__(self, type=None, fixed_q10=None, q10_factor=None, experimental_temp=None, **kwargs_): + def __init__(self, type=None, fixed_q10=None, q10_factor=None, experimental_temp=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + 
self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None self.type = _cast(None, type) + self.type_nsprefix_ = None self.fixed_q10 = _cast(None, fixed_q10) + self.fixed_q10_nsprefix_ = None self.q10_factor = _cast(None, q10_factor) + self.q10_factor_nsprefix_ = None self.experimental_temp = _cast(None, experimental_temp) + self.experimental_temp_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -2525,28 +3396,60 @@ def factory(*args_, **kwargs_): else: return Q10Settings(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_type(self): + return self.type + def set_type(self, type): + self.type = type + def get_fixedQ10(self): + return self.fixed_q10 + def set_fixedQ10(self, fixed_q10): + self.fixed_q10 = fixed_q10 + def get_q10Factor(self): + return self.q10_factor + def set_q10Factor(self, q10_factor): + self.q10_factor = q10_factor + def get_experimentalTemp(self): + return self.experimental_temp + def set_experimentalTemp(self, experimental_temp): + self.experimental_temp = experimental_temp def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_NmlId_patterns_, )) + validate_NmlId_patterns_ = [['^([a-zA-Z_][a-zA-Z0-9_]*)$']] def validate_Nml2Quantity_none(self, value): # Validate type Nml2Quantity_none, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_none_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_none_patterns_, )) - validate_Nml2Quantity_none_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?$']] + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_none_patterns_, )) + validate_Nml2Quantity_none_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?)$']] def validate_Nml2Quantity_temperature(self, value): # Validate type Nml2Quantity_temperature, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_temperature_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_temperature_patterns_, )) - validate_Nml2Quantity_temperature_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(degC)$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_temperature_patterns_, )) + validate_Nml2Quantity_temperature_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(degC))$']] + def _hasContent(self): if ( ): @@ -2561,41 +3464,47 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Q eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'Q10Settings': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Q10Settings') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Q10Settings') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Q10Settings', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Q10Settings', pretty_print=pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Q10Settings'): + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Q10Settings'): if self.type is not None and 'type' not in already_processed: already_processed.add('type') - outfile.write(' type=%s' % (quote_attrib(self.type), )) + outfile.write(' type=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.type), input_name='type')), )) if self.fixed_q10 is not None and 'fixed_q10' not in already_processed: already_processed.add('fixed_q10') - outfile.write(' fixedQ10=%s' % (quote_attrib(self.fixed_q10), )) + outfile.write(' fixedQ10=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.fixed_q10), input_name='fixedQ10')), )) if self.q10_factor is not None and 'q10_factor' not in already_processed: already_processed.add('q10_factor') - outfile.write(' q10Factor=%s' % (quote_attrib(self.q10_factor), )) + outfile.write(' q10Factor=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.q10_factor), input_name='q10Factor')), )) if self.experimental_temp is not None and 'experimental_temp' not in already_processed: already_processed.add('experimental_temp') - outfile.write(' experimentalTemp=%s' % (quote_attrib(self.experimental_temp), )) - def exportChildren(self, 
outfile, level, namespaceprefix_='', namespacedef_='', name_='Q10Settings', fromsubclass_=False, pretty_print=True): + outfile.write(' experimentalTemp=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.experimental_temp), input_name='experimentalTemp')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Q10Settings', fromsubclass_=False, pretty_print=True): pass - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('type', node) if value is not None and 'type' not in already_processed: already_processed.add('type') @@ -2616,27 +3525,35 @@ def buildAttributes(self, node, attrs, already_processed): already_processed.add('experimentalTemp') self.experimental_temp = value self.validate_Nml2Quantity_temperature(self.experimental_temp) # validate type Nml2Quantity_temperature - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): pass # end class Q10Settings class HHRate(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('type', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('rate', 'Nml2Quantity_pertime', 0, 1, {'use': u'optional'}), - MemberSpec_('midpoint', 'Nml2Quantity_voltage', 0, 1, {'use': u'optional'}), - MemberSpec_('scale', 'Nml2Quantity_voltage', 0, 1, {'use': u'optional'}), + MemberSpec_('type', 'NmlId', 0, 0, {'use': 'required', 'name': 'type'}), + MemberSpec_('rate', 'Nml2Quantity_pertime', 0, 1, {'use': 'optional', 'name': 'rate'}), + MemberSpec_('midpoint', 'Nml2Quantity_voltage', 0, 1, {'use': 'optional', 'name': 'midpoint'}), + MemberSpec_('scale', 'Nml2Quantity_voltage', 0, 1, {'use': 'optional', 'name': 'scale'}), ] subclass = None superclass = None - def __init__(self, type=None, rate=None, midpoint=None, scale=None, **kwargs_): + def __init__(self, type=None, rate=None, midpoint=None, scale=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None self.type = _cast(None, type) + self.type_nsprefix_ = None self.rate = _cast(None, rate) + self.rate_nsprefix_ = None self.midpoint = _cast(None, midpoint) + self.midpoint_nsprefix_ = None self.scale = _cast(None, scale) + self.scale_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -2648,28 +3565,60 @@ def factory(*args_, **kwargs_): else: return HHRate(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_type(self): + return self.type + def set_type(self, type): + self.type = type + def get_rate(self): + return 
self.rate + def set_rate(self, rate): + self.rate = rate + def get_midpoint(self): + return self.midpoint + def set_midpoint(self, midpoint): + self.midpoint = midpoint + def get_scale(self): + return self.scale + def set_scale(self, scale): + self.scale = scale def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_NmlId_patterns_, )) + validate_NmlId_patterns_ = [['^([a-zA-Z_][a-zA-Z0-9_]*)$']] def validate_Nml2Quantity_pertime(self, value): # Validate type Nml2Quantity_pertime, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_pertime_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_pertime_patterns_, )) - validate_Nml2Quantity_pertime_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(per_s|per_ms|Hz)$']] + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_pertime_patterns_, )) + validate_Nml2Quantity_pertime_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(per_s|per_ms|Hz))$']] def validate_Nml2Quantity_voltage(self, value): # Validate type Nml2Quantity_voltage, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_voltage_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_voltage_patterns_, )) - validate_Nml2Quantity_voltage_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_voltage_patterns_, )) + validate_Nml2Quantity_voltage_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV))$']] + def _hasContent(self): if ( ): @@ -2684,41 +3633,47 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='H eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'HHRate': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='HHRate') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='HHRate') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='HHRate', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='HHRate', pretty_print=pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='HHRate'): + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='HHRate'): if self.type is not None and 'type' not in already_processed: already_processed.add('type') - outfile.write(' type=%s' % (quote_attrib(self.type), )) + outfile.write(' type=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.type), input_name='type')), )) if self.rate is not None and 'rate' not in already_processed: already_processed.add('rate') - outfile.write(' rate=%s' % (quote_attrib(self.rate), )) + outfile.write(' rate=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.rate), input_name='rate')), )) if self.midpoint is not None and 'midpoint' not in already_processed: already_processed.add('midpoint') - outfile.write(' midpoint=%s' % (quote_attrib(self.midpoint), )) + outfile.write(' midpoint=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.midpoint), input_name='midpoint')), )) if self.scale is not None and 'scale' not in already_processed: already_processed.add('scale') - outfile.write(' scale=%s' % (quote_attrib(self.scale), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='HHRate', fromsubclass_=False, pretty_print=True): + outfile.write(' scale=%s' % 
(self.gds_encode(self.gds_format_string(quote_attrib(self.scale), input_name='scale')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='HHRate', fromsubclass_=False, pretty_print=True): pass - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('type', node) if value is not None and 'type' not in already_processed: already_processed.add('type') @@ -2739,27 +3694,35 @@ def buildAttributes(self, node, attrs, already_processed): already_processed.add('scale') self.scale = value self.validate_Nml2Quantity_voltage(self.scale) # validate type Nml2Quantity_voltage - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): pass # end class HHRate class HHVariable(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('type', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('rate', 'xs:float', 0, 1, {'use': u'optional'}), - MemberSpec_('midpoint', 'Nml2Quantity_voltage', 0, 1, {'use': u'optional'}), - MemberSpec_('scale', 'Nml2Quantity_voltage', 0, 1, {'use': u'optional'}), + MemberSpec_('type', 'NmlId', 0, 0, {'use': 'required', 'name': 'type'}), + MemberSpec_('rate', 'xs:float', 0, 1, {'use': 'optional', 'name': 'rate'}), + MemberSpec_('midpoint', 'Nml2Quantity_voltage', 0, 1, {'use': 'optional', 'name': 'midpoint'}), + MemberSpec_('scale', 'Nml2Quantity_voltage', 0, 1, {'use': 'optional', 'name': 'scale'}), ] subclass = None superclass = None - def __init__(self, type=None, rate=None, midpoint=None, scale=None, **kwargs_): + def __init__(self, type=None, rate=None, midpoint=None, scale=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None self.type = _cast(None, type) + self.type_nsprefix_ = None self.rate = _cast(float, rate) + self.rate_nsprefix_ = None self.midpoint = _cast(None, midpoint) + self.midpoint_nsprefix_ = None self.scale = _cast(None, scale) + self.scale_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -2771,21 +3734,49 @@ def factory(*args_, **kwargs_): else: return HHVariable(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_type(self): + return self.type + def set_type(self, type): + self.type = type + def get_rate(self): + return self.rate + def set_rate(self, rate): + self.rate = rate + def get_midpoint(self): + return self.midpoint + def set_midpoint(self, midpoint): + self.midpoint = midpoint + def get_scale(self): + return self.scale + def set_scale(self, scale): + 
self.scale = scale def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_NmlId_patterns_, )) + validate_NmlId_patterns_ = [['^([a-zA-Z_][a-zA-Z0-9_]*)$']] def validate_Nml2Quantity_voltage(self, value): # Validate type Nml2Quantity_voltage, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_voltage_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_voltage_patterns_, )) - validate_Nml2Quantity_voltage_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_voltage_patterns_, )) + validate_Nml2Quantity_voltage_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV))$']] + def _hasContent(self): if ( ): @@ -2800,41 +3791,47 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='H eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'HHVariable': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='HHVariable') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='HHVariable') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='HHVariable', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='HHVariable', pretty_print=pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='HHVariable'): + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='HHVariable'): if self.type is not None and 'type' not in already_processed: 
already_processed.add('type') - outfile.write(' type=%s' % (quote_attrib(self.type), )) + outfile.write(' type=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.type), input_name='type')), )) if self.rate is not None and 'rate' not in already_processed: already_processed.add('rate') outfile.write(' rate="%s"' % self.gds_format_float(self.rate, input_name='rate')) if self.midpoint is not None and 'midpoint' not in already_processed: already_processed.add('midpoint') - outfile.write(' midpoint=%s' % (quote_attrib(self.midpoint), )) + outfile.write(' midpoint=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.midpoint), input_name='midpoint')), )) if self.scale is not None and 'scale' not in already_processed: already_processed.add('scale') - outfile.write(' scale=%s' % (quote_attrib(self.scale), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='HHVariable', fromsubclass_=False, pretty_print=True): + outfile.write(' scale=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.scale), input_name='scale')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='HHVariable', fromsubclass_=False, pretty_print=True): pass - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('type', node) if value is not None and 'type' not in already_processed: already_processed.add('type') @@ -2843,10 +3840,8 @@ def buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('rate', node) if value is not None and 'rate' not in already_processed: already_processed.add('rate') - try: - self.rate = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (rate): %s' % exp) + value = self.gds_parse_float(value, node, 'rate') + self.rate = value value = find_attr_value_('midpoint', node) if value is not None and 'midpoint' not in already_processed: already_processed.add('midpoint') @@ -2857,29 +3852,38 @@ def buildAttributes(self, node, attrs, already_processed): already_processed.add('scale') self.scale = value self.validate_Nml2Quantity_voltage(self.scale) # validate type Nml2Quantity_voltage - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): pass # end class HHVariable class HHTime(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('type', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('rate', 'Nml2Quantity_time', 0, 1, {'use': u'optional'}), - MemberSpec_('midpoint', 'Nml2Quantity_voltage', 0, 1, {'use': u'optional'}), - MemberSpec_('scale', 'Nml2Quantity_voltage', 0, 1, {'use': u'optional'}), - MemberSpec_('tau', 'Nml2Quantity_time', 0, 1, {'use': u'optional'}), + MemberSpec_('type', 'NmlId', 0, 0, {'use': 'required', 'name': 'type'}), + 
MemberSpec_('rate', 'Nml2Quantity_time', 0, 1, {'use': 'optional', 'name': 'rate'}), + MemberSpec_('midpoint', 'Nml2Quantity_voltage', 0, 1, {'use': 'optional', 'name': 'midpoint'}), + MemberSpec_('scale', 'Nml2Quantity_voltage', 0, 1, {'use': 'optional', 'name': 'scale'}), + MemberSpec_('tau', 'Nml2Quantity_time', 0, 1, {'use': 'optional', 'name': 'tau'}), ] subclass = None superclass = None - def __init__(self, type=None, rate=None, midpoint=None, scale=None, tau=None, **kwargs_): + def __init__(self, type=None, rate=None, midpoint=None, scale=None, tau=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None self.type = _cast(None, type) + self.type_nsprefix_ = None self.rate = _cast(None, rate) + self.rate_nsprefix_ = None self.midpoint = _cast(None, midpoint) + self.midpoint_nsprefix_ = None self.scale = _cast(None, scale) + self.scale_nsprefix_ = None self.tau = _cast(None, tau) + self.tau_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -2891,28 +3895,64 @@ def factory(*args_, **kwargs_): else: return HHTime(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_type(self): + return self.type + def set_type(self, type): + self.type = type + def get_rate(self): + return self.rate + def set_rate(self, rate): + self.rate = rate + def get_midpoint(self): + return self.midpoint + def set_midpoint(self, midpoint): + self.midpoint = midpoint + def get_scale(self): + return self.scale + def set_scale(self, scale): + self.scale = scale + def get_tau(self): + return self.tau + def set_tau(self, tau): + self.tau = tau def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_NmlId_patterns_, )) + validate_NmlId_patterns_ = [['^([a-zA-Z_][a-zA-Z0-9_]*)$']] def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_time_patterns_, )) + validate_Nml2Quantity_time_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms))$']] def validate_Nml2Quantity_voltage(self, value): # Validate type Nml2Quantity_voltage, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_voltage_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_voltage_patterns_, )) - validate_Nml2Quantity_voltage_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_voltage_patterns_, )) + validate_Nml2Quantity_voltage_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV))$']] + def _hasContent(self): if ( ): @@ -2927,44 +3967,50 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='H eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'HHTime': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='HHTime') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='HHTime') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='HHTime', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='HHTime', pretty_print=pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='HHTime'): + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='HHTime'): if self.type is not None and 'type' not in already_processed: already_processed.add('type') - 
outfile.write(' type=%s' % (quote_attrib(self.type), )) + outfile.write(' type=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.type), input_name='type')), )) if self.rate is not None and 'rate' not in already_processed: already_processed.add('rate') - outfile.write(' rate=%s' % (quote_attrib(self.rate), )) + outfile.write(' rate=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.rate), input_name='rate')), )) if self.midpoint is not None and 'midpoint' not in already_processed: already_processed.add('midpoint') - outfile.write(' midpoint=%s' % (quote_attrib(self.midpoint), )) + outfile.write(' midpoint=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.midpoint), input_name='midpoint')), )) if self.scale is not None and 'scale' not in already_processed: already_processed.add('scale') - outfile.write(' scale=%s' % (quote_attrib(self.scale), )) + outfile.write(' scale=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.scale), input_name='scale')), )) if self.tau is not None and 'tau' not in already_processed: already_processed.add('tau') - outfile.write(' tau=%s' % (quote_attrib(self.tau), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='HHTime', fromsubclass_=False, pretty_print=True): + outfile.write(' tau=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.tau), input_name='tau')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='HHTime', fromsubclass_=False, pretty_print=True): pass - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('type', node) if value is not None and 'type' not in already_processed: already_processed.add('type') @@ -2990,29 +4036,38 @@ def buildAttributes(self, node, attrs, already_processed): already_processed.add('tau') self.tau = value self.validate_Nml2Quantity_time(self.tau) # validate type Nml2Quantity_time - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): pass # end class HHTime class BlockMechanism(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('type', 'BlockTypes', 0, 0, {'use': u'required'}), - MemberSpec_('species', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('block_concentration', 'Nml2Quantity_concentration', 0, 0, {'use': u'required'}), - MemberSpec_('scaling_conc', 'Nml2Quantity_concentration', 0, 0, {'use': u'required'}), - MemberSpec_('scaling_volt', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), + MemberSpec_('type', 'BlockTypes', 0, 0, {'use': 'required', 'name': 'type'}), + MemberSpec_('species', 'NmlId', 0, 0, {'use': 'required', 'name': 'species'}), + MemberSpec_('block_concentration', 'Nml2Quantity_concentration', 0, 0, {'use': 'required', 'name': 'block_concentration'}), + 
MemberSpec_('scaling_conc', 'Nml2Quantity_concentration', 0, 0, {'use': 'required', 'name': 'scaling_conc'}), + MemberSpec_('scaling_volt', 'Nml2Quantity_voltage', 0, 0, {'use': 'required', 'name': 'scaling_volt'}), ] subclass = None superclass = None - def __init__(self, type=None, species=None, block_concentration=None, scaling_conc=None, scaling_volt=None, **kwargs_): + def __init__(self, type=None, species=None, block_concentration=None, scaling_conc=None, scaling_volt=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None self.type = _cast(None, type) + self.type_nsprefix_ = None self.species = _cast(None, species) + self.species_nsprefix_ = None self.block_concentration = _cast(None, block_concentration) + self.block_concentration_nsprefix_ = None self.scaling_conc = _cast(None, scaling_conc) + self.scaling_conc_nsprefix_ = None self.scaling_volt = _cast(None, scaling_volt) + self.scaling_volt_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -3024,40 +4079,77 @@ def factory(*args_, **kwargs_): else: return BlockMechanism(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_type(self): + return self.type + def set_type(self, type): + self.type = type + def get_species(self): + return self.species + def set_species(self, species): + self.species = species + def get_blockConcentration(self): + return self.block_concentration + def set_blockConcentration(self, block_concentration): + self.block_concentration = block_concentration + def get_scalingConc(self): + return self.scaling_conc + def set_scalingConc(self, scaling_conc): + self.scaling_conc = scaling_conc + def get_scalingVolt(self): + return self.scaling_volt + def set_scalingVolt(self, scaling_volt): + self.scaling_volt = scaling_volt def validate_BlockTypes(self, value): # Validate type BlockTypes, a restriction on xs:string. - if value is not None and Validate_simpletypes_: - value = str(value) + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False + value = value enumerations = ['voltageConcDepBlockMechanism'] - enumeration_respectee = False - for enum in enumerations: - if value == enum: - enumeration_respectee = True - break - if not enumeration_respectee: - warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on BlockTypes' % {"value" : value.encode("utf-8")} ) + if value not in enumerations: + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on BlockTypes' % {"value" : encode_str_2_3(value), "lineno": lineno} ) + result = False def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_NmlId_patterns_, )) + validate_NmlId_patterns_ = [['^([a-zA-Z_][a-zA-Z0-9_]*)$']] def validate_Nml2Quantity_concentration(self, value): # Validate type Nml2Quantity_concentration, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_concentration_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_concentration_patterns_, )) - validate_Nml2Quantity_concentration_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(mol_per_m3|mol_per_cm3|M|mM)$']] + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_concentration_patterns_, )) + validate_Nml2Quantity_concentration_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(mol_per_m3|mol_per_cm3|M|mM))$']] def validate_Nml2Quantity_voltage(self, value): # Validate type Nml2Quantity_voltage, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_voltage_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_voltage_patterns_, )) - validate_Nml2Quantity_voltage_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_voltage_patterns_, )) + validate_Nml2Quantity_voltage_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV))$']] + def _hasContent(self): if ( ): @@ -3072,44 +4164,50 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='B eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'BlockMechanism': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BlockMechanism') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BlockMechanism') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BlockMechanism', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BlockMechanism', pretty_print=pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BlockMechanism'): + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BlockMechanism'): if self.type is not None and 'type' not in already_processed: already_processed.add('type') - outfile.write(' type=%s' % (quote_attrib(self.type), )) + outfile.write(' type=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.type), input_name='type')), )) if self.species is not None and 'species' not in already_processed: already_processed.add('species') - outfile.write(' species=%s' % (quote_attrib(self.species), )) + outfile.write(' species=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.species), input_name='species')), )) if self.block_concentration is not None and 'block_concentration' not in already_processed: already_processed.add('block_concentration') - outfile.write(' blockConcentration=%s' % (quote_attrib(self.block_concentration), )) + outfile.write(' blockConcentration=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.block_concentration), input_name='blockConcentration')), )) if self.scaling_conc is not None and 'scaling_conc' not in already_processed: already_processed.add('scaling_conc') - outfile.write(' scalingConc=%s' % 
(quote_attrib(self.scaling_conc), )) + outfile.write(' scalingConc=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.scaling_conc), input_name='scalingConc')), )) if self.scaling_volt is not None and 'scaling_volt' not in already_processed: already_processed.add('scaling_volt') - outfile.write(' scalingVolt=%s' % (quote_attrib(self.scaling_volt), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BlockMechanism', fromsubclass_=False, pretty_print=True): + outfile.write(' scalingVolt=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.scaling_volt), input_name='scalingVolt')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BlockMechanism', fromsubclass_=False, pretty_print=True): pass - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('type', node) if value is not None and 'type' not in already_processed: already_processed.add('type') @@ -3135,27 +4233,35 @@ def buildAttributes(self, node, attrs, already_processed): already_processed.add('scalingVolt') self.scaling_volt = value self.validate_Nml2Quantity_voltage(self.scaling_volt) # validate type Nml2Quantity_voltage - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): pass # end class BlockMechanism class PlasticityMechanism(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('type', 'PlasticityTypes', 0, 0, {'use': u'required'}), - MemberSpec_('init_release_prob', 'ZeroToOne', 0, 0, {'use': u'required'}), - MemberSpec_('tau_rec', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('tau_fac', 'Nml2Quantity_time', 0, 1, {'use': u'optional'}), + MemberSpec_('type', 'PlasticityTypes', 0, 0, {'use': 'required', 'name': 'type'}), + MemberSpec_('init_release_prob', 'ZeroToOne', 0, 0, {'use': 'required', 'name': 'init_release_prob'}), + MemberSpec_('tau_rec', 'Nml2Quantity_time', 0, 0, {'use': 'required', 'name': 'tau_rec'}), + MemberSpec_('tau_fac', 'Nml2Quantity_time', 0, 1, {'use': 'optional', 'name': 'tau_fac'}), ] subclass = None superclass = None - def __init__(self, type=None, init_release_prob=None, tau_rec=None, tau_fac=None, **kwargs_): + def __init__(self, type=None, init_release_prob=None, tau_rec=None, tau_fac=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None self.type = _cast(None, type) + self.type_nsprefix_ = None self.init_release_prob = _cast(float, init_release_prob) + self.init_release_prob_nsprefix_ = None self.tau_rec = _cast(None, tau_rec) + self.tau_rec_nsprefix_ = None self.tau_fac = _cast(None, tau_fac) + self.tau_fac_nsprefix_ = 
None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -3167,33 +4273,66 @@ def factory(*args_, **kwargs_): else: return PlasticityMechanism(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_type(self): + return self.type + def set_type(self, type): + self.type = type + def get_initReleaseProb(self): + return self.init_release_prob + def set_initReleaseProb(self, init_release_prob): + self.init_release_prob = init_release_prob + def get_tauRec(self): + return self.tau_rec + def set_tauRec(self, tau_rec): + self.tau_rec = tau_rec + def get_tauFac(self): + return self.tau_fac + def set_tauFac(self, tau_fac): + self.tau_fac = tau_fac def validate_PlasticityTypes(self, value): # Validate type PlasticityTypes, a restriction on xs:string. - if value is not None and Validate_simpletypes_: - value = str(value) + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False + value = value enumerations = ['tsodyksMarkramDepMechanism', 'tsodyksMarkramDepFacMechanism'] - enumeration_respectee = False - for enum in enumerations: - if value == enum: - enumeration_respectee = True - break - if not enumeration_respectee: - warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on PlasticityTypes' % {"value" : value.encode("utf-8")} ) + if value not in enumerations: + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on PlasticityTypes' % {"value" : encode_str_2_3(value), "lineno": lineno} ) + result = False def validate_ZeroToOne(self, value): # Validate type ZeroToOne, a restriction on xs:float. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, float): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (float)' % {"value": value, "lineno": lineno, }) + return False if value < 0: - warnings_.warn('Value "%(value)s" does not match xsd minInclusive restriction on ZeroToOne' % {"value" : value} ) + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd minInclusive restriction on ZeroToOne' % {"value": value, "lineno": lineno} ) + result = False if value > 1: - warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on ZeroToOne' % {"value" : value} ) + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd maxInclusive restriction on ZeroToOne' % {"value": value, "lineno": lineno} ) + result = False def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_time_patterns_, )) + validate_Nml2Quantity_time_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms))$']] + def _hasContent(self): if ( ): @@ -3208,41 +4347,47 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='P eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'PlasticityMechanism': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PlasticityMechanism') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PlasticityMechanism') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PlasticityMechanism', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PlasticityMechanism', pretty_print=pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PlasticityMechanism'): + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PlasticityMechanism'): if self.type is not None and 'type' not in already_processed: already_processed.add('type') - outfile.write(' type=%s' % (quote_attrib(self.type), )) + outfile.write(' type=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.type), input_name='type')), )) if self.init_release_prob is not None and 'init_release_prob' not in already_processed: already_processed.add('init_release_prob') - outfile.write(' initReleaseProb=%s' % (quote_attrib(self.init_release_prob), )) + outfile.write(' initReleaseProb="%s"' % self.gds_format_float(self.init_release_prob, input_name='initReleaseProb')) if self.tau_rec is not None and 'tau_rec' not in already_processed: already_processed.add('tau_rec') - outfile.write(' tauRec=%s' % (quote_attrib(self.tau_rec), )) + outfile.write(' tauRec=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.tau_rec), input_name='tauRec')), )) if self.tau_fac is not None and 'tau_fac' not in already_processed: already_processed.add('tau_fac') - outfile.write(' tauFac=%s' % (quote_attrib(self.tau_fac), )) - def exportChildren(self, outfile, level, 
namespaceprefix_='', namespacedef_='', name_='PlasticityMechanism', fromsubclass_=False, pretty_print=True): + outfile.write(' tauFac=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.tau_fac), input_name='tauFac')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PlasticityMechanism', fromsubclass_=False, pretty_print=True): pass - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('type', node) if value is not None and 'type' not in already_processed: already_processed.add('type') @@ -3251,10 +4396,8 @@ def buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('initReleaseProb', node) if value is not None and 'initReleaseProb' not in already_processed: already_processed.add('initReleaseProb') - try: - self.init_release_prob = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (initReleaseProb): %s' % exp) + value = self.gds_parse_float(value, node, 'initReleaseProb') + self.init_release_prob = value self.validate_ZeroToOne(self.init_release_prob) # validate type ZeroToOne value = find_attr_value_('tauRec', node) if value is not None and 'tauRec' not in already_processed: @@ -3266,23 +4409,29 @@ def buildAttributes(self, node, attrs, already_processed): already_processed.add('tauFac') self.tau_fac = value self.validate_Nml2Quantity_time(self.tau_fac) # validate type Nml2Quantity_time - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): pass # end class PlasticityMechanism class SegmentParent(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('segments', 'NonNegativeInteger', 0, 0, {'use': u'required'}), - MemberSpec_('fraction_along', 'ZeroToOne', 0, 1, {'use': u'optional'}), + MemberSpec_('segments', 'NonNegativeInteger', 0, 0, {'use': 'required', 'name': 'segments'}), + MemberSpec_('fraction_along', 'ZeroToOne', 0, 1, {'use': 'optional', 'name': 'fraction_along'}), ] subclass = None superclass = None - def __init__(self, segments=None, fraction_along='1', **kwargs_): + def __init__(self, segments=None, fraction_along='1', gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None self.segments = _cast(int, segments) + self.segments_nsprefix_ = None self.fraction_along = _cast(float, fraction_along) + self.fraction_along_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -3294,18 +4443,42 @@ def factory(*args_, **kwargs_): else: return SegmentParent(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return 
self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_segment(self): + return self.segments + def set_segment(self, segments): + self.segments = segments + def get_fractionAlong(self): + return self.fraction_along + def set_fractionAlong(self, fraction_along): + self.fraction_along = fraction_along def validate_NonNegativeInteger(self, value): # Validate type NonNegativeInteger, a restriction on xs:nonNegativeInteger. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, int): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (int)' % {"value": value, "lineno": lineno, }) + return False pass def validate_ZeroToOne(self, value): # Validate type ZeroToOne, a restriction on xs:float. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, float): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (float)' % {"value": value, "lineno": lineno, }) + return False if value < 0: - warnings_.warn('Value "%(value)s" does not match xsd minInclusive restriction on ZeroToOne' % {"value" : value} ) + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd minInclusive restriction on ZeroToOne' % {"value": value, "lineno": lineno} ) + result = False if value > 1: - warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on ZeroToOne' % {"value" : value} ) - def hasContent_(self): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd maxInclusive restriction on ZeroToOne' % {"value": value, "lineno": lineno} ) + result = False + def _hasContent(self): if ( ): @@ -3320,75 +4493,86 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='S eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'SegmentParent': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SegmentParent') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SegmentParent') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SegmentParent', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SegmentParent', pretty_print=pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SegmentParent'): + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SegmentParent'): if self.segments is not None and 'segments' not in already_processed: already_processed.add('segments') - outfile.write(' 
segment=%s' % (quote_attrib(self.segments), )) + outfile.write(' segment="%s"' % self.gds_format_integer(self.segments, input_name='segment')) if self.fraction_along != 1 and 'fraction_along' not in already_processed: already_processed.add('fraction_along') - outfile.write(' fractionAlong=%s' % (quote_attrib(self.fraction_along), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SegmentParent', fromsubclass_=False, pretty_print=True): + outfile.write(' fractionAlong="%s"' % self.gds_format_float(self.fraction_along, input_name='fractionAlong')) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SegmentParent', fromsubclass_=False, pretty_print=True): pass - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('segment', node) if value is not None and 'segment' not in already_processed: already_processed.add('segment') - try: - self.segments = int(value) - except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + self.segments = self.gds_parse_integer(value, node, 'segment') if self.segments < 0: raise_parse_error(node, 'Invalid NonNegativeInteger') self.validate_NonNegativeInteger(self.segments) # validate type NonNegativeInteger value = find_attr_value_('fractionAlong', node) if value is not None and 'fractionAlong' not in already_processed: already_processed.add('fractionAlong') - try: - self.fraction_along = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (fractionAlong): %s' % exp) + value = self.gds_parse_float(value, node, 'fractionAlong') + self.fraction_along = value self.validate_ZeroToOne(self.fraction_along) # validate type ZeroToOne - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): pass # end class SegmentParent class Point3DWithDiam(GeneratedsSuper): - """A 3D point with diameter.""" + """Point3DWithDiam -- A 3D point with diameter. 
+ + """ + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('x', 'xs:double', 0, 0, {'use': u'required'}), - MemberSpec_('y', 'xs:double', 0, 0, {'use': u'required'}), - MemberSpec_('z', 'xs:double', 0, 0, {'use': u'required'}), - MemberSpec_('diameter', 'DoubleGreaterThanZero', 0, 0, {'use': u'required'}), + MemberSpec_('x', 'xs:double', 0, 0, {'use': 'required', 'name': 'x'}), + MemberSpec_('y', 'xs:double', 0, 0, {'use': 'required', 'name': 'y'}), + MemberSpec_('z', 'xs:double', 0, 0, {'use': 'required', 'name': 'z'}), + MemberSpec_('diameter', 'DoubleGreaterThanZero', 0, 0, {'use': 'required', 'name': 'diameter'}), ] subclass = None superclass = None - def __init__(self, x=None, y=None, z=None, diameter=None, **kwargs_): + def __init__(self, x=None, y=None, z=None, diameter=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None self.x = _cast(float, x) + self.x_nsprefix_ = None self.y = _cast(float, y) + self.y_nsprefix_ = None self.z = _cast(float, z) + self.z_nsprefix_ = None self.diameter = _cast(float, diameter) + self.diameter_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -3400,12 +4584,38 @@ def factory(*args_, **kwargs_): else: return Point3DWithDiam(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_x(self): + return self.x + def set_x(self, x): + self.x = x + def get_y(self): + return self.y + def set_y(self, y): + self.y = y + def get_z(self): + return self.z + def set_z(self, z): + self.z = z + def get_diameter(self): + return self.diameter + def set_diameter(self, diameter): + self.diameter = diameter def validate_DoubleGreaterThanZero(self, value): # Validate type DoubleGreaterThanZero, a restriction on xs:double. 
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, float): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (float)' % {"value": value, "lineno": lineno, }) + return False if value <= 0: - warnings_.warn('Value "%(value)s" does not match xsd minExclusive restriction on DoubleGreaterThanZero' % {"value" : value} ) - def hasContent_(self): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd minExclusive restriction on DoubleGreaterThanZero' % {"value": value, "lineno": lineno} ) + result = False + def _hasContent(self): if ( ): @@ -3420,19 +4630,21 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='P eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'Point3DWithDiam': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Point3DWithDiam') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Point3DWithDiam') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Point3DWithDiam', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Point3DWithDiam', pretty_print=pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Point3DWithDiam'): + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Point3DWithDiam'): if self.x is not None and 'x' not in already_processed: already_processed.add('x') outfile.write(' x="%s"' % self.gds_format_double(self.x, input_name='x')) @@ -3444,47 +4656,43 @@ def exportAttributes(self, outfile, level, already_processed, namespaceprefix_=' outfile.write(' z="%s"' % self.gds_format_double(self.z, input_name='z')) if self.diameter is not None and 'diameter' not in already_processed: already_processed.add('diameter') - outfile.write(' diameter=%s' % (quote_attrib(self.diameter), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Point3DWithDiam', fromsubclass_=False, pretty_print=True): + outfile.write(' diameter="%s"' % self.gds_format_double(self.diameter, input_name='diameter')) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Point3DWithDiam', fromsubclass_=False, pretty_print=True): pass - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, 
node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('x', node) if value is not None and 'x' not in already_processed: already_processed.add('x') - try: - self.x = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (x): %s' % exp) + value = self.gds_parse_double(value, node, 'x') + self.x = value value = find_attr_value_('y', node) if value is not None and 'y' not in already_processed: already_processed.add('y') - try: - self.y = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (y): %s' % exp) + value = self.gds_parse_double(value, node, 'y') + self.y = value value = find_attr_value_('z', node) if value is not None and 'z' not in already_processed: already_processed.add('z') - try: - self.z = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (z): %s' % exp) + value = self.gds_parse_double(value, node, 'z') + self.z = value value = find_attr_value_('diameter', node) if value is not None and 'diameter' not in already_processed: already_processed.add('diameter') - try: - self.diameter = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (diameter): %s' % exp) + value = self.gds_parse_double(value, node, 'diameter') + self.diameter = value self.validate_DoubleGreaterThanZero(self.diameter) # validate type DoubleGreaterThanZero - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): pass def __str__(self): @@ -3518,15 +4726,20 @@ def distance_to(self, other_3d_point): class ProximalDetails(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('translation_start', 'xs:double', 0, 0, {'use': u'required'}), + MemberSpec_('translation_start', 'xs:double', 0, 0, {'use': 'required', 'name': 'translation_start'}), ] subclass = None superclass = None - def __init__(self, translation_start=None, **kwargs_): + def __init__(self, translation_start=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None self.translation_start = _cast(float, translation_start) + self.translation_start_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -3538,7 +4751,15 @@ def factory(*args_, **kwargs_): else: return ProximalDetails(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_translationStart(self): + return self.translation_start + def set_translationStart(self, translation_start): + self.translation_start = translation_start + def _hasContent(self): if ( ): @@ -3553,54 +4774,63 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='P eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'ProximalDetails': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, 
level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ProximalDetails') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ProximalDetails') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ProximalDetails', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ProximalDetails', pretty_print=pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ProximalDetails'): + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ProximalDetails'): if self.translation_start is not None and 'translation_start' not in already_processed: already_processed.add('translation_start') outfile.write(' translationStart="%s"' % self.gds_format_double(self.translation_start, input_name='translationStart')) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ProximalDetails', fromsubclass_=False, pretty_print=True): + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ProximalDetails', fromsubclass_=False, pretty_print=True): pass - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('translationStart', node) if value is not None and 'translationStart' not in already_processed: already_processed.add('translationStart') - try: - self.translation_start = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (translationStart): %s' % exp) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + value = self.gds_parse_double(value, node, 'translationStart') + self.translation_start = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): pass # end class ProximalDetails class DistalDetails(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('normalization_end', 'xs:double', 0, 0, {'use': u'required'}), + MemberSpec_('normalization_end', 'xs:double', 0, 0, {'use': 'required', 'name': 'normalization_end'}), ] subclass = None superclass = None - def __init__(self, normalization_end=None, **kwargs_): + def __init__(self, normalization_end=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None self.normalization_end = _cast(float, normalization_end) + 
self.normalization_end_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -3612,7 +4842,15 @@ def factory(*args_, **kwargs_): else: return DistalDetails(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_normalizationEnd(self): + return self.normalization_end + def set_normalizationEnd(self, normalization_end): + self.normalization_end = normalization_end + def _hasContent(self): if ( ): @@ -3627,54 +4865,63 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='D eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'DistalDetails': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='DistalDetails') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='DistalDetails') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='DistalDetails', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='DistalDetails', pretty_print=pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='DistalDetails'): + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='DistalDetails'): if self.normalization_end is not None and 'normalization_end' not in already_processed: already_processed.add('normalization_end') outfile.write(' normalizationEnd="%s"' % self.gds_format_double(self.normalization_end, input_name='normalizationEnd')) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='DistalDetails', fromsubclass_=False, pretty_print=True): + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='DistalDetails', fromsubclass_=False, pretty_print=True): pass - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('normalizationEnd', node) if value is not None and 'normalizationEnd' not in already_processed: already_processed.add('normalizationEnd') - try: - self.normalization_end = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (normalizationEnd): %s' % exp) - def 
buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + value = self.gds_parse_double(value, node, 'normalizationEnd') + self.normalization_end = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): pass # end class DistalDetails class Member(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('segments', 'NonNegativeInteger', 0, 0, {'use': u'required'}), + MemberSpec_('segments', 'NonNegativeInteger', 0, 0, {'use': 'required', 'name': 'segments'}), ] subclass = None superclass = None - def __init__(self, segments=None, **kwargs_): + def __init__(self, segments=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None self.segments = _cast(int, segments) + self.segments_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -3686,11 +4933,23 @@ def factory(*args_, **kwargs_): else: return Member(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_segment(self): + return self.segments + def set_segment(self, segments): + self.segments = segments def validate_NonNegativeInteger(self, value): # Validate type NonNegativeInteger, a restriction on xs:nonNegativeInteger. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, int): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (int)' % {"value": value, "lineno": lineno, }) + return False pass - def hasContent_(self): + def _hasContent(self): if ( ): @@ -3705,57 +4964,65 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='M eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'Member': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Member') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Member') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Member', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Member', pretty_print=pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Member'): + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Member'): if self.segments is not None and 'segments' not in already_processed: already_processed.add('segments') - outfile.write(' segment=%s' % (quote_attrib(self.segments), )) - def exportChildren(self, outfile, level, namespaceprefix_='', 
namespacedef_='', name_='Member', fromsubclass_=False, pretty_print=True): + outfile.write(' segment="%s"' % self.gds_format_integer(self.segments, input_name='segment')) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Member', fromsubclass_=False, pretty_print=True): pass - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('segment', node) if value is not None and 'segment' not in already_processed: already_processed.add('segment') - try: - self.segments = int(value) - except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + self.segments = self.gds_parse_integer(value, node, 'segment') if self.segments < 0: raise_parse_error(node, 'Invalid NonNegativeInteger') self.validate_NonNegativeInteger(self.segments) # validate type NonNegativeInteger - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): pass # end class Member class Include(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('segment_groups', 'NmlId', 0, 0, {'use': u'required'}), + MemberSpec_('segment_groups', 'NmlId', 0, 0, {'use': 'required', 'name': 'segment_groups'}), ] subclass = None superclass = None - def __init__(self, segment_groups=None, **kwargs_): + def __init__(self, segment_groups=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None self.segment_groups = _cast(None, segment_groups) + self.segment_groups_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -3767,14 +5034,26 @@ def factory(*args_, **kwargs_): else: return Include(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_segmentGroup(self): + return self.segment_groups + def set_segmentGroup(self, segment_groups): + self.segment_groups = segment_groups def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_NmlId_patterns_, )) + validate_NmlId_patterns_ = [['^([a-zA-Z_][a-zA-Z0-9_]*)$']] + def _hasContent(self): if ( ): @@ -3789,54 +5068,66 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='I eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'Include': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Include') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Include') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Include', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Include', pretty_print=pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Include'): + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Include'): if self.segment_groups is not None and 'segment_groups' not in already_processed: already_processed.add('segment_groups') - outfile.write(' segmentGroup=%s' % (quote_attrib(self.segment_groups), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Include', fromsubclass_=False, pretty_print=True): + outfile.write(' segmentGroup=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.segment_groups), input_name='segmentGroup')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Include', fromsubclass_=False, pretty_print=True): pass - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, 
already_processed): value = find_attr_value_('segmentGroup', node) if value is not None and 'segmentGroup' not in already_processed: already_processed.add('segmentGroup') self.segment_groups = value self.validate_NmlId(self.segment_groups) # validate type NmlId - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): pass # end class Include class Path(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('from_', 'SegmentEndPoint', 0, 1, {u'type': u'SegmentEndPoint', u'name': u'from', u'minOccurs': u'0'}, None), - MemberSpec_('to', 'SegmentEndPoint', 0, 1, {u'type': u'SegmentEndPoint', u'name': u'to', u'minOccurs': u'0'}, None), + MemberSpec_('from_', 'SegmentEndPoint', 0, 1, {'minOccurs': '0', 'name': 'from', 'type': 'SegmentEndPoint'}, None), + MemberSpec_('to', 'SegmentEndPoint', 0, 1, {'minOccurs': '0', 'name': 'to', 'type': 'SegmentEndPoint'}, None), ] subclass = None superclass = None - def __init__(self, from_=None, to=None, **kwargs_): + def __init__(self, from_=None, to=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None self.from_ = from_ + self.from__nsprefix_ = None self.to = to + self.to_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -3848,7 +5139,19 @@ def factory(*args_, **kwargs_): else: return Path(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_from(self): + return self.from_ + def set_from(self, from_): + self.from_ = from_ + def get_to(self): + return self.to + def set_to(self, to): + self.to = to + def _hasContent(self): if ( self.from_ is not None or self.to is not None @@ -3856,7 +5159,7 @@ def hasContent_(self): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Path', pretty_print=True): + def export(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='Path', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('Path') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ @@ -3864,65 +5167,79 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='P eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'Path': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Path') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Path') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Path', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Path', 
pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Path'): + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Path'): pass - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Path', fromsubclass_=False, pretty_print=True): + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='Path', fromsubclass_=False, pretty_print=True): if pretty_print: eol_ = '\n' else: eol_ = '' if self.from_ is not None: + namespaceprefix_ = self.from__nsprefix_ + ':' if (UseCapturedNS_ and self.from__nsprefix_) else '' self.from_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='from', pretty_print=pretty_print) if self.to is not None: + namespaceprefix_ = self.to_nsprefix_ + ':' if (UseCapturedNS_ and self.to_nsprefix_) else '' self.to.export(outfile, level, namespaceprefix_, namespacedef_='', name_='to', pretty_print=pretty_print) - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): pass - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'from': obj_ = SegmentEndPoint.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.from_ = obj_ obj_.original_tagname_ = 'from' elif nodeName_ == 'to': obj_ = SegmentEndPoint.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.to = obj_ obj_.original_tagname_ = 'to' # end class Path class SubTree(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('from_', 'SegmentEndPoint', 0, 1, {u'type': u'SegmentEndPoint', u'name': u'from', u'minOccurs': u'0'}, 3), - MemberSpec_('to', 'SegmentEndPoint', 0, 1, {u'type': u'SegmentEndPoint', u'name': u'to', u'minOccurs': u'0'}, 3), + MemberSpec_('from_', 'SegmentEndPoint', 0, 1, {'minOccurs': '0', 'name': 'from', 'type': 'SegmentEndPoint'}, 3), + MemberSpec_('to', 'SegmentEndPoint', 0, 1, {'minOccurs': '0', 'name': 'to', 'type': 'SegmentEndPoint'}, 3), ] subclass = None superclass = None - def __init__(self, from_=None, to=None, **kwargs_): + def __init__(self, from_=None, to=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None self.from_ = from_ + self.from__nsprefix_ = None self.to = to + self.to_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = 
getSubclassFromModule_( @@ -3934,7 +5251,19 @@ def factory(*args_, **kwargs_): else: return SubTree(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_from(self): + return self.from_ + def set_from(self, from_): + self.from_ = from_ + def get_to(self): + return self.to + def set_to(self, to): + self.to = to + def _hasContent(self): if ( self.from_ is not None or self.to is not None @@ -3942,7 +5271,7 @@ def hasContent_(self): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SubTree', pretty_print=True): + def export(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='SubTree', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('SubTree') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ @@ -3950,63 +5279,76 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='S eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'SubTree': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SubTree') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SubTree') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SubTree', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SubTree', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SubTree'): + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SubTree'): pass - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SubTree', fromsubclass_=False, pretty_print=True): + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='SubTree', fromsubclass_=False, pretty_print=True): if pretty_print: eol_ = '\n' else: eol_ = '' if self.from_ is not None: + namespaceprefix_ = self.from__nsprefix_ + ':' if (UseCapturedNS_ and self.from__nsprefix_) else '' self.from_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='from', pretty_print=pretty_print) if self.to is not None: + namespaceprefix_ = self.to_nsprefix_ + ':' if (UseCapturedNS_ and self.to_nsprefix_) else '' self.to.export(outfile, level, namespaceprefix_, namespacedef_='', name_='to', pretty_print=pretty_print) - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + 
self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): pass - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'from': obj_ = SegmentEndPoint.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.from_ = obj_ obj_.original_tagname_ = 'from' elif nodeName_ == 'to': obj_ = SegmentEndPoint.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.to = obj_ obj_.original_tagname_ = 'to' # end class SubTree class SegmentEndPoint(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('segments', 'NonNegativeInteger', 0, 0, {'use': u'required'}), + MemberSpec_('segments', 'NonNegativeInteger', 0, 0, {'use': 'required', 'name': 'segments'}), ] subclass = None superclass = None - def __init__(self, segments=None, **kwargs_): + def __init__(self, segments=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None self.segments = _cast(int, segments) + self.segments_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -4018,11 +5360,23 @@ def factory(*args_, **kwargs_): else: return SegmentEndPoint(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_segment(self): + return self.segments + def set_segment(self, segments): + self.segments = segments def validate_NonNegativeInteger(self, value): # Validate type NonNegativeInteger, a restriction on xs:nonNegativeInteger. 
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, int): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (int)' % {"value": value, "lineno": lineno, }) + return False pass - def hasContent_(self): + def _hasContent(self): if ( ): @@ -4037,115 +5391,134 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='S eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'SegmentEndPoint': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SegmentEndPoint') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SegmentEndPoint') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SegmentEndPoint', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SegmentEndPoint', pretty_print=pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SegmentEndPoint'): + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SegmentEndPoint'): if self.segments is not None and 'segments' not in already_processed: already_processed.add('segments') - outfile.write(' segment=%s' % (quote_attrib(self.segments), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SegmentEndPoint', fromsubclass_=False, pretty_print=True): + outfile.write(' segment="%s"' % self.gds_format_integer(self.segments, input_name='segment')) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SegmentEndPoint', fromsubclass_=False, pretty_print=True): pass - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('segment', node) if value is not None and 'segment' not in already_processed: already_processed.add('segment') - try: - self.segments = int(value) - except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + self.segments = self.gds_parse_integer(value, node, 'segment') if self.segments < 0: raise_parse_error(node, 'Invalid NonNegativeInteger') self.validate_NonNegativeInteger(self.segments) # validate type 
NonNegativeInteger - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): pass # end class SegmentEndPoint class MembraneProperties(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('channel_populations', 'ChannelPopulation', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ChannelPopulation', u'name': u'channelPopulation', u'minOccurs': u'0'}, None), - MemberSpec_('channel_densities', 'ChannelDensity', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ChannelDensity', u'name': u'channelDensity', u'minOccurs': u'0'}, None), - MemberSpec_('channel_density_v_shifts', 'ChannelDensityVShift', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ChannelDensityVShift', u'name': u'channelDensityVShift', u'minOccurs': u'0'}, None), - MemberSpec_('channel_density_nernsts', 'ChannelDensityNernst', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ChannelDensityNernst', u'name': u'channelDensityNernst', u'minOccurs': u'0'}, None), - MemberSpec_('channel_density_ghks', 'ChannelDensityGHK', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ChannelDensityGHK', u'name': u'channelDensityGHK', u'minOccurs': u'0'}, None), - MemberSpec_('channel_density_ghk2s', 'ChannelDensityGHK2', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ChannelDensityGHK2', u'name': u'channelDensityGHK2', u'minOccurs': u'0'}, None), - MemberSpec_('channel_density_non_uniforms', 'ChannelDensityNonUniform', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ChannelDensityNonUniform', u'name': u'channelDensityNonUniform', u'minOccurs': u'0'}, None), - MemberSpec_('channel_density_non_uniform_nernsts', 'ChannelDensityNonUniformNernst', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ChannelDensityNonUniformNernst', u'name': u'channelDensityNonUniformNernst', u'minOccurs': u'0'}, None), - MemberSpec_('channel_density_non_uniform_ghks', 'ChannelDensityNonUniformGHK', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ChannelDensityNonUniformGHK', u'name': u'channelDensityNonUniformGHK', u'minOccurs': u'0'}, None), - MemberSpec_('spike_threshes', 'SpikeThresh', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'SpikeThresh', u'name': u'spikeThresh', u'minOccurs': u'0'}, None), - MemberSpec_('specific_capacitances', 'SpecificCapacitance', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'SpecificCapacitance', u'name': u'specificCapacitance', u'minOccurs': u'0'}, None), - MemberSpec_('init_memb_potentials', 'InitMembPotential', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'InitMembPotential', u'name': u'initMembPotential', u'minOccurs': u'0'}, None), + MemberSpec_('channel_populations', 'ChannelPopulation', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'channelPopulation', 'type': 'ChannelPopulation'}, None), + MemberSpec_('channel_densities', 'ChannelDensity', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'channelDensity', 'type': 'ChannelDensity'}, None), + MemberSpec_('channel_density_v_shifts', 'ChannelDensityVShift', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'channelDensityVShift', 'type': 'ChannelDensityVShift'}, None), + MemberSpec_('channel_density_nernsts', 'ChannelDensityNernst', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'channelDensityNernst', 'type': 'ChannelDensityNernst'}, None), + MemberSpec_('channel_density_ghks', 'ChannelDensityGHK', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'channelDensityGHK', 'type': 'ChannelDensityGHK'}, None), + 
MemberSpec_('channel_density_ghk2s', 'ChannelDensityGHK2', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'channelDensityGHK2', 'type': 'ChannelDensityGHK2'}, None), + MemberSpec_('channel_density_non_uniforms', 'ChannelDensityNonUniform', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'channelDensityNonUniform', 'type': 'ChannelDensityNonUniform'}, None), + MemberSpec_('channel_density_non_uniform_nernsts', 'ChannelDensityNonUniformNernst', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'channelDensityNonUniformNernst', 'type': 'ChannelDensityNonUniformNernst'}, None), + MemberSpec_('channel_density_non_uniform_ghks', 'ChannelDensityNonUniformGHK', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'channelDensityNonUniformGHK', 'type': 'ChannelDensityNonUniformGHK'}, None), + MemberSpec_('spike_threshes', 'SpikeThresh', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'spikeThresh', 'type': 'SpikeThresh'}, None), + MemberSpec_('specific_capacitances', 'SpecificCapacitance', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'specificCapacitance', 'type': 'SpecificCapacitance'}, None), + MemberSpec_('init_memb_potentials', 'InitMembPotential', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'initMembPotential', 'type': 'InitMembPotential'}, None), ] subclass = None superclass = None - def __init__(self, channel_populations=None, channel_densities=None, channel_density_v_shifts=None, channel_density_nernsts=None, channel_density_ghks=None, channel_density_ghk2s=None, channel_density_non_uniforms=None, channel_density_non_uniform_nernsts=None, channel_density_non_uniform_ghks=None, spike_threshes=None, specific_capacitances=None, init_memb_potentials=None, extensiontype_=None, **kwargs_): + def __init__(self, channel_populations=None, channel_densities=None, channel_density_v_shifts=None, channel_density_nernsts=None, channel_density_ghks=None, channel_density_ghk2s=None, channel_density_non_uniforms=None, channel_density_non_uniform_nernsts=None, channel_density_non_uniform_ghks=None, spike_threshes=None, specific_capacitances=None, init_memb_potentials=None, extensiontype_=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None if channel_populations is None: self.channel_populations = [] else: self.channel_populations = channel_populations + self.channel_populations_nsprefix_ = None if channel_densities is None: self.channel_densities = [] else: self.channel_densities = channel_densities + self.channel_densities_nsprefix_ = None if channel_density_v_shifts is None: self.channel_density_v_shifts = [] else: self.channel_density_v_shifts = channel_density_v_shifts + self.channel_density_v_shifts_nsprefix_ = None if channel_density_nernsts is None: self.channel_density_nernsts = [] else: self.channel_density_nernsts = channel_density_nernsts + self.channel_density_nernsts_nsprefix_ = None if channel_density_ghks is None: self.channel_density_ghks = [] else: self.channel_density_ghks = channel_density_ghks + self.channel_density_ghks_nsprefix_ = None if channel_density_ghk2s is None: self.channel_density_ghk2s = [] else: self.channel_density_ghk2s = channel_density_ghk2s + self.channel_density_ghk2s_nsprefix_ = None if channel_density_non_uniforms is None: self.channel_density_non_uniforms = [] else: self.channel_density_non_uniforms = 
channel_density_non_uniforms + self.channel_density_non_uniforms_nsprefix_ = None if channel_density_non_uniform_nernsts is None: self.channel_density_non_uniform_nernsts = [] else: self.channel_density_non_uniform_nernsts = channel_density_non_uniform_nernsts + self.channel_density_non_uniform_nernsts_nsprefix_ = None if channel_density_non_uniform_ghks is None: self.channel_density_non_uniform_ghks = [] else: self.channel_density_non_uniform_ghks = channel_density_non_uniform_ghks + self.channel_density_non_uniform_ghks_nsprefix_ = None if spike_threshes is None: self.spike_threshes = [] else: self.spike_threshes = spike_threshes + self.spike_threshes_nsprefix_ = None if specific_capacitances is None: self.specific_capacitances = [] else: self.specific_capacitances = specific_capacitances + self.specific_capacitances_nsprefix_ = None if init_memb_potentials is None: self.init_memb_potentials = [] else: self.init_memb_potentials = init_memb_potentials + self.init_memb_potentials_nsprefix_ = None self.extensiontype_ = extensiontype_ def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: @@ -4158,7 +5531,133 @@ def factory(*args_, **kwargs_): else: return MembraneProperties(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_channelPopulation(self): + return self.channel_populations + def set_channelPopulation(self, channel_populations): + self.channel_populations = channel_populations + def add_channelPopulation(self, value): + self.channel_populations.append(value) + def insert_channelPopulation_at(self, index, value): + self.channel_populations.insert(index, value) + def replace_channelPopulation_at(self, index, value): + self.channel_populations[index] = value + def get_channelDensity(self): + return self.channel_densities + def set_channelDensity(self, channel_densities): + self.channel_densities = channel_densities + def add_channelDensity(self, value): + self.channel_densities.append(value) + def insert_channelDensity_at(self, index, value): + self.channel_densities.insert(index, value) + def replace_channelDensity_at(self, index, value): + self.channel_densities[index] = value + def get_channelDensityVShift(self): + return self.channel_density_v_shifts + def set_channelDensityVShift(self, channel_density_v_shifts): + self.channel_density_v_shifts = channel_density_v_shifts + def add_channelDensityVShift(self, value): + self.channel_density_v_shifts.append(value) + def insert_channelDensityVShift_at(self, index, value): + self.channel_density_v_shifts.insert(index, value) + def replace_channelDensityVShift_at(self, index, value): + self.channel_density_v_shifts[index] = value + def get_channelDensityNernst(self): + return self.channel_density_nernsts + def set_channelDensityNernst(self, channel_density_nernsts): + self.channel_density_nernsts = channel_density_nernsts + def add_channelDensityNernst(self, value): + self.channel_density_nernsts.append(value) + def insert_channelDensityNernst_at(self, index, value): + self.channel_density_nernsts.insert(index, value) + def replace_channelDensityNernst_at(self, index, value): + self.channel_density_nernsts[index] = value + def get_channelDensityGHK(self): + return self.channel_density_ghks + def set_channelDensityGHK(self, channel_density_ghks): + self.channel_density_ghks = channel_density_ghks + def add_channelDensityGHK(self, value): + 
self.channel_density_ghks.append(value) + def insert_channelDensityGHK_at(self, index, value): + self.channel_density_ghks.insert(index, value) + def replace_channelDensityGHK_at(self, index, value): + self.channel_density_ghks[index] = value + def get_channelDensityGHK2(self): + return self.channel_density_ghk2s + def set_channelDensityGHK2(self, channel_density_ghk2s): + self.channel_density_ghk2s = channel_density_ghk2s + def add_channelDensityGHK2(self, value): + self.channel_density_ghk2s.append(value) + def insert_channelDensityGHK2_at(self, index, value): + self.channel_density_ghk2s.insert(index, value) + def replace_channelDensityGHK2_at(self, index, value): + self.channel_density_ghk2s[index] = value + def get_channelDensityNonUniform(self): + return self.channel_density_non_uniforms + def set_channelDensityNonUniform(self, channel_density_non_uniforms): + self.channel_density_non_uniforms = channel_density_non_uniforms + def add_channelDensityNonUniform(self, value): + self.channel_density_non_uniforms.append(value) + def insert_channelDensityNonUniform_at(self, index, value): + self.channel_density_non_uniforms.insert(index, value) + def replace_channelDensityNonUniform_at(self, index, value): + self.channel_density_non_uniforms[index] = value + def get_channelDensityNonUniformNernst(self): + return self.channel_density_non_uniform_nernsts + def set_channelDensityNonUniformNernst(self, channel_density_non_uniform_nernsts): + self.channel_density_non_uniform_nernsts = channel_density_non_uniform_nernsts + def add_channelDensityNonUniformNernst(self, value): + self.channel_density_non_uniform_nernsts.append(value) + def insert_channelDensityNonUniformNernst_at(self, index, value): + self.channel_density_non_uniform_nernsts.insert(index, value) + def replace_channelDensityNonUniformNernst_at(self, index, value): + self.channel_density_non_uniform_nernsts[index] = value + def get_channelDensityNonUniformGHK(self): + return self.channel_density_non_uniform_ghks + def set_channelDensityNonUniformGHK(self, channel_density_non_uniform_ghks): + self.channel_density_non_uniform_ghks = channel_density_non_uniform_ghks + def add_channelDensityNonUniformGHK(self, value): + self.channel_density_non_uniform_ghks.append(value) + def insert_channelDensityNonUniformGHK_at(self, index, value): + self.channel_density_non_uniform_ghks.insert(index, value) + def replace_channelDensityNonUniformGHK_at(self, index, value): + self.channel_density_non_uniform_ghks[index] = value + def get_spikeThresh(self): + return self.spike_threshes + def set_spikeThresh(self, spike_threshes): + self.spike_threshes = spike_threshes + def add_spikeThresh(self, value): + self.spike_threshes.append(value) + def insert_spikeThresh_at(self, index, value): + self.spike_threshes.insert(index, value) + def replace_spikeThresh_at(self, index, value): + self.spike_threshes[index] = value + def get_specificCapacitance(self): + return self.specific_capacitances + def set_specificCapacitance(self, specific_capacitances): + self.specific_capacitances = specific_capacitances + def add_specificCapacitance(self, value): + self.specific_capacitances.append(value) + def insert_specificCapacitance_at(self, index, value): + self.specific_capacitances.insert(index, value) + def replace_specificCapacitance_at(self, index, value): + self.specific_capacitances[index] = value + def get_initMembPotential(self): + return self.init_memb_potentials + def set_initMembPotential(self, init_memb_potentials): + self.init_memb_potentials = 
init_memb_potentials + def add_initMembPotential(self, value): + self.init_memb_potentials.append(value) + def insert_initMembPotential_at(self, index, value): + self.init_memb_potentials.insert(index, value) + def replace_initMembPotential_at(self, index, value): + self.init_memb_potentials[index] = value + def get_extensiontype_(self): return self.extensiontype_ + def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ + def _hasContent(self): if ( self.channel_populations or self.channel_densities or @@ -4176,7 +5675,7 @@ def hasContent_(self): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='MembraneProperties', pretty_print=True): + def export(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='MembraneProperties', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('MembraneProperties') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ @@ -4184,146 +5683,173 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='M eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'MembraneProperties': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='MembraneProperties') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='MembraneProperties') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='MembraneProperties', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='MembraneProperties', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='MembraneProperties'): + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='MembraneProperties'): if self.extensiontype_ is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get(self.extensiontype_, '') + outfile.write(' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_)) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) pass - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='MembraneProperties', fromsubclass_=False, pretty_print=True): + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='MembraneProperties', fromsubclass_=False, pretty_print=True): if pretty_print: eol_ = '\n' else: eol_ = '' for channelPopulation_ in self.channel_populations: + namespaceprefix_ = self.channel_populations_nsprefix_ + ':' if (UseCapturedNS_ and 
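The regeneration adds explicit per-child accessors (get_*/set_*/add_*/insert_*_at/replace_*_at) for every list-valued member of MembraneProperties, named after the XML element rather than the mangled Python attribute. A minimal sketch of how downstream code might use them; the ChannelDensity keyword arguments shown here are assumed to match the existing libNeuroML API and are not part of this hunk:

    from neuroml.nml.nml import ChannelDensity, MembraneProperties

    mp = MembraneProperties()
    kd = ChannelDensity(id="kChans", ion_channel="kChan", ion="k",
                        cond_density="36 mS_per_cm2", erev="-77mV")  # assumed kwargs
    mp.add_channelDensity(kd)                # append via the new helper
    mp.insert_channelDensity_at(0, kd)       # positional insert
    mp.replace_channelDensity_at(0, kd)      # in-place replacement
    assert mp.get_channelDensity() is mp.channel_densities  # getter returns the list itself
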
self.channel_populations_nsprefix_) else '' channelPopulation_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='channelPopulation', pretty_print=pretty_print) for channelDensity_ in self.channel_densities: + namespaceprefix_ = self.channel_densities_nsprefix_ + ':' if (UseCapturedNS_ and self.channel_densities_nsprefix_) else '' channelDensity_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='channelDensity', pretty_print=pretty_print) for channelDensityVShift_ in self.channel_density_v_shifts: + namespaceprefix_ = self.channel_density_v_shifts_nsprefix_ + ':' if (UseCapturedNS_ and self.channel_density_v_shifts_nsprefix_) else '' channelDensityVShift_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='channelDensityVShift', pretty_print=pretty_print) for channelDensityNernst_ in self.channel_density_nernsts: + namespaceprefix_ = self.channel_density_nernsts_nsprefix_ + ':' if (UseCapturedNS_ and self.channel_density_nernsts_nsprefix_) else '' channelDensityNernst_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='channelDensityNernst', pretty_print=pretty_print) for channelDensityGHK_ in self.channel_density_ghks: + namespaceprefix_ = self.channel_density_ghks_nsprefix_ + ':' if (UseCapturedNS_ and self.channel_density_ghks_nsprefix_) else '' channelDensityGHK_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='channelDensityGHK', pretty_print=pretty_print) for channelDensityGHK2_ in self.channel_density_ghk2s: + namespaceprefix_ = self.channel_density_ghk2s_nsprefix_ + ':' if (UseCapturedNS_ and self.channel_density_ghk2s_nsprefix_) else '' channelDensityGHK2_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='channelDensityGHK2', pretty_print=pretty_print) for channelDensityNonUniform_ in self.channel_density_non_uniforms: + namespaceprefix_ = self.channel_density_non_uniforms_nsprefix_ + ':' if (UseCapturedNS_ and self.channel_density_non_uniforms_nsprefix_) else '' channelDensityNonUniform_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='channelDensityNonUniform', pretty_print=pretty_print) for channelDensityNonUniformNernst_ in self.channel_density_non_uniform_nernsts: + namespaceprefix_ = self.channel_density_non_uniform_nernsts_nsprefix_ + ':' if (UseCapturedNS_ and self.channel_density_non_uniform_nernsts_nsprefix_) else '' channelDensityNonUniformNernst_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='channelDensityNonUniformNernst', pretty_print=pretty_print) for channelDensityNonUniformGHK_ in self.channel_density_non_uniform_ghks: + namespaceprefix_ = self.channel_density_non_uniform_ghks_nsprefix_ + ':' if (UseCapturedNS_ and self.channel_density_non_uniform_ghks_nsprefix_) else '' channelDensityNonUniformGHK_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='channelDensityNonUniformGHK', pretty_print=pretty_print) for spikeThresh_ in self.spike_threshes: + namespaceprefix_ = self.spike_threshes_nsprefix_ + ':' if (UseCapturedNS_ and self.spike_threshes_nsprefix_) else '' spikeThresh_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='spikeThresh', pretty_print=pretty_print) for specificCapacitance_ in self.specific_capacitances: + namespaceprefix_ = self.specific_capacitances_nsprefix_ + ':' if (UseCapturedNS_ and self.specific_capacitances_nsprefix_) else '' specificCapacitance_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='specificCapacitance', pretty_print=pretty_print) for initMembPotential_ 
in self.init_memb_potentials: + namespaceprefix_ = self.init_memb_potentials_nsprefix_ + ':' if (UseCapturedNS_ and self.init_memb_potentials_nsprefix_) else '' initMembPotential_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='initMembPotential', pretty_print=pretty_print) - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('xsi:type', node) if value is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') self.extensiontype_ = value - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'channelPopulation': obj_ = ChannelPopulation.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.channel_populations.append(obj_) obj_.original_tagname_ = 'channelPopulation' elif nodeName_ == 'channelDensity': class_obj_ = self.get_class_obj_(child_, ChannelDensity) obj_ = class_obj_.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.channel_densities.append(obj_) obj_.original_tagname_ = 'channelDensity' elif nodeName_ == 'channelDensityVShift': obj_ = ChannelDensityVShift.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.channel_density_v_shifts.append(obj_) obj_.original_tagname_ = 'channelDensityVShift' elif nodeName_ == 'channelDensityNernst': class_obj_ = self.get_class_obj_(child_, ChannelDensityNernst) obj_ = class_obj_.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.channel_density_nernsts.append(obj_) obj_.original_tagname_ = 'channelDensityNernst' elif nodeName_ == 'channelDensityGHK': obj_ = ChannelDensityGHK.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.channel_density_ghks.append(obj_) obj_.original_tagname_ = 'channelDensityGHK' elif nodeName_ == 'channelDensityGHK2': obj_ = ChannelDensityGHK2.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.channel_density_ghk2s.append(obj_) obj_.original_tagname_ = 'channelDensityGHK2' elif nodeName_ == 'channelDensityNonUniform': obj_ = ChannelDensityNonUniform.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.channel_density_non_uniforms.append(obj_) obj_.original_tagname_ = 'channelDensityNonUniform' elif nodeName_ == 'channelDensityNonUniformNernst': obj_ = ChannelDensityNonUniformNernst.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.channel_density_non_uniform_nernsts.append(obj_) obj_.original_tagname_ = 'channelDensityNonUniformNernst' elif nodeName_ == 
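build() and _buildChildren() now thread a gds_collector_ argument through the whole object tree so that problems are accumulated instead of raised as warnings. A sketch of driving it by hand, assuming the regenerated module also ships generateDS's standard GdsCollector_ helper and that an lxml-style element (with a .prefix attribute) is passed in; neither is shown in this hunk:

    from lxml import etree
    import neuroml.nml.nml as nml

    node = etree.fromstring(
        b'<membraneProperties><spikeThresh value="0mV"/></membraneProperties>')

    collector = nml.GdsCollector_()           # assumed generateDS helper class
    mp = nml.MembraneProperties.factory()
    mp.build(node, gds_collector_=collector)  # collector reaches every child build()

    for msg in collector.get_messages():      # e.g. pattern violations from validators
        print(msg)
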
'channelDensityNonUniformGHK': obj_ = ChannelDensityNonUniformGHK.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.channel_density_non_uniform_ghks.append(obj_) obj_.original_tagname_ = 'channelDensityNonUniformGHK' elif nodeName_ == 'spikeThresh': obj_ = SpikeThresh.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.spike_threshes.append(obj_) obj_.original_tagname_ = 'spikeThresh' elif nodeName_ == 'specificCapacitance': obj_ = SpecificCapacitance.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.specific_capacitances.append(obj_) obj_.original_tagname_ = 'specificCapacitance' elif nodeName_ == 'initMembPotential': obj_ = InitMembPotential.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.init_memb_potentials.append(obj_) obj_.original_tagname_ = 'initMembPotential' # end class MembraneProperties class MembraneProperties2CaPools(MembraneProperties): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('channel_density_nernst_ca2s', 'ChannelDensityNernstCa2', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ChannelDensityNernstCa2', u'name': u'channelDensityNernstCa2', u'minOccurs': u'0'}, None), + MemberSpec_('channel_density_nernst_ca2s', 'ChannelDensityNernstCa2', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'channelDensityNernstCa2', 'type': 'ChannelDensityNernstCa2'}, None), ] subclass = None superclass = MembraneProperties - def __init__(self, channel_populations=None, channel_densities=None, channel_density_v_shifts=None, channel_density_nernsts=None, channel_density_ghks=None, channel_density_ghk2s=None, channel_density_non_uniforms=None, channel_density_non_uniform_nernsts=None, channel_density_non_uniform_ghks=None, spike_threshes=None, specific_capacitances=None, init_memb_potentials=None, channel_density_nernst_ca2s=None, **kwargs_): + def __init__(self, channel_populations=None, channel_densities=None, channel_density_v_shifts=None, channel_density_nernsts=None, channel_density_ghks=None, channel_density_ghk2s=None, channel_density_non_uniforms=None, channel_density_non_uniform_nernsts=None, channel_density_non_uniform_ghks=None, spike_threshes=None, specific_capacitances=None, init_memb_potentials=None, channel_density_nernst_ca2s=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(MembraneProperties2CaPools, self).__init__(channel_populations, channel_densities, channel_density_v_shifts, channel_density_nernsts, channel_density_ghks, channel_density_ghk2s, channel_density_non_uniforms, channel_density_non_uniform_nernsts, channel_density_non_uniform_ghks, spike_threshes, specific_capacitances, init_memb_potentials, **kwargs_) + self.ns_prefix_ = None + super(globals().get("MembraneProperties2CaPools"), self).__init__(channel_populations, channel_densities, channel_density_v_shifts, channel_density_nernsts, channel_density_ghks, channel_density_ghk2s, channel_density_non_uniforms, channel_density_non_uniform_nernsts, channel_density_non_uniform_ghks, spike_threshes, specific_capacitances, init_memb_potentials, **kwargs_) if channel_density_nernst_ca2s is None: self.channel_density_nernst_ca2s = [] else: self.channel_density_nernst_ca2s = channel_density_nernst_ca2s + 
self.channel_density_nernst_ca2s_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -4335,15 +5861,29 @@ def factory(*args_, **kwargs_): else: return MembraneProperties2CaPools(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_channelDensityNernstCa2(self): + return self.channel_density_nernst_ca2s + def set_channelDensityNernstCa2(self, channel_density_nernst_ca2s): + self.channel_density_nernst_ca2s = channel_density_nernst_ca2s + def add_channelDensityNernstCa2(self, value): + self.channel_density_nernst_ca2s.append(value) + def insert_channelDensityNernstCa2_at(self, index, value): + self.channel_density_nernst_ca2s.insert(index, value) + def replace_channelDensityNernstCa2_at(self, index, value): + self.channel_density_nernst_ca2s[index] = value + def _hasContent(self): if ( self.channel_density_nernst_ca2s or - super(MembraneProperties2CaPools, self).hasContent_() + super(MembraneProperties2CaPools, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='MembraneProperties2CaPools', pretty_print=True): + def export(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='MembraneProperties2CaPools', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('MembraneProperties2CaPools') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ @@ -4351,64 +5891,76 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='M eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'MembraneProperties2CaPools': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='MembraneProperties2CaPools') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='MembraneProperties2CaPools') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='MembraneProperties2CaPools', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='MembraneProperties2CaPools', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='MembraneProperties2CaPools'): - super(MembraneProperties2CaPools, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='MembraneProperties2CaPools') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='MembraneProperties2CaPools', fromsubclass_=False, pretty_print=True): - super(MembraneProperties2CaPools, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + def _exportAttributes(self, outfile, level, already_processed, 
namespaceprefix_='', name_='MembraneProperties2CaPools'): + super(MembraneProperties2CaPools, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='MembraneProperties2CaPools') + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='MembraneProperties2CaPools', fromsubclass_=False, pretty_print=True): + super(MembraneProperties2CaPools, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) if pretty_print: eol_ = '\n' else: eol_ = '' for channelDensityNernstCa2_ in self.channel_density_nernst_ca2s: + namespaceprefix_ = self.channel_density_nernst_ca2s_nsprefix_ + ':' if (UseCapturedNS_ and self.channel_density_nernst_ca2s_nsprefix_) else '' channelDensityNernstCa2_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='channelDensityNernstCa2', pretty_print=pretty_print) - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - super(MembraneProperties2CaPools, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + def _buildAttributes(self, node, attrs, already_processed): + super(MembraneProperties2CaPools, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'channelDensityNernstCa2': obj_ = ChannelDensityNernstCa2.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.channel_density_nernst_ca2s.append(obj_) obj_.original_tagname_ = 'channelDensityNernstCa2' - super(MembraneProperties2CaPools, self).buildChildren(child_, node, nodeName_, True) + super(MembraneProperties2CaPools, self)._buildChildren(child_, node, nodeName_, True) # end class MembraneProperties2CaPools class SpikeThresh(GeneratedsSuper): - """Membrane potential at which to emit a spiking event. Note, usually - the spiking event will not be emitted again until the membrane - potential has fallen below this value and rises again to cross - it in a positive direction.""" + """SpikeThresh -- Membrane potential at which to emit a spiking event. Note, usually the spiking event will not be emitted again until the membrane potential has fallen below this value and rises again to cross it in a positive direction. 
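Note that the traversal hooks are renamed from hasContent_/exportAttributes/exportChildren/buildAttributes/buildChildren to the underscore-prefixed _hasContent/_exportAttributes/_exportChildren/_buildAttributes/_buildChildren. Any downstream subclass that overrode the old names will be silently bypassed after this regeneration; a hypothetical override written against the new hook names (the element name myExtensionElement is made up for illustration):

    import neuroml.nml.nml as nml

    class MyMembraneProperties(nml.MembraneProperties):
        def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False,
                           gds_collector_=None):
            if nodeName_ == 'myExtensionElement':   # hypothetical extra element
                return                              # handled here instead
            super()._buildChildren(child_, node, nodeName_,
                                   fromsubclass_=fromsubclass_,
                                   gds_collector_=gds_collector_)
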
+ + """ + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('value', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('segment_groups', 'NmlId', 0, 1, {'use': u'optional'}), + MemberSpec_('value', 'Nml2Quantity_voltage', 0, 0, {'use': 'required', 'name': 'value'}), + MemberSpec_('segment_groups', 'NmlId', 0, 1, {'use': 'optional', 'name': 'segment_groups'}), ] subclass = None superclass = None - def __init__(self, value=None, segment_groups='all', **kwargs_): + def __init__(self, value=None, segment_groups='all', gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None self.value = _cast(None, value) + self.value_nsprefix_ = None self.segment_groups = _cast(None, segment_groups) + self.segment_groups_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -4420,21 +5972,41 @@ def factory(*args_, **kwargs_): else: return SpikeThresh(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_value(self): + return self.value + def set_value(self, value): + self.value = value + def get_segmentGroup(self): + return self.segment_groups + def set_segmentGroup(self, segment_groups): + self.segment_groups = segment_groups def validate_Nml2Quantity_voltage(self, value): # Validate type Nml2Quantity_voltage, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_voltage_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_voltage_patterns_, )) - validate_Nml2Quantity_voltage_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$']] + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_voltage_patterns_, )) + validate_Nml2Quantity_voltage_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV))$']] def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_NmlId_patterns_, )) + validate_NmlId_patterns_ = [['^([a-zA-Z_][a-zA-Z0-9_]*)$']] + def _hasContent(self): if ( ): @@ -4449,35 +6021,41 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='S eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'SpikeThresh': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpikeThresh') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpikeThresh') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SpikeThresh', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SpikeThresh', pretty_print=pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SpikeThresh'): + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SpikeThresh'): if self.value is not None and 'value' not in already_processed: already_processed.add('value') - outfile.write(' value=%s' % (quote_attrib(self.value), )) + outfile.write(' value=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.value), input_name='value')), )) if self.segment_groups != "all" and 'segment_groups' not in already_processed: already_processed.add('segment_groups') - outfile.write(' segmentGroup=%s' % (quote_attrib(self.segment_groups), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SpikeThresh', fromsubclass_=False, pretty_print=True): + outfile.write(' segmentGroup=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.segment_groups), input_name='segmentGroup')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SpikeThresh', fromsubclass_=False, pretty_print=True): pass - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in 
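The validate_* helpers no longer call warnings.warn(): they first check the base simple type, then report pattern violations through the per-instance gds_collector_, and the XSD patterns themselves are now anchored as ^(...)$. A sketch of exercising the SpikeThresh validator, again assuming the standard GdsCollector_ helper and the module-level Validate_simpletypes_ flag at its default of True:

    import neuroml.nml.nml as nml

    collector = nml.GdsCollector_()                     # assumed generateDS helper
    st = nml.SpikeThresh(value="-20 mV", gds_collector_=collector)

    st.validate_Nml2Quantity_voltage(st.value)          # "-20 mV" matches (V|mV): no message
    st.validate_Nml2Quantity_voltage("-20 millivolts")  # bad unit: message is collected
    print(collector.get_messages())
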
node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('value', node) if value is not None and 'value' not in already_processed: already_processed.add('value') @@ -4488,24 +6066,32 @@ def buildAttributes(self, node, attrs, already_processed): already_processed.add('segmentGroup') self.segment_groups = value self.validate_NmlId(self.segment_groups) # validate type NmlId - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): pass # end class SpikeThresh class SpecificCapacitance(GeneratedsSuper): - """Capacitance per unit area""" + """SpecificCapacitance -- Capacitance per unit area + + """ + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('value', 'Nml2Quantity_specificCapacitance', 0, 0, {'use': u'required'}), - MemberSpec_('segment_groups', 'NmlId', 0, 1, {'use': u'optional'}), + MemberSpec_('value', 'Nml2Quantity_specificCapacitance', 0, 0, {'use': 'required', 'name': 'value'}), + MemberSpec_('segment_groups', 'NmlId', 0, 1, {'use': 'optional', 'name': 'segment_groups'}), ] subclass = None superclass = None - def __init__(self, value=None, segment_groups='all', **kwargs_): + def __init__(self, value=None, segment_groups='all', gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None self.value = _cast(None, value) + self.value_nsprefix_ = None self.segment_groups = _cast(None, segment_groups) + self.segment_groups_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -4517,21 +6103,41 @@ def factory(*args_, **kwargs_): else: return SpecificCapacitance(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_value(self): + return self.value + def set_value(self, value): + self.value = value + def get_segmentGroup(self): + return self.segment_groups + def set_segmentGroup(self, segment_groups): + self.segment_groups = segment_groups def validate_Nml2Quantity_specificCapacitance(self, value): # Validate type Nml2Quantity_specificCapacitance, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_specificCapacitance_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_specificCapacitance_patterns_, )) - validate_Nml2Quantity_specificCapacitance_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(F_per_m2|uF_per_cm2)$']] + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_specificCapacitance_patterns_, )) + validate_Nml2Quantity_specificCapacitance_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(F_per_m2|uF_per_cm2))$']] def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_NmlId_patterns_, )) + validate_NmlId_patterns_ = [['^([a-zA-Z_][a-zA-Z0-9_]*)$']] + def _hasContent(self): if ( ): @@ -4546,35 +6152,41 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='S eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'SpecificCapacitance': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpecificCapacitance') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpecificCapacitance') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SpecificCapacitance', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SpecificCapacitance', pretty_print=pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SpecificCapacitance'): + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SpecificCapacitance'): if self.value is not None and 'value' not in 
already_processed: already_processed.add('value') - outfile.write(' value=%s' % (quote_attrib(self.value), )) + outfile.write(' value=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.value), input_name='value')), )) if self.segment_groups != "all" and 'segment_groups' not in already_processed: already_processed.add('segment_groups') - outfile.write(' segmentGroup=%s' % (quote_attrib(self.segment_groups), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SpecificCapacitance', fromsubclass_=False, pretty_print=True): + outfile.write(' segmentGroup=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.segment_groups), input_name='segmentGroup')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SpecificCapacitance', fromsubclass_=False, pretty_print=True): pass - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('value', node) if value is not None and 'value' not in already_processed: already_processed.add('value') @@ -4585,24 +6197,32 @@ def buildAttributes(self, node, attrs, already_processed): already_processed.add('segmentGroup') self.segment_groups = value self.validate_NmlId(self.segment_groups) # validate type NmlId - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): pass # end class SpecificCapacitance class InitMembPotential(GeneratedsSuper): - """Explicitly set initial membrane potential for the cell""" + """InitMembPotential -- Explicitly set initial membrane potential for the cell + + """ + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('value', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('segment_groups', 'NmlId', 0, 1, {'use': u'optional'}), + MemberSpec_('value', 'Nml2Quantity_voltage', 0, 0, {'use': 'required', 'name': 'value'}), + MemberSpec_('segment_groups', 'NmlId', 0, 1, {'use': 'optional', 'name': 'segment_groups'}), ] subclass = None superclass = None - def __init__(self, value=None, segment_groups='all', **kwargs_): + def __init__(self, value=None, segment_groups='all', gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None self.value = _cast(None, value) + self.value_nsprefix_ = None self.segment_groups = _cast(None, segment_groups) + self.segment_groups_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -4614,21 +6234,41 @@ def factory(*args_, **kwargs_): else: return InitMembPotential(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, 
ns_prefix): + self.ns_prefix_ = ns_prefix + def get_value(self): + return self.value + def set_value(self, value): + self.value = value + def get_segmentGroup(self): + return self.segment_groups + def set_segmentGroup(self, segment_groups): + self.segment_groups = segment_groups def validate_Nml2Quantity_voltage(self, value): # Validate type Nml2Quantity_voltage, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_voltage_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_voltage_patterns_, )) - validate_Nml2Quantity_voltage_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$']] + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_voltage_patterns_, )) + validate_Nml2Quantity_voltage_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV))$']] def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_NmlId_patterns_, )) + validate_NmlId_patterns_ = [['^([a-zA-Z_][a-zA-Z0-9_]*)$']] + def _hasContent(self): if ( ): @@ -4643,35 +6283,41 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='I eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'InitMembPotential': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='InitMembPotential') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='InitMembPotential') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='InitMembPotential', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='InitMembPotential', pretty_print=pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % 
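As with the other attribute-only types, InitMembPotential gains accessors named after the XML attribute (get_segmentGroup/set_segmentGroup) while the stored Python attribute keeps the mangled plural segment_groups; both refer to the same value. A quick sketch:

    import neuroml.nml.nml as nml

    imp = nml.InitMembPotential(value="-65mV")   # segment_groups defaults to "all"
    imp.set_segmentGroup("dendrite_group")
    assert imp.segment_groups == "dendrite_group"
    assert imp.get_segmentGroup() == imp.segment_groups
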
(eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='InitMembPotential'): + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='InitMembPotential'): if self.value is not None and 'value' not in already_processed: already_processed.add('value') - outfile.write(' value=%s' % (quote_attrib(self.value), )) + outfile.write(' value=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.value), input_name='value')), )) if self.segment_groups != "all" and 'segment_groups' not in already_processed: already_processed.add('segment_groups') - outfile.write(' segmentGroup=%s' % (quote_attrib(self.segment_groups), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='InitMembPotential', fromsubclass_=False, pretty_print=True): + outfile.write(' segmentGroup=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.segment_groups), input_name='segmentGroup')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='InitMembPotential', fromsubclass_=False, pretty_print=True): pass - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('value', node) if value is not None and 'value' not in already_processed: already_processed.add('value') @@ -4682,24 +6328,32 @@ def buildAttributes(self, node, attrs, already_processed): already_processed.add('segmentGroup') self.segment_groups = value self.validate_NmlId(self.segment_groups) # validate type NmlId - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): pass # end class InitMembPotential class Resistivity(GeneratedsSuper): - """The resistivity, or specific axial resistance, of the cytoplasm""" + """Resistivity -- The resistivity, or specific axial resistance, of the cytoplasm + + """ + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('value', 'Nml2Quantity_resistivity', 0, 0, {'use': u'required'}), - MemberSpec_('segment_groups', 'NmlId', 0, 1, {'use': u'optional'}), + MemberSpec_('value', 'Nml2Quantity_resistivity', 0, 0, {'use': 'required', 'name': 'value'}), + MemberSpec_('segment_groups', 'NmlId', 0, 1, {'use': 'optional', 'name': 'segment_groups'}), ] subclass = None superclass = None - def __init__(self, value=None, segment_groups='all', **kwargs_): + def __init__(self, value=None, segment_groups='all', gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None self.value = _cast(None, value) + self.value_nsprefix_ = None self.segment_groups = _cast(None, segment_groups) + self.segment_groups_nsprefix_ = None def factory(*args_, **kwargs_): if 
CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -4711,21 +6365,41 @@ def factory(*args_, **kwargs_): else: return Resistivity(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_value(self): + return self.value + def set_value(self, value): + self.value = value + def get_segmentGroup(self): + return self.segment_groups + def set_segmentGroup(self, segment_groups): + self.segment_groups = segment_groups def validate_Nml2Quantity_resistivity(self, value): # Validate type Nml2Quantity_resistivity, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_resistivity_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_resistivity_patterns_, )) - validate_Nml2Quantity_resistivity_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(ohm_cm|kohm_cm|ohm_m)$']] + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_resistivity_patterns_, )) + validate_Nml2Quantity_resistivity_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(ohm_cm|kohm_cm|ohm_m))$']] def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_NmlId_patterns_, )) + validate_NmlId_patterns_ = [['^([a-zA-Z_][a-zA-Z0-9_]*)$']] + def _hasContent(self): if ( ): @@ -4740,35 +6414,41 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='R eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'Resistivity': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Resistivity') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Resistivity') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Resistivity', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Resistivity', pretty_print=pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Resistivity'): + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Resistivity'): if self.value is not None and 'value' not in already_processed: already_processed.add('value') - outfile.write(' value=%s' % (quote_attrib(self.value), )) + outfile.write(' value=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.value), input_name='value')), )) if self.segment_groups != "all" and 'segment_groups' not in already_processed: already_processed.add('segment_groups') - outfile.write(' segmentGroup=%s' % (quote_attrib(self.segment_groups), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Resistivity', fromsubclass_=False, pretty_print=True): + outfile.write(' segmentGroup=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.segment_groups), input_name='segmentGroup')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Resistivity', fromsubclass_=False, pretty_print=True): pass - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in 
node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('value', node) if value is not None and 'value' not in already_processed: already_processed.add('value') @@ -4779,25 +6459,32 @@ def buildAttributes(self, node, attrs, already_processed): already_processed.add('segmentGroup') self.segment_groups = value self.validate_NmlId(self.segment_groups) # validate type NmlId - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): pass # end class Resistivity class VariableParameter(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('parameter', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('segment_groups', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('inhomogeneous_value', 'InhomogeneousValue', 0, 1, {u'type': u'InhomogeneousValue', u'name': u'inhomogeneousValue', u'minOccurs': u'0'}, None), + MemberSpec_('parameter', 'xs:string', 0, 0, {'use': 'required', 'name': 'parameter'}), + MemberSpec_('segment_groups', 'xs:string', 0, 0, {'use': 'required', 'name': 'segment_groups'}), + MemberSpec_('inhomogeneous_value', 'InhomogeneousValue', 0, 1, {'minOccurs': '0', 'name': 'inhomogeneousValue', 'type': 'InhomogeneousValue'}, None), ] subclass = None superclass = None - def __init__(self, parameter=None, segment_groups=None, inhomogeneous_value=None, **kwargs_): + def __init__(self, parameter=None, segment_groups=None, inhomogeneous_value=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None self.parameter = _cast(None, parameter) + self.parameter_nsprefix_ = None self.segment_groups = _cast(None, segment_groups) + self.segment_groups_nsprefix_ = None self.inhomogeneous_value = inhomogeneous_value + self.inhomogeneous_value_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -4809,14 +6496,30 @@ def factory(*args_, **kwargs_): else: return VariableParameter(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_inhomogeneousValue(self): + return self.inhomogeneous_value + def set_inhomogeneousValue(self, inhomogeneous_value): + self.inhomogeneous_value = inhomogeneous_value + def get_parameter(self): + return self.parameter + def set_parameter(self, parameter): + self.parameter = parameter + def get_segmentGroup(self): + return self.segment_groups + def set_segmentGroup(self, segment_groups): + self.segment_groups = segment_groups + def _hasContent(self): if ( self.inhomogeneous_value is not None ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='VariableParameter', pretty_print=True): + def export(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='VariableParameter', pretty_print=True): imported_ns_def_ = 
GenerateDSNamespaceDefs_.get('VariableParameter') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ @@ -4824,41 +6527,48 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='V eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'VariableParameter': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='VariableParameter') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='VariableParameter') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='VariableParameter', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='VariableParameter', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='VariableParameter'): + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='VariableParameter'): if self.parameter is not None and 'parameter' not in already_processed: already_processed.add('parameter') outfile.write(' parameter=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.parameter), input_name='parameter')), )) if self.segment_groups is not None and 'segment_groups' not in already_processed: already_processed.add('segment_groups') outfile.write(' segmentGroup=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.segment_groups), input_name='segmentGroup')), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='VariableParameter', fromsubclass_=False, pretty_print=True): + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='VariableParameter', fromsubclass_=False, pretty_print=True): if pretty_print: eol_ = '\n' else: eol_ = '' if self.inhomogeneous_value is not None: + namespaceprefix_ = self.inhomogeneous_value_nsprefix_ + ':' if (UseCapturedNS_ and self.inhomogeneous_value_nsprefix_) else '' self.inhomogeneous_value.export(outfile, level, namespaceprefix_, namespacedef_='', name_='inhomogeneousValue', pretty_print=pretty_print) - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('parameter', node) if value is not None and 'parameter' not in 
already_processed: already_processed.add('parameter') @@ -4867,27 +6577,33 @@ def buildAttributes(self, node, attrs, already_processed): if value is not None and 'segmentGroup' not in already_processed: already_processed.add('segmentGroup') self.segment_groups = value - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'inhomogeneousValue': obj_ = InhomogeneousValue.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.inhomogeneous_value = obj_ obj_.original_tagname_ = 'inhomogeneousValue' # end class VariableParameter class InhomogeneousValue(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('inhomogeneous_parameters', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('value', 'xs:string', 0, 0, {'use': u'required'}), + MemberSpec_('inhomogeneous_parameters', 'xs:string', 0, 0, {'use': 'required', 'name': 'inhomogeneous_parameters'}), + MemberSpec_('value', 'xs:string', 0, 0, {'use': 'required', 'name': 'value'}), ] subclass = None superclass = None - def __init__(self, inhomogeneous_parameters=None, value=None, **kwargs_): + def __init__(self, inhomogeneous_parameters=None, value=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None self.inhomogeneous_parameters = _cast(None, inhomogeneous_parameters) + self.inhomogeneous_parameters_nsprefix_ = None self.value = _cast(None, value) + self.value_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -4899,7 +6615,19 @@ def factory(*args_, **kwargs_): else: return InhomogeneousValue(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_inhomogeneousParameter(self): + return self.inhomogeneous_parameters + def set_inhomogeneousParameter(self, inhomogeneous_parameters): + self.inhomogeneous_parameters = inhomogeneous_parameters + def get_value(self): + return self.value + def set_value(self, value): + self.value = value + def _hasContent(self): if ( ): @@ -4914,35 +6642,41 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='I eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'InhomogeneousValue': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='InhomogeneousValue') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='InhomogeneousValue') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='InhomogeneousValue', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='InhomogeneousValue', pretty_print=pretty_print) 
outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='InhomogeneousValue'): + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='InhomogeneousValue'): if self.inhomogeneous_parameters is not None and 'inhomogeneous_parameters' not in already_processed: already_processed.add('inhomogeneous_parameters') outfile.write(' inhomogeneousParameter=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.inhomogeneous_parameters), input_name='inhomogeneousParameter')), )) if self.value is not None and 'value' not in already_processed: already_processed.add('value') outfile.write(' value=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.value), input_name='value')), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='InhomogeneousValue', fromsubclass_=False, pretty_print=True): + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='InhomogeneousValue', fromsubclass_=False, pretty_print=True): pass - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('inhomogeneousParameter', node) if value is not None and 'inhomogeneousParameter' not in already_processed: already_processed.add('inhomogeneousParameter') @@ -4951,34 +6685,45 @@ def buildAttributes(self, node, attrs, already_processed): if value is not None and 'value' not in already_processed: already_processed.add('value') self.value = value - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): pass # end class InhomogeneousValue class Species(GeneratedsSuper): - """Specifying the ion here again is redundant, the ion name should be - the same as id. Kept for now until LEMS implementation can - select by id. TODO: remove.""" + """ion -- Specifying the ion here again is redundant, the ion name should be the same as id. Kept for now + until LEMS implementation can select by id. TODO: remove. 
+ + """ + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('id', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('concentration_model', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('ion', 'NmlId', 0, 1, {'use': u'optional'}), - MemberSpec_('initial_concentration', 'Nml2Quantity_concentration', 0, 0, {'use': u'required'}), - MemberSpec_('initial_ext_concentration', 'Nml2Quantity_concentration', 0, 0, {'use': u'required'}), - MemberSpec_('segment_groups', 'NmlId', 0, 1, {'use': u'optional'}), + MemberSpec_('id', 'NmlId', 0, 0, {'use': 'required', 'name': 'id'}), + MemberSpec_('concentration_model', 'NmlId', 0, 0, {'use': 'required', 'name': 'concentration_model'}), + MemberSpec_('ion', 'NmlId', 0, 1, {'use': 'optional', 'name': 'ion'}), + MemberSpec_('initial_concentration', 'Nml2Quantity_concentration', 0, 0, {'use': 'required', 'name': 'initial_concentration'}), + MemberSpec_('initial_ext_concentration', 'Nml2Quantity_concentration', 0, 0, {'use': 'required', 'name': 'initial_ext_concentration'}), + MemberSpec_('segment_groups', 'NmlId', 0, 1, {'use': 'optional', 'name': 'segment_groups'}), ] subclass = None superclass = None - def __init__(self, id=None, concentration_model=None, ion=None, initial_concentration=None, initial_ext_concentration=None, segment_groups='all', **kwargs_): + def __init__(self, id=None, concentration_model=None, ion=None, initial_concentration=None, initial_ext_concentration=None, segment_groups='all', gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None self.id = _cast(None, id) + self.id_nsprefix_ = None self.concentration_model = _cast(None, concentration_model) + self.concentration_model_nsprefix_ = None self.ion = _cast(None, ion) + self.ion_nsprefix_ = None self.initial_concentration = _cast(None, initial_concentration) + self.initial_concentration_nsprefix_ = None self.initial_ext_concentration = _cast(None, initial_ext_concentration) + self.initial_ext_concentration_nsprefix_ = None self.segment_groups = _cast(None, segment_groups) + self.segment_groups_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -4990,21 +6735,57 @@ def factory(*args_, **kwargs_): else: return Species(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_id(self): + return self.id + def set_id(self, id): + self.id = id + def get_concentrationModel(self): + return self.concentration_model + def set_concentrationModel(self, concentration_model): + self.concentration_model = concentration_model + def get_ion(self): + return self.ion + def set_ion(self, ion): + self.ion = ion + def get_initialConcentration(self): + return self.initial_concentration + def set_initialConcentration(self, initial_concentration): + self.initial_concentration = initial_concentration + def get_initialExtConcentration(self): + return self.initial_ext_concentration + def set_initialExtConcentration(self, initial_ext_concentration): + self.initial_ext_concentration = initial_ext_concentration + def get_segmentGroup(self): + return self.segment_groups + def set_segmentGroup(self, segment_groups): + self.segment_groups = segment_groups def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_NmlId_patterns_, )) + validate_NmlId_patterns_ = [['^([a-zA-Z_][a-zA-Z0-9_]*)$']] def validate_Nml2Quantity_concentration(self, value): # Validate type Nml2Quantity_concentration, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_concentration_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_concentration_patterns_, )) - validate_Nml2Quantity_concentration_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(mol_per_m3|mol_per_cm3|M|mM)$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_concentration_patterns_, )) + validate_Nml2Quantity_concentration_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(mol_per_m3|mol_per_cm3|M|mM))$']] + def _hasContent(self): if ( ): @@ -5019,47 +6800,53 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='S eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'Species': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Species') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Species') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Species', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Species', pretty_print=pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Species'): + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Species'): if self.id is not None and 'id' not in already_processed: already_processed.add('id') - outfile.write(' id=%s' % 
(quote_attrib(self.id), )) + outfile.write(' id=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.id), input_name='id')), )) if self.concentration_model is not None and 'concentration_model' not in already_processed: already_processed.add('concentration_model') - outfile.write(' concentrationModel=%s' % (quote_attrib(self.concentration_model), )) + outfile.write(' concentrationModel=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.concentration_model), input_name='concentrationModel')), )) if self.ion is not None and 'ion' not in already_processed: already_processed.add('ion') - outfile.write(' ion=%s' % (quote_attrib(self.ion), )) + outfile.write(' ion=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.ion), input_name='ion')), )) if self.initial_concentration is not None and 'initial_concentration' not in already_processed: already_processed.add('initial_concentration') - outfile.write(' initialConcentration=%s' % (quote_attrib(self.initial_concentration), )) + outfile.write(' initialConcentration=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.initial_concentration), input_name='initialConcentration')), )) if self.initial_ext_concentration is not None and 'initial_ext_concentration' not in already_processed: already_processed.add('initial_ext_concentration') - outfile.write(' initialExtConcentration=%s' % (quote_attrib(self.initial_ext_concentration), )) + outfile.write(' initialExtConcentration=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.initial_ext_concentration), input_name='initialExtConcentration')), )) if self.segment_groups != "all" and 'segment_groups' not in already_processed: already_processed.add('segment_groups') - outfile.write(' segmentGroup=%s' % (quote_attrib(self.segment_groups), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Species', fromsubclass_=False, pretty_print=True): + outfile.write(' segmentGroup=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.segment_groups), input_name='segmentGroup')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Species', fromsubclass_=False, pretty_print=True): pass - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('id', node) if value is not None and 'id' not in already_processed: already_processed.add('id') @@ -5090,29 +6877,35 @@ def buildAttributes(self, node, attrs, already_processed): already_processed.add('segmentGroup') self.segment_groups = value self.validate_NmlId(self.segment_groups) # validate type NmlId - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): pass # end class Species class IntracellularProperties(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ 
= [ - MemberSpec_('species', 'Species', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Species', u'name': u'species', u'minOccurs': u'0'}, None), - MemberSpec_('resistivities', 'Resistivity', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Resistivity', u'name': u'resistivity', u'minOccurs': u'0'}, None), + MemberSpec_('species', 'Species', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'species', 'type': 'Species'}, None), + MemberSpec_('resistivities', 'Resistivity', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'resistivity', 'type': 'Resistivity'}, None), ] subclass = None superclass = None - def __init__(self, species=None, resistivities=None, extensiontype_=None, **kwargs_): + def __init__(self, species=None, resistivities=None, extensiontype_=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None if species is None: self.species = [] else: self.species = species + self.species_nsprefix_ = None if resistivities is None: self.resistivities = [] else: self.resistivities = resistivities + self.resistivities_nsprefix_ = None self.extensiontype_ = extensiontype_ def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: @@ -5125,7 +6918,33 @@ def factory(*args_, **kwargs_): else: return IntracellularProperties(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_species(self): + return self.species + def set_species(self, species): + self.species = species + def add_species(self, value): + self.species.append(value) + def insert_species_at(self, index, value): + self.species.insert(index, value) + def replace_species_at(self, index, value): + self.species[index] = value + def get_resistivity(self): + return self.resistivities + def set_resistivity(self, resistivities): + self.resistivities = resistivities + def add_resistivity(self, value): + self.resistivities.append(value) + def insert_resistivity_at(self, index, value): + self.resistivities.insert(index, value) + def replace_resistivity_at(self, index, value): + self.resistivities[index] = value + def get_extensiontype_(self): return self.extensiontype_ + def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ + def _hasContent(self): if ( self.species or self.resistivities @@ -5133,7 +6952,7 @@ def hasContent_(self): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IntracellularProperties', pretty_print=True): + def export(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='IntracellularProperties', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('IntracellularProperties') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ @@ -5141,69 +6960,85 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='I eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'IntracellularProperties': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' 
' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IntracellularProperties') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IntracellularProperties') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='IntracellularProperties', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='IntracellularProperties', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='IntracellularProperties'): + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='IntracellularProperties'): if self.extensiontype_ is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get(self.extensiontype_, '') + outfile.write(' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_)) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) pass - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IntracellularProperties', fromsubclass_=False, pretty_print=True): + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='IntracellularProperties', fromsubclass_=False, pretty_print=True): if pretty_print: eol_ = '\n' else: eol_ = '' for species_ in self.species: + namespaceprefix_ = self.species_nsprefix_ + ':' if (UseCapturedNS_ and self.species_nsprefix_) else '' species_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='species', pretty_print=pretty_print) for resistivity_ in self.resistivities: + namespaceprefix_ = self.resistivities_nsprefix_ + ':' if (UseCapturedNS_ and self.resistivities_nsprefix_) else '' resistivity_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='resistivity', pretty_print=pretty_print) - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('xsi:type', node) if value is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') self.extensiontype_ = value - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'species': obj_ = Species.factory(parent_object_=self) - 
obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.species.append(obj_) obj_.original_tagname_ = 'species' elif nodeName_ == 'resistivity': obj_ = Resistivity.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.resistivities.append(obj_) obj_.original_tagname_ = 'resistivity' # end class IntracellularProperties class IntracellularProperties2CaPools(IntracellularProperties): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ ] subclass = None superclass = IntracellularProperties - def __init__(self, species=None, resistivities=None, **kwargs_): + def __init__(self, species=None, resistivities=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(IntracellularProperties2CaPools, self).__init__(species, resistivities, **kwargs_) + self.ns_prefix_ = None + super(globals().get("IntracellularProperties2CaPools"), self).__init__(species, resistivities, **kwargs_) def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -5215,9 +7050,13 @@ def factory(*args_, **kwargs_): else: return IntracellularProperties2CaPools(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def _hasContent(self): if ( - super(IntracellularProperties2CaPools, self).hasContent_() + super(IntracellularProperties2CaPools, self)._hasContent() ): return True else: @@ -5230,51 +7069,62 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='I eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'IntracellularProperties2CaPools': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IntracellularProperties2CaPools') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IntracellularProperties2CaPools') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='IntracellularProperties2CaPools', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='IntracellularProperties2CaPools', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='IntracellularProperties2CaPools'): - super(IntracellularProperties2CaPools, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IntracellularProperties2CaPools') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IntracellularProperties2CaPools', fromsubclass_=False, pretty_print=True): - super(IntracellularProperties2CaPools, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) 
- def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='IntracellularProperties2CaPools'): + super(IntracellularProperties2CaPools, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IntracellularProperties2CaPools') + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IntracellularProperties2CaPools', fromsubclass_=False, pretty_print=True): + super(IntracellularProperties2CaPools, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - super(IntracellularProperties2CaPools, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(IntracellularProperties2CaPools, self).buildChildren(child_, node, nodeName_, True) + def _buildAttributes(self, node, attrs, already_processed): + super(IntracellularProperties2CaPools, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(IntracellularProperties2CaPools, self)._buildChildren(child_, node, nodeName_, True) pass # end class IntracellularProperties2CaPools class ExtracellularPropertiesLocal(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('species', 'Species', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Species', u'name': u'species', u'minOccurs': u'0'}, None), + MemberSpec_('species', 'Species', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'species', 'type': 'Species'}, None), ] subclass = None superclass = None - def __init__(self, species=None, **kwargs_): + def __init__(self, species=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None if species is None: self.species = [] else: self.species = species + self.species_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -5286,14 +7136,28 @@ def factory(*args_, **kwargs_): else: return ExtracellularPropertiesLocal(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_species(self): + return self.species + def set_species(self, species): + self.species = species + def add_species(self, value): + self.species.append(value) + def insert_species_at(self, index, value): + self.species.insert(index, value) + def replace_species_at(self, index, value): + self.species[index] = value + def _hasContent(self): if ( self.species ): return True else: return False - def export(self, 
outfile, level, namespaceprefix_='', namespacedef_='', name_='ExtracellularPropertiesLocal', pretty_print=True): + def export(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='ExtracellularPropertiesLocal', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('ExtracellularPropertiesLocal') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ @@ -5301,66 +7165,83 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='E eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'ExtracellularPropertiesLocal': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExtracellularPropertiesLocal') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExtracellularPropertiesLocal') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ExtracellularPropertiesLocal', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ExtracellularPropertiesLocal', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ExtracellularPropertiesLocal'): + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ExtracellularPropertiesLocal'): pass - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExtracellularPropertiesLocal', fromsubclass_=False, pretty_print=True): + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='ExtracellularPropertiesLocal', fromsubclass_=False, pretty_print=True): if pretty_print: eol_ = '\n' else: eol_ = '' for species_ in self.species: + namespaceprefix_ = self.species_nsprefix_ + ':' if (UseCapturedNS_ and self.species_nsprefix_) else '' species_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='species', pretty_print=pretty_print) - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): pass - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'species': obj_ = 
Species.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.species.append(obj_) obj_.original_tagname_ = 'species' # end class ExtracellularPropertiesLocal class SpaceStructure(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('x_spacing', 'xs:float', 0, 1, {'use': 'optional'}), - MemberSpec_('y_spacing', 'xs:float', 0, 1, {'use': u'optional'}), - MemberSpec_('z_spacing', 'xs:float', 0, 1, {'use': u'optional'}), - MemberSpec_('x_start', 'xs:float', 0, 1, {'use': u'optional'}), - MemberSpec_('y_start', 'xs:float', 0, 1, {'use': u'optional'}), - MemberSpec_('z_start', 'xs:float', 0, 1, {'use': u'optional'}), + MemberSpec_('x_spacing', 'xs:float', 0, 1, {'use': 'optional', 'name': 'x_spacing'}), + MemberSpec_('y_spacing', 'xs:float', 0, 1, {'use': 'optional', 'name': 'y_spacing'}), + MemberSpec_('z_spacing', 'xs:float', 0, 1, {'use': 'optional', 'name': 'z_spacing'}), + MemberSpec_('x_start', 'xs:float', 0, 1, {'use': 'optional', 'name': 'x_start'}), + MemberSpec_('y_start', 'xs:float', 0, 1, {'use': 'optional', 'name': 'y_start'}), + MemberSpec_('z_start', 'xs:float', 0, 1, {'use': 'optional', 'name': 'z_start'}), ] subclass = None superclass = None - def __init__(self, x_spacing=None, y_spacing=None, z_spacing=None, x_start=0, y_start=0, z_start=0, **kwargs_): + def __init__(self, x_spacing=None, y_spacing=None, z_spacing=None, x_start=0, y_start=0, z_start=0, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None self.x_spacing = _cast(float, x_spacing) + self.x_spacing_nsprefix_ = None self.y_spacing = _cast(float, y_spacing) + self.y_spacing_nsprefix_ = None self.z_spacing = _cast(float, z_spacing) + self.z_spacing_nsprefix_ = None self.x_start = _cast(float, x_start) + self.x_start_nsprefix_ = None self.y_start = _cast(float, y_start) + self.y_start_nsprefix_ = None self.z_start = _cast(float, z_start) + self.z_start_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -5372,7 +7253,35 @@ def factory(*args_, **kwargs_): else: return SpaceStructure(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_xSpacing(self): + return self.x_spacing + def set_xSpacing(self, x_spacing): + self.x_spacing = x_spacing + def get_ySpacing(self): + return self.y_spacing + def set_ySpacing(self, y_spacing): + self.y_spacing = y_spacing + def get_zSpacing(self): + return self.z_spacing + def set_zSpacing(self, z_spacing): + self.z_spacing = z_spacing + def get_xStart(self): + return self.x_start + def set_xStart(self, x_start): + self.x_start = x_start + def get_yStart(self): + return self.y_start + def set_yStart(self, y_start): + self.y_start = y_start + def get_zStart(self): + return self.z_start + def set_zStart(self, z_start): + self.z_start = z_start + def _hasContent(self): if ( ): @@ -5387,19 +7296,21 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='S eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'SpaceStructure': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = 
self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpaceStructure') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpaceStructure') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SpaceStructure', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SpaceStructure', pretty_print=pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SpaceStructure'): + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SpaceStructure'): if self.x_spacing is not None and 'x_spacing' not in already_processed: already_processed.add('x_spacing') outfile.write(' xSpacing="%s"' % self.gds_format_float(self.x_spacing, input_name='xSpacing')) @@ -5418,79 +7329,79 @@ def exportAttributes(self, outfile, level, already_processed, namespaceprefix_=' if self.z_start != 0 and 'z_start' not in already_processed: already_processed.add('z_start') outfile.write(' zStart="%s"' % self.gds_format_float(self.z_start, input_name='zStart')) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SpaceStructure', fromsubclass_=False, pretty_print=True): + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SpaceStructure', fromsubclass_=False, pretty_print=True): pass - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('xSpacing', node) if value is not None and 'xSpacing' not in already_processed: already_processed.add('xSpacing') - try: - self.x_spacing = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (xSpacing): %s' % exp) + value = self.gds_parse_float(value, node, 'xSpacing') + self.x_spacing = value value = find_attr_value_('ySpacing', node) if value is not None and 'ySpacing' not in already_processed: already_processed.add('ySpacing') - try: - self.y_spacing = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (ySpacing): %s' % exp) + value = self.gds_parse_float(value, node, 'ySpacing') + self.y_spacing = value value = find_attr_value_('zSpacing', node) if value is not None and 'zSpacing' not in already_processed: already_processed.add('zSpacing') - try: - self.z_spacing = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (zSpacing): %s' % exp) + value = self.gds_parse_float(value, node, 
'zSpacing') + self.z_spacing = value value = find_attr_value_('xStart', node) if value is not None and 'xStart' not in already_processed: already_processed.add('xStart') - try: - self.x_start = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (xStart): %s' % exp) + value = self.gds_parse_float(value, node, 'xStart') + self.x_start = value value = find_attr_value_('yStart', node) if value is not None and 'yStart' not in already_processed: already_processed.add('yStart') - try: - self.y_start = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (yStart): %s' % exp) + value = self.gds_parse_float(value, node, 'yStart') + self.y_start = value value = find_attr_value_('zStart', node) if value is not None and 'zStart' not in already_processed: already_processed.add('zStart') - try: - self.z_start = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (zStart): %s' % exp) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + value = self.gds_parse_float(value, node, 'zStart') + self.z_start = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): pass # end class SpaceStructure class Layout(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('spaces', 'NmlId', 0, 1, {'use': 'optional'}), - MemberSpec_('random', 'RandomLayout', 0, 0, {u'type': u'RandomLayout', u'name': u'random'}, 5), - MemberSpec_('grid', 'GridLayout', 0, 0, {u'type': u'GridLayout', u'name': u'grid'}, 5), - MemberSpec_('unstructured', 'UnstructuredLayout', 0, 0, {u'type': u'UnstructuredLayout', u'name': u'unstructured'}, 5), + MemberSpec_('spaces', 'NmlId', 0, 1, {'use': 'optional', 'name': 'spaces'}), + MemberSpec_('random', 'RandomLayout', 0, 0, {'name': 'random', 'type': 'RandomLayout'}, 5), + MemberSpec_('grid', 'GridLayout', 0, 0, {'name': 'grid', 'type': 'GridLayout'}, 5), + MemberSpec_('unstructured', 'UnstructuredLayout', 0, 0, {'name': 'unstructured', 'type': 'UnstructuredLayout'}, 5), ] subclass = None superclass = None - def __init__(self, spaces=None, random=None, grid=None, unstructured=None, **kwargs_): + def __init__(self, spaces=None, random=None, grid=None, unstructured=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None self.spaces = _cast(None, spaces) + self.spaces_nsprefix_ = None self.random = random + self.random_nsprefix_ = None self.grid = grid + self.grid_nsprefix_ = None self.unstructured = unstructured + self.unstructured_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -5502,14 +7413,38 @@ def factory(*args_, **kwargs_): else: return Layout(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_random(self): + return self.random + def set_random(self, random): + self.random = random + def get_grid(self): + return self.grid + def set_grid(self, grid): + self.grid = grid + def get_unstructured(self): + return self.unstructured + def set_unstructured(self, unstructured): + self.unstructured = unstructured + def get_space(self): + return self.spaces + def set_space(self, spaces): + self.spaces = spaces def 
validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_NmlId_patterns_, )) + validate_NmlId_patterns_ = [['^([a-zA-Z_][a-zA-Z0-9_]*)$']] + def _hasContent(self): if ( self.random is not None or self.grid is not None or @@ -5518,7 +7453,7 @@ def hasContent_(self): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Layout', pretty_print=True): + def export(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='Layout', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('Layout') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ @@ -5526,76 +7461,90 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='L eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'Layout': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Layout') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Layout') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Layout', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Layout', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Layout'): + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Layout'): if self.spaces is not None and 'spaces' not in already_processed: already_processed.add('spaces') - outfile.write(' space=%s' % (quote_attrib(self.spaces), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Layout', fromsubclass_=False, pretty_print=True): + outfile.write(' space=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.spaces), input_name='space')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='Layout', fromsubclass_=False, pretty_print=True): if pretty_print: eol_ = '\n' else: eol_ = '' if 
self.random is not None: + namespaceprefix_ = self.random_nsprefix_ + ':' if (UseCapturedNS_ and self.random_nsprefix_) else '' self.random.export(outfile, level, namespaceprefix_, namespacedef_='', name_='random', pretty_print=pretty_print) if self.grid is not None: + namespaceprefix_ = self.grid_nsprefix_ + ':' if (UseCapturedNS_ and self.grid_nsprefix_) else '' self.grid.export(outfile, level, namespaceprefix_, namespacedef_='', name_='grid', pretty_print=pretty_print) if self.unstructured is not None: + namespaceprefix_ = self.unstructured_nsprefix_ + ':' if (UseCapturedNS_ and self.unstructured_nsprefix_) else '' self.unstructured.export(outfile, level, namespaceprefix_, namespacedef_='', name_='unstructured', pretty_print=pretty_print) - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('space', node) if value is not None and 'space' not in already_processed: already_processed.add('space') self.spaces = value self.validate_NmlId(self.spaces) # validate type NmlId - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'random': obj_ = RandomLayout.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.random = obj_ obj_.original_tagname_ = 'random' elif nodeName_ == 'grid': obj_ = GridLayout.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.grid = obj_ obj_.original_tagname_ = 'grid' elif nodeName_ == 'unstructured': obj_ = UnstructuredLayout.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.unstructured = obj_ obj_.original_tagname_ = 'unstructured' # end class Layout class UnstructuredLayout(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('number', 'xs:nonNegativeInteger', 0, 1, {'use': 'optional'}), + MemberSpec_('number', 'xs:nonNegativeInteger', 0, 1, {'use': 'optional', 'name': 'number'}), ] subclass = None superclass = None - def __init__(self, number=None, **kwargs_): + def __init__(self, number=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None self.number = _cast(int, number) + self.number_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -5607,7 +7556,15 @@ def factory(*args_, **kwargs_): else: return UnstructuredLayout(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_number(self): + return 
self.number + def set_number(self, number): + self.number = number + def _hasContent(self): if ( ): @@ -5622,58 +7579,67 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='U eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'UnstructuredLayout': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='UnstructuredLayout') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='UnstructuredLayout') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='UnstructuredLayout', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='UnstructuredLayout', pretty_print=pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='UnstructuredLayout'): + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='UnstructuredLayout'): if self.number is not None and 'number' not in already_processed: already_processed.add('number') outfile.write(' number="%s"' % self.gds_format_integer(self.number, input_name='number')) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='UnstructuredLayout', fromsubclass_=False, pretty_print=True): + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='UnstructuredLayout', fromsubclass_=False, pretty_print=True): pass - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('number', node) if value is not None and 'number' not in already_processed: already_processed.add('number') - try: - self.number = int(value) - except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + self.number = self.gds_parse_integer(value, node, 'number') if self.number < 0: raise_parse_error(node, 'Invalid NonNegativeInteger') - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): pass # end class UnstructuredLayout class RandomLayout(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('number', 'xs:nonNegativeInteger', 0, 1, {'use': 'optional'}), - MemberSpec_('regions', 'NmlId', 0, 1, {'use': 'optional'}), + MemberSpec_('number', 
'xs:nonNegativeInteger', 0, 1, {'use': 'optional', 'name': 'number'}), + MemberSpec_('regions', 'NmlId', 0, 1, {'use': 'optional', 'name': 'regions'}), ] subclass = None superclass = None - def __init__(self, number=None, regions=None, **kwargs_): + def __init__(self, number=None, regions=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None self.number = _cast(int, number) + self.number_nsprefix_ = None self.regions = _cast(None, regions) + self.regions_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -5685,14 +7651,30 @@ def factory(*args_, **kwargs_): else: return RandomLayout(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_number(self): + return self.number + def set_number(self, number): + self.number = number + def get_region(self): + return self.regions + def set_region(self, regions): + self.regions = regions def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_NmlId_patterns_, )) + validate_NmlId_patterns_ = [['^([a-zA-Z_][a-zA-Z0-9_]*)$']] + def _hasContent(self): if ( ): @@ -5707,42 +7689,45 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='R eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'RandomLayout': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='RandomLayout') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='RandomLayout') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='RandomLayout', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='RandomLayout', pretty_print=pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='RandomLayout'): + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', 
name_='RandomLayout'): if self.number is not None and 'number' not in already_processed: already_processed.add('number') outfile.write(' number="%s"' % self.gds_format_integer(self.number, input_name='number')) if self.regions is not None and 'regions' not in already_processed: already_processed.add('regions') - outfile.write(' region=%s' % (quote_attrib(self.regions), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='RandomLayout', fromsubclass_=False, pretty_print=True): + outfile.write(' region=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.regions), input_name='region')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='RandomLayout', fromsubclass_=False, pretty_print=True): pass - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('number', node) if value is not None and 'number' not in already_processed: already_processed.add('number') - try: - self.number = int(value) - except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + self.number = self.gds_parse_integer(value, node, 'number') if self.number < 0: raise_parse_error(node, 'Invalid NonNegativeInteger') value = find_attr_value_('region', node) @@ -5750,25 +7735,32 @@ def buildAttributes(self, node, attrs, already_processed): already_processed.add('region') self.regions = value self.validate_NmlId(self.regions) # validate type NmlId - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): pass # end class RandomLayout class GridLayout(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('x_size', 'xs:nonNegativeInteger', 0, 1, {'use': 'optional'}), - MemberSpec_('y_size', 'xs:nonNegativeInteger', 0, 1, {'use': u'optional'}), - MemberSpec_('z_size', 'xs:nonNegativeInteger', 0, 1, {'use': u'optional'}), + MemberSpec_('x_size', 'xs:nonNegativeInteger', 0, 1, {'use': 'optional', 'name': 'x_size'}), + MemberSpec_('y_size', 'xs:nonNegativeInteger', 0, 1, {'use': 'optional', 'name': 'y_size'}), + MemberSpec_('z_size', 'xs:nonNegativeInteger', 0, 1, {'use': 'optional', 'name': 'z_size'}), ] subclass = None superclass = None - def __init__(self, x_size=None, y_size=None, z_size=None, **kwargs_): + def __init__(self, x_size=None, y_size=None, z_size=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None self.x_size = _cast(int, x_size) + self.x_size_nsprefix_ = None self.y_size = _cast(int, y_size) + self.y_size_nsprefix_ = None self.z_size = _cast(int, z_size) + self.z_size_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is 
not None: subclass = getSubclassFromModule_( @@ -5780,7 +7772,23 @@ def factory(*args_, **kwargs_): else: return GridLayout(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_xSize(self): + return self.x_size + def set_xSize(self, x_size): + self.x_size = x_size + def get_ySize(self): + return self.y_size + def set_ySize(self, y_size): + self.y_size = y_size + def get_zSize(self): + return self.z_size + def set_zSize(self, z_size): + self.z_size = z_size + def _hasContent(self): if ( ): @@ -5795,19 +7803,21 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='G eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'GridLayout': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GridLayout') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GridLayout') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='GridLayout', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='GridLayout', pretty_print=pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GridLayout'): + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GridLayout'): if self.x_size is not None and 'x_size' not in already_processed: already_processed.add('x_size') outfile.write(' xSize="%s"' % self.gds_format_integer(self.x_size, input_name='xSize')) @@ -5817,66 +7827,70 @@ def exportAttributes(self, outfile, level, already_processed, namespaceprefix_=' if self.z_size is not None and 'z_size' not in already_processed: already_processed.add('z_size') outfile.write(' zSize="%s"' % self.gds_format_integer(self.z_size, input_name='zSize')) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GridLayout', fromsubclass_=False, pretty_print=True): + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GridLayout', fromsubclass_=False, pretty_print=True): pass - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('xSize', node) if value is not None and 'xSize' not in already_processed: 
already_processed.add('xSize') - try: - self.x_size = int(value) - except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + self.x_size = self.gds_parse_integer(value, node, 'xSize') if self.x_size < 0: raise_parse_error(node, 'Invalid NonNegativeInteger') value = find_attr_value_('ySize', node) if value is not None and 'ySize' not in already_processed: already_processed.add('ySize') - try: - self.y_size = int(value) - except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + self.y_size = self.gds_parse_integer(value, node, 'ySize') if self.y_size < 0: raise_parse_error(node, 'Invalid NonNegativeInteger') value = find_attr_value_('zSize', node) if value is not None and 'zSize' not in already_processed: already_processed.add('zSize') - try: - self.z_size = int(value) - except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + self.z_size = self.gds_parse_integer(value, node, 'zSize') if self.z_size < 0: raise_parse_error(node, 'Invalid NonNegativeInteger') - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): pass # end class GridLayout class Instance(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('id', 'xs:nonNegativeInteger', 0, 1, {'use': 'optional'}), - MemberSpec_('i', 'xs:nonNegativeInteger', 0, 1, {'use': 'optional'}), - MemberSpec_('j', 'xs:nonNegativeInteger', 0, 1, {'use': 'optional'}), - MemberSpec_('k', 'xs:nonNegativeInteger', 0, 1, {'use': 'optional'}), - MemberSpec_('location', 'Location', 0, 0, {u'type': u'Location', u'name': u'location'}, None), + MemberSpec_('id', 'xs:nonNegativeInteger', 0, 1, {'use': 'optional', 'name': 'id'}), + MemberSpec_('i', 'xs:nonNegativeInteger', 0, 1, {'use': 'optional', 'name': 'i'}), + MemberSpec_('j', 'xs:nonNegativeInteger', 0, 1, {'use': 'optional', 'name': 'j'}), + MemberSpec_('k', 'xs:nonNegativeInteger', 0, 1, {'use': 'optional', 'name': 'k'}), + MemberSpec_('location', 'Location', 0, 0, {'name': 'location', 'type': 'Location'}, None), ] subclass = None superclass = None - def __init__(self, id=None, i=None, j=None, k=None, location=None, **kwargs_): + def __init__(self, id=None, i=None, j=None, k=None, location=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None self.id = _cast(int, id) + self.id_nsprefix_ = None self.i = _cast(int, i) + self.i_nsprefix_ = None self.j = _cast(int, j) + self.j_nsprefix_ = None self.k = _cast(int, k) + self.k_nsprefix_ = None self.location = location + self.location_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -5888,14 +7902,38 @@ def factory(*args_, **kwargs_): else: return Instance(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_location(self): + return self.location + def set_location(self, location): + self.location = location + def get_id(self): + return self.id + def set_id(self, id): + self.id = id + def get_i(self): + return self.i + def set_i(self, i): + self.i = i + def get_j(self): + return self.j + def set_j(self, j): + self.j = j + def 
get_k(self): + return self.k + def set_k(self, k): + self.k = k + def _hasContent(self): if ( self.location is not None ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Instance', pretty_print=True): + def export(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='Instance', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('Instance') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ @@ -5903,20 +7941,22 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='I eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'Instance': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Instance') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Instance') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Instance', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Instance', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Instance'): + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Instance'): if self.id is not None and 'id' not in already_processed: already_processed.add('id') outfile.write(' id="%s"' % self.gds_format_integer(self.id, input_name='id')) @@ -5929,61 +7969,54 @@ def exportAttributes(self, outfile, level, already_processed, namespaceprefix_=' if self.k is not None and 'k' not in already_processed: already_processed.add('k') outfile.write(' k="%s"' % self.gds_format_integer(self.k, input_name='k')) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Instance', fromsubclass_=False, pretty_print=True): + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='Instance', fromsubclass_=False, pretty_print=True): if pretty_print: eol_ = '\n' else: eol_ = '' if self.location is not None: + namespaceprefix_ = self.location_nsprefix_ + ':' if (UseCapturedNS_ and self.location_nsprefix_) else '' self.location.export(outfile, level, namespaceprefix_, namespacedef_='', name_='location', pretty_print=pretty_print) - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return 
self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('id', node) if value is not None and 'id' not in already_processed: already_processed.add('id') - try: - self.id = int(value) - except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + self.id = self.gds_parse_integer(value, node, 'id') if self.id < 0: raise_parse_error(node, 'Invalid NonNegativeInteger') value = find_attr_value_('i', node) if value is not None and 'i' not in already_processed: already_processed.add('i') - try: - self.i = int(value) - except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + self.i = self.gds_parse_integer(value, node, 'i') if self.i < 0: raise_parse_error(node, 'Invalid NonNegativeInteger') value = find_attr_value_('j', node) if value is not None and 'j' not in already_processed: already_processed.add('j') - try: - self.j = int(value) - except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + self.j = self.gds_parse_integer(value, node, 'j') if self.j < 0: raise_parse_error(node, 'Invalid NonNegativeInteger') value = find_attr_value_('k', node) if value is not None and 'k' not in already_processed: already_processed.add('k') - try: - self.k = int(value) - except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + self.k = self.gds_parse_integer(value, node, 'k') if self.k < 0: raise_parse_error(node, 'Invalid NonNegativeInteger') - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'location': obj_ = Location.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.location = obj_ obj_.original_tagname_ = 'location' @@ -5999,19 +8032,26 @@ def __repr__(self): class Location(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('x', 'xs:float', 0, 0, {'use': u'required'}), - MemberSpec_('y', 'xs:float', 0, 0, {'use': u'required'}), - MemberSpec_('z', 'xs:float', 0, 0, {'use': u'required'}), + MemberSpec_('x', 'xs:float', 0, 0, {'use': 'required', 'name': 'x'}), + MemberSpec_('y', 'xs:float', 0, 0, {'use': 'required', 'name': 'y'}), + MemberSpec_('z', 'xs:float', 0, 0, {'use': 'required', 'name': 'z'}), ] subclass = None superclass = None - def __init__(self, x=None, y=None, z=None, **kwargs_): + def __init__(self, x=None, y=None, z=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None self.x = _cast(float, x) + self.x_nsprefix_ = None self.y = _cast(float, y) + self.y_nsprefix_ = None self.z = _cast(float, z) + self.z_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -6023,7 +8063,23 @@ def factory(*args_, **kwargs_): else: return Location(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_x(self): + return self.x + def set_x(self, x): + self.x = x + def get_y(self): + return self.y + def set_y(self, y): + self.y = y + def get_z(self): + return self.z + def set_z(self, z): + 
self.z = z + def _hasContent(self): if ( ): @@ -6038,19 +8094,21 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='L eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'Location': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Location') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Location') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Location', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Location', pretty_print=pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Location'): + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Location'): if self.x is not None and 'x' not in already_processed: already_processed.add('x') outfile.write(' x="%s"' % self.gds_format_float(self.x, input_name='x')) @@ -6060,38 +8118,36 @@ def exportAttributes(self, outfile, level, already_processed, namespaceprefix_=' if self.z is not None and 'z' not in already_processed: already_processed.add('z') outfile.write(' z="%s"' % self.gds_format_float(self.z, input_name='z')) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Location', fromsubclass_=False, pretty_print=True): + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Location', fromsubclass_=False, pretty_print=True): pass - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('x', node) if value is not None and 'x' not in already_processed: already_processed.add('x') - try: - self.x = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (x): %s' % exp) + value = self.gds_parse_float(value, node, 'x') + self.x = value value = find_attr_value_('y', node) if value is not None and 'y' not in already_processed: already_processed.add('y') - try: - self.y = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (y): %s' % exp) + value = self.gds_parse_float(value, node, 'y') + self.y = value value = find_attr_value_('z', node) if value is not None and 'z' not in already_processed: already_processed.add('z') - try: - self.z = float(value) - except ValueError as 
exp: - raise ValueError('Bad float/double attribute (z): %s' % exp) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + value = self.gds_parse_float(value, node, 'z') + self.z = value + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): pass def _format(self,value): @@ -6113,24 +8169,33 @@ def __repr__(self): class SynapticConnection(GeneratedsSuper): - """Single explicit connection. Introduced to test connections in LEMS. - Will probably be removed in favour of connections wrapped in - projection element""" + """SynapticConnection -- Single explicit connection. Introduced to test connections in LEMS. Will probably be removed in favour of + connections wrapped in projection element + + """ + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('from_', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('to', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('synapse', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('destination', 'NmlId', 0, 1, {'use': u'optional'}), + MemberSpec_('from_', 'xs:string', 0, 0, {'use': 'required', 'name': 'from_'}), + MemberSpec_('to', 'xs:string', 0, 0, {'use': 'required', 'name': 'to'}), + MemberSpec_('synapse', 'xs:string', 0, 0, {'use': 'required', 'name': 'synapse'}), + MemberSpec_('destination', 'NmlId', 0, 1, {'use': 'optional', 'name': 'destination'}), ] subclass = None superclass = None - def __init__(self, from_=None, to=None, synapse=None, destination=None, **kwargs_): + def __init__(self, from_=None, to=None, synapse=None, destination=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None self.from_ = _cast(None, from_) + self.from__nsprefix_ = None self.to = _cast(None, to) + self.to_nsprefix_ = None self.synapse = _cast(None, synapse) + self.synapse_nsprefix_ = None self.destination = _cast(None, destination) + self.destination_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -6142,14 +8207,38 @@ def factory(*args_, **kwargs_): else: return SynapticConnection(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_from(self): + return self.from_ + def set_from(self, from_): + self.from_ = from_ + def get_to(self): + return self.to + def set_to(self, to): + self.to = to + def get_synapse(self): + return self.synapse + def set_synapse(self, synapse): + self.synapse = synapse + def get_destination(self): + return self.destination + def set_destination(self, destination): + self.destination = destination def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. 
-        if value is not None and Validate_simpletypes_:
+        if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
+            if not isinstance(value, str):
+                lineno = self.gds_get_node_lineno_()
+                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
+                return False
             if not self.gds_validate_simple_patterns(
                     self.validate_NmlId_patterns_, value):
-                warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, ))
-    validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']]
-    def hasContent_(self):
+                self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_NmlId_patterns_, ))
+    validate_NmlId_patterns_ = [['^([a-zA-Z_][a-zA-Z0-9_]*)$']]
+    def _hasContent(self):
         if (

         ):
@@ -6164,19 +8253,21 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='S
             eol_ = '\n'
         else:
             eol_ = ''
-        if self.original_tagname_ is not None:
+        if self.original_tagname_ is not None and name_ == 'SynapticConnection':
             name_ = self.original_tagname_
+        if UseCapturedNS_ and self.ns_prefix_:
+            namespaceprefix_ = self.ns_prefix_ + ':'
         showIndent(outfile, level, pretty_print)
         outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
         already_processed = set()
-        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SynapticConnection')
-        if self.hasContent_():
+        self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SynapticConnection')
+        if self._hasContent():
             outfile.write('>%s' % (eol_, ))
-            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SynapticConnection', pretty_print=pretty_print)
+            self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SynapticConnection', pretty_print=pretty_print)
             outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
         else:
             outfile.write('/>%s' % (eol_, ))
-    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SynapticConnection'):
+    def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SynapticConnection'):
         if self.from_ is not None and 'from_' not in already_processed:
             already_processed.add('from_')
             outfile.write(' from=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.from_), input_name='from')), ))
@@ -6188,17 +8279,21 @@ def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='
             outfile.write(' synapse=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.synapse), input_name='synapse')), ))
         if self.destination is not None and 'destination' not in already_processed:
             already_processed.add('destination')
-            outfile.write(' destination=%s' % (quote_attrib(self.destination), ))
-    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SynapticConnection', fromsubclass_=False, pretty_print=True):
+            outfile.write(' destination=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.destination), input_name='destination')), ))
+    def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SynapticConnection', fromsubclass_=False, pretty_print=True):
         pass
-    def build(self, node):
+    def build(self, node, gds_collector_=None):
+        self.gds_collector_ = gds_collector_
+        if SaveElementTreeNode:
+            self.gds_elementtree_node_ = node
         already_processed = set()
-        self.buildAttributes(node, node.attrib, already_processed)
+        self.ns_prefix_ = node.prefix
+        self._buildAttributes(node, node.attrib, already_processed)
         for child in node:
             nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
-            self.buildChildren(child, node, nodeName_)
+            self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
         return self
-    def buildAttributes(self, node, attrs, already_processed):
+    def _buildAttributes(self, node, attrs, already_processed):
         value = find_attr_value_('from', node)
         if value is not None and 'from' not in already_processed:
             already_processed.add('from')
@@ -6216,7 +8311,7 @@ def buildAttributes(self, node, attrs, already_processed):
             already_processed.add('destination')
             self.destination = value
             self.validate_NmlId(self.destination)    # validate type NmlId
-    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
+    def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
         pass

     def _get_cell_id(self,ref):
@@ -6243,22 +8338,30 @@ def __str__(self):


 class ExplicitInput(GeneratedsSuper):
-    """Single explicit input. Introduced to test inputs in LEMS. Will
-    probably be removed in favour of inputs wrapped in inputList
-    element"""
+    """ExplicitInput -- Single explicit input. Introduced to test inputs in LEMS. Will probably be removed in favour of
+    inputs wrapped in inputList element
+
+    """
+    __hash__ = GeneratedsSuper.__hash__
     member_data_items_ = [
-        MemberSpec_('target', 'xs:string', 0, 0, {'use': u'required'}),
-        MemberSpec_('input', 'xs:string', 0, 0, {'use': u'required'}),
-        MemberSpec_('destination', 'xs:string', 0, 1, {'use': 'optional'}),
+        MemberSpec_('target', 'xs:string', 0, 0, {'use': 'required', 'name': 'target'}),
+        MemberSpec_('input', 'xs:string', 0, 0, {'use': 'required', 'name': 'input'}),
+        MemberSpec_('destination', 'xs:string', 0, 1, {'use': 'optional', 'name': 'destination'}),
     ]
     subclass = None
     superclass = None
-    def __init__(self, target=None, input=None, destination=None, **kwargs_):
+    def __init__(self, target=None, input=None, destination=None, gds_collector_=None, **kwargs_):
+        self.gds_collector_ = gds_collector_
+        self.gds_elementtree_node_ = None
         self.original_tagname_ = None
         self.parent_object_ = kwargs_.get('parent_object_')
+        self.ns_prefix_ = None
         self.target = _cast(None, target)
+        self.target_nsprefix_ = None
         self.input = _cast(None, input)
+        self.input_nsprefix_ = None
         self.destination = _cast(None, destination)
+        self.destination_nsprefix_ = None
     def factory(*args_, **kwargs_):
         if CurrentSubclassModule_ is not None:
             subclass = getSubclassFromModule_(
@@ -6270,7 +8373,23 @@ def factory(*args_, **kwargs_):
         else:
             return ExplicitInput(*args_, **kwargs_)
     factory = staticmethod(factory)
-    def hasContent_(self):
+    def get_ns_prefix_(self):
+        return self.ns_prefix_
+    def set_ns_prefix_(self, ns_prefix):
+        self.ns_prefix_ = ns_prefix
+    def get_target(self):
+        return self.target
+    def set_target(self, target):
+        self.target = target
+    def get_input(self):
+        return self.input
+    def set_input(self, input):
+        self.input = input
+    def get_destination(self):
+        return self.destination
+    def set_destination(self, destination):
+        self.destination = destination
+    def _hasContent(self):
         if (

         ):
@@ -6285,19 +8404,21 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='E
             eol_ = '\n'
         else:
             eol_ = ''
-        if self.original_tagname_ is not None:
+        if self.original_tagname_ is not None and name_ == 'ExplicitInput':
             name_ = self.original_tagname_
+        if UseCapturedNS_ and self.ns_prefix_:
+            namespaceprefix_ = self.ns_prefix_ + ':'
         showIndent(outfile, level, pretty_print)
         outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
         already_processed = set()
-        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExplicitInput')
-        if self.hasContent_():
+        self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExplicitInput')
+        if self._hasContent():
             outfile.write('>%s' % (eol_, ))
-            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ExplicitInput', pretty_print=pretty_print)
+            self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ExplicitInput', pretty_print=pretty_print)
             outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
         else:
             outfile.write('/>%s' % (eol_, ))
-    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ExplicitInput'):
+    def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ExplicitInput'):
         if self.target is not None and 'target' not in already_processed:
             already_processed.add('target')
             outfile.write(' target=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.target), input_name='target')), ))
@@ -6307,16 +8428,20 @@ def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='
         if self.destination is not None and 'destination' not in already_processed:
             already_processed.add('destination')
             outfile.write(' destination=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.destination), input_name='destination')), ))
-    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExplicitInput', fromsubclass_=False, pretty_print=True):
+    def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExplicitInput', fromsubclass_=False, pretty_print=True):
         pass
-    def build(self, node):
+    def build(self, node, gds_collector_=None):
+        self.gds_collector_ = gds_collector_
+        if SaveElementTreeNode:
+            self.gds_elementtree_node_ = node
         already_processed = set()
-        self.buildAttributes(node, node.attrib, already_processed)
+        self.ns_prefix_ = node.prefix
+        self._buildAttributes(node, node.attrib, already_processed)
         for child in node:
             nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
-            self.buildChildren(child, node, nodeName_)
+            self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
         return self
-    def buildAttributes(self, node, attrs, already_processed):
+    def _buildAttributes(self, node, attrs, already_processed):
         value = find_attr_value_('target', node)
         if value is not None and 'target' not in already_processed:
             already_processed.add('target')
@@ -6329,7 +8454,7 @@ def buildAttributes(self, node, attrs, already_processed):
         if value is not None and 'destination' not in already_processed:
             already_processed.add('destination')
             self.destination = value
-    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
+    def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
         pass

     def _get_cell_id(self, id_string):
@@ -6387,24 +8512,35 @@ def __str__(self):


 class Input(GeneratedsSuper):
-    """Individual input to the cell specified by target"""
+    """Input -- Individual input to the cell specified by target
+
+    """
+    __hash__ = GeneratedsSuper.__hash__
     member_data_items_ = [
-        MemberSpec_('id', 'NonNegativeInteger', 0, 0, {'use': u'required'}),
-        MemberSpec_('target', 'xs:string', 0, 0, {'use': u'required'}),
-        MemberSpec_('destination', 'NmlId', 0, 0, {'use': u'required'}),
-        MemberSpec_('segment_id', 'NonNegativeInteger', 0, 1, {'use': 'optional'}),
-        MemberSpec_('fraction_along', 'ZeroToOne', 0, 1, {'use': 'optional'}),
+        MemberSpec_('id', 'NonNegativeInteger', 0, 0, {'use': 'required', 'name': 'id'}),
+        MemberSpec_('target', 'xs:string', 0, 0, {'use': 'required', 'name': 'target'}),
+        MemberSpec_('destination', 'NmlId', 0, 0, {'use': 'required', 'name': 'destination'}),
+        MemberSpec_('segment_id', 'NonNegativeInteger', 0, 1, {'use': 'optional', 'name': 'segment_id'}),
+        MemberSpec_('fraction_along', 'ZeroToOne', 0, 1, {'use': 'optional', 'name': 'fraction_along'}),
     ]
     subclass = None
     superclass = None
-    def __init__(self, id=None, target=None, destination=None, segment_id=None, fraction_along=None, extensiontype_=None, **kwargs_):
+    def __init__(self, id=None, target=None, destination=None, segment_id=None, fraction_along=None, extensiontype_=None, gds_collector_=None, **kwargs_):
+        self.gds_collector_ = gds_collector_
+        self.gds_elementtree_node_ = None
         self.original_tagname_ = None
         self.parent_object_ = kwargs_.get('parent_object_')
+        self.ns_prefix_ = None
         self.id = _cast(int, id)
+        self.id_nsprefix_ = None
         self.target = _cast(None, target)
+        self.target_nsprefix_ = None
         self.destination = _cast(None, destination)
+        self.destination_nsprefix_ = None
         self.segment_id = _cast(int, segment_id)
+        self.segment_id_nsprefix_ = None
         self.fraction_along = _cast(float, fraction_along)
+        self.fraction_along_nsprefix_ = None
         self.extensiontype_ = extensiontype_
     def factory(*args_, **kwargs_):
         if CurrentSubclassModule_ is not None:
@@ -6417,25 +8553,67 @@ def factory(*args_, **kwargs_):
         else:
             return Input(*args_, **kwargs_)
     factory = staticmethod(factory)
+    def get_ns_prefix_(self):
+        return self.ns_prefix_
+    def set_ns_prefix_(self, ns_prefix):
+        self.ns_prefix_ = ns_prefix
+    def get_id(self):
+        return self.id
+    def set_id(self, id):
+        self.id = id
+    def get_target(self):
+        return self.target
+    def set_target(self, target):
+        self.target = target
+    def get_destination(self):
+        return self.destination
+    def set_destination(self, destination):
+        self.destination = destination
+    def get_segmentId(self):
+        return self.segment_id
+    def set_segmentId(self, segment_id):
+        self.segment_id = segment_id
+    def get_fractionAlong(self):
+        return self.fraction_along
+    def set_fractionAlong(self, fraction_along):
+        self.fraction_along = fraction_along
+    def get_extensiontype_(self): return self.extensiontype_
+    def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_
     def validate_NonNegativeInteger(self, value):
         # Validate type NonNegativeInteger, a restriction on xs:nonNegativeInteger.
-        if value is not None and Validate_simpletypes_:
+        if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
+            if not isinstance(value, int):
+                lineno = self.gds_get_node_lineno_()
+                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (int)' % {"value": value, "lineno": lineno, })
+                return False
             pass
     def validate_NmlId(self, value):
         # Validate type NmlId, a restriction on xs:string.
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_NmlId_patterns_, )) + validate_NmlId_patterns_ = [['^([a-zA-Z_][a-zA-Z0-9_]*)$']] def validate_ZeroToOne(self, value): # Validate type ZeroToOne, a restriction on xs:float. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, float): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (float)' % {"value": value, "lineno": lineno, }) + return False if value < 0: - warnings_.warn('Value "%(value)s" does not match xsd minInclusive restriction on ZeroToOne' % {"value" : value} ) + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd minInclusive restriction on ZeroToOne' % {"value": value, "lineno": lineno} ) + result = False if value > 1: - warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on ZeroToOne' % {"value" : value} ) - def hasContent_(self): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd maxInclusive restriction on ZeroToOne' % {"value": value, "lineno": lineno} ) + result = False + def _hasContent(self): if ( ): @@ -6450,55 +8628,62 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='I eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'Input': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Input') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Input') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Input', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Input', pretty_print=pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Input'): + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Input'): if self.id is not None and 'id' not in already_processed: already_processed.add('id') - outfile.write(' id=%s' % (quote_attrib(self.id), )) + outfile.write(' 
id="%s"' % self.gds_format_integer(self.id, input_name='id')) if self.target is not None and 'target' not in already_processed: already_processed.add('target') outfile.write(' target=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.target), input_name='target')), )) if self.destination is not None and 'destination' not in already_processed: already_processed.add('destination') - outfile.write(' destination=%s' % (quote_attrib(self.destination), )) + outfile.write(' destination=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.destination), input_name='destination')), )) if self.segment_id is not None and 'segment_id' not in already_processed: already_processed.add('segment_id') - outfile.write(' segmentId=%s' % (quote_attrib(self.segment_id), )) + outfile.write(' segmentId="%s"' % self.gds_format_integer(self.segment_id, input_name='segmentId')) if self.fraction_along is not None and 'fraction_along' not in already_processed: already_processed.add('fraction_along') - outfile.write(' fractionAlong=%s' % (quote_attrib(self.fraction_along), )) + outfile.write(' fractionAlong="%s"' % self.gds_format_float(self.fraction_along, input_name='fractionAlong')) if self.extensiontype_ is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Input', fromsubclass_=False, pretty_print=True): + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get(self.extensiontype_, '') + outfile.write(' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_)) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Input', fromsubclass_=False, pretty_print=True): pass - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('id', node) if value is not None and 'id' not in already_processed: already_processed.add('id') - try: - self.id = int(value) - except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + self.id = self.gds_parse_integer(value, node, 'id') if self.id < 0: raise_parse_error(node, 'Invalid NonNegativeInteger') self.validate_NonNegativeInteger(self.id) # validate type NonNegativeInteger @@ -6514,26 +8699,21 @@ def buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('segmentId', node) if value is not None and 'segmentId' not in already_processed: already_processed.add('segmentId') - try: - self.segment_id = int(value) - except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + self.segment_id = self.gds_parse_integer(value, node, 'segmentId') if 
self.segment_id < 0: raise_parse_error(node, 'Invalid NonNegativeInteger') self.validate_NonNegativeInteger(self.segment_id) # validate type NonNegativeInteger value = find_attr_value_('fractionAlong', node) if value is not None and 'fractionAlong' not in already_processed: already_processed.add('fractionAlong') - try: - self.fraction_along = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (fractionAlong): %s' % exp) + value = self.gds_parse_float(value, node, 'fractionAlong') + self.fraction_along = value self.validate_ZeroToOne(self.fraction_along) # validate type ZeroToOne value = find_attr_value_('xsi:type', node) if value is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') self.extensiontype_ = value - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): pass def _get_cell_id(self, id_string): @@ -6570,18 +8750,24 @@ def __str__(self): class InputW(Input): - """Individual input to the cell specified by target. Includes setting - of _weight for the connection""" + """InputW -- Individual input to the cell specified by target. Includes setting of _weight for the connection + + """ + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('weight', 'xs:float', 0, 0, {'use': u'required'}), + MemberSpec_('weight', 'xs:float', 0, 0, {'use': 'required', 'name': 'weight'}), ] subclass = None superclass = Input - def __init__(self, id=None, target=None, destination=None, segment_id=None, fraction_along=None, weight=None, **kwargs_): + def __init__(self, id=None, target=None, destination=None, segment_id=None, fraction_along=None, weight=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(InputW, self).__init__(id, target, destination, segment_id, fraction_along, **kwargs_) + self.ns_prefix_ = None + super(globals().get("InputW"), self).__init__(id, target, destination, segment_id, fraction_along, **kwargs_) self.weight = _cast(float, weight) + self.weight_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -6593,9 +8779,17 @@ def factory(*args_, **kwargs_): else: return InputW(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_weight(self): + return self.weight + def set_weight(self, weight): + self.weight = weight + def _hasContent(self): if ( - super(InputW, self).hasContent_() + super(InputW, self)._hasContent() ): return True else: @@ -6608,44 +8802,48 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='I eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'InputW': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='InputW') - if self.hasContent_(): + self._exportAttributes(outfile, level, 
already_processed, namespaceprefix_, name_='InputW') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='InputW', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='InputW', pretty_print=pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='InputW'): - super(InputW, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='InputW') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='InputW'): + super(InputW, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='InputW') if self.weight is not None and 'weight' not in already_processed: already_processed.add('weight') outfile.write(' weight="%s"' % self.gds_format_float(self.weight, input_name='weight')) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='InputW', fromsubclass_=False, pretty_print=True): - super(InputW, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='InputW', fromsubclass_=False, pretty_print=True): + super(InputW, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) pass - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('weight', node) if value is not None and 'weight' not in already_processed: already_processed.add('weight') - try: - self.weight = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (weight): %s' % exp) - super(InputW, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(InputW, self).buildChildren(child_, node, nodeName_, True) + value = self.gds_parse_float(value, node, 'weight') + self.weight = value + super(InputW, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(InputW, self)._buildChildren(child_, node, nodeName_, True) pass def get_weight(self): @@ -6664,18 +8862,23 @@ def __str__(self): class BaseWithoutId(GeneratedsSuper): - """Base element without ID specified *yet*, e.g. for an element with a - particular requirement on its id which does not comply with - NmlId (e.g. Segment needs nonNegativeInteger).""" + """BaseWithoutId -- Base element without ID specified *yet*, e.g. for an element with a particular requirement on its id which does not comply with NmlId (e.g. Segment needs nonNegativeInteger). 
+ + """ + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('neuro_lex_id', 'NeuroLexId', 0, 1, {'use': u'optional'}), + MemberSpec_('neuro_lex_id', 'NeuroLexId', 0, 1, {'use': 'optional', 'name': 'neuro_lex_id'}), ] subclass = None superclass = None - def __init__(self, neuro_lex_id=None, extensiontype_=None, **kwargs_): + def __init__(self, neuro_lex_id=None, extensiontype_=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') + self.ns_prefix_ = None self.neuro_lex_id = _cast(None, neuro_lex_id) + self.neuro_lex_id_nsprefix_ = None self.extensiontype_ = extensiontype_ def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: @@ -6688,14 +8891,28 @@ def factory(*args_, **kwargs_): else: return BaseWithoutId(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_neuroLexId(self): + return self.neuro_lex_id + def set_neuroLexId(self, neuro_lex_id): + self.neuro_lex_id = neuro_lex_id + def get_extensiontype_(self): return self.extensiontype_ + def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ def validate_NeuroLexId(self, value): # Validate type NeuroLexId, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_NeuroLexId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NeuroLexId_patterns_, )) - validate_NeuroLexId_patterns_ = [[u'^[a-zA-Z0-9_:]*$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_NeuroLexId_patterns_, )) + validate_NeuroLexId_patterns_ = [['^([a-zA-Z0-9_:]*)$']] + def _hasContent(self): if ( ): @@ -6710,36 +8927,46 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='B eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'BaseWithoutId': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseWithoutId') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseWithoutId') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BaseWithoutId', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BaseWithoutId', pretty_print=pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, 
outfile, level, already_processed, namespaceprefix_='', name_='BaseWithoutId'): + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BaseWithoutId'): if self.neuro_lex_id is not None and 'neuro_lex_id' not in already_processed: already_processed.add('neuro_lex_id') - outfile.write(' neuroLexId=%s' % (quote_attrib(self.neuro_lex_id), )) + outfile.write(' neuroLexId=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.neuro_lex_id), input_name='neuroLexId')), )) if self.extensiontype_ is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseWithoutId', fromsubclass_=False, pretty_print=True): + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get(self.extensiontype_, '') + outfile.write(' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_)) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseWithoutId', fromsubclass_=False, pretty_print=True): pass - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('neuroLexId', node) if value is not None and 'neuroLexId' not in already_processed: already_processed.add('neuroLexId') @@ -6749,24 +8976,30 @@ def buildAttributes(self, node, attrs, already_processed): if value is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') self.extensiontype_ = value - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): pass # end class BaseWithoutId class BaseNonNegativeIntegerId(BaseWithoutId): - """Anything which can have a unique (within its parent) id, which must - be an integer zero or greater.""" + """BaseNonNegativeIntegerId -- Anything which can have a unique (within its parent) id, which must be an integer zero or greater. 
+ + """ + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('id', 'NonNegativeInteger', 0, 0, {'use': u'required'}), + MemberSpec_('id', 'NonNegativeInteger', 0, 0, {'use': 'required', 'name': 'id'}), ] subclass = None superclass = BaseWithoutId - def __init__(self, neuro_lex_id=None, id=None, extensiontype_=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, extensiontype_=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(BaseNonNegativeIntegerId, self).__init__(neuro_lex_id, extensiontype_, **kwargs_) + self.ns_prefix_ = None + super(globals().get("BaseNonNegativeIntegerId"), self).__init__(neuro_lex_id, extensiontype_, **kwargs_) self.id = _cast(int, id) + self.id_nsprefix_ = None self.extensiontype_ = extensiontype_ def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: @@ -6779,13 +9012,27 @@ def factory(*args_, **kwargs_): else: return BaseNonNegativeIntegerId(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_id(self): + return self.id + def set_id(self, id): + self.id = id + def get_extensiontype_(self): return self.extensiontype_ + def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ def validate_NonNegativeInteger(self, value): # Validate type NonNegativeInteger, a restriction on xs:nonNegativeInteger. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, int): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (int)' % {"value": value, "lineno": lineno, }) + return False pass - def hasContent_(self): + def _hasContent(self): if ( - super(BaseNonNegativeIntegerId, self).hasContent_() + super(BaseNonNegativeIntegerId, self)._hasContent() ): return True else: @@ -6798,45 +9045,52 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='B eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'BaseNonNegativeIntegerId': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseNonNegativeIntegerId') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseNonNegativeIntegerId') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BaseNonNegativeIntegerId', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BaseNonNegativeIntegerId', pretty_print=pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BaseNonNegativeIntegerId'): - super(BaseNonNegativeIntegerId, 
self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseNonNegativeIntegerId') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BaseNonNegativeIntegerId'): + super(BaseNonNegativeIntegerId, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseNonNegativeIntegerId') if self.id is not None and 'id' not in already_processed: already_processed.add('id') - outfile.write(' id=%s' % (quote_attrib(self.id), )) + outfile.write(' id="%s"' % self.gds_format_integer(self.id, input_name='id')) if self.extensiontype_ is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseNonNegativeIntegerId', fromsubclass_=False, pretty_print=True): - super(BaseNonNegativeIntegerId, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get(self.extensiontype_, '') + outfile.write(' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_)) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseNonNegativeIntegerId', fromsubclass_=False, pretty_print=True): + super(BaseNonNegativeIntegerId, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) pass - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('id', node) if value is not None and 'id' not in already_processed: already_processed.add('id') - try: - self.id = int(value) - except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + self.id = self.gds_parse_integer(value, node, 'id') if self.id < 0: raise_parse_error(node, 'Invalid NonNegativeInteger') self.validate_NonNegativeInteger(self.id) # validate type NonNegativeInteger @@ -6844,27 +9098,32 @@ def buildAttributes(self, node, attrs, already_processed): if value is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') self.extensiontype_ = value - super(BaseNonNegativeIntegerId, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(BaseNonNegativeIntegerId, self).buildChildren(child_, node, nodeName_, True) + super(BaseNonNegativeIntegerId, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(BaseNonNegativeIntegerId, self)._buildChildren(child_, node, nodeName_, True) 
pass # end class BaseNonNegativeIntegerId class Base(BaseWithoutId): - """Anything which can have a unique (within its parent) id of the form - NmlId (spaceless combination of letters, numbers and - underscore).""" + """Base -- Anything which can have a unique (within its parent) id of the form NmlId (spaceless combination of letters, numbers and underscore). + + """ + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('id', 'NmlId', 0, 0, {'use': u'required'}), + MemberSpec_('id', 'NmlId', 0, 0, {'use': 'required', 'name': 'id'}), ] subclass = None superclass = BaseWithoutId - def __init__(self, neuro_lex_id=None, id=None, extensiontype_=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, extensiontype_=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(Base, self).__init__(neuro_lex_id, extensiontype_, **kwargs_) + self.ns_prefix_ = None + super(globals().get("Base"), self).__init__(neuro_lex_id, extensiontype_, **kwargs_) self.id = _cast(None, id) + self.id_nsprefix_ = None self.extensiontype_ = extensiontype_ def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: @@ -6877,16 +9136,30 @@ def factory(*args_, **kwargs_): else: return Base(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_id(self): + return self.id + def set_id(self, id): + self.id = id + def get_extensiontype_(self): return self.extensiontype_ + def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_NmlId_patterns_, )) + validate_NmlId_patterns_ = [['^([a-zA-Z_][a-zA-Z0-9_]*)$']] + def _hasContent(self): if ( - super(Base, self).hasContent_() + super(Base, self)._hasContent() ): return True else: @@ -6899,38 +9172,48 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='B eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'Base': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Base') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Base') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Base', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Base', pretty_print=pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Base'): - super(Base, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Base') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Base'): + super(Base, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Base') if self.id is not None and 'id' not in already_processed: already_processed.add('id') - outfile.write(' id=%s' % (quote_attrib(self.id), )) + outfile.write(' id=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.id), input_name='id')), )) if self.extensiontype_ is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Base', fromsubclass_=False, pretty_print=True): - super(Base, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get(self.extensiontype_, '') + outfile.write(' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_)) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + def 
_exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Base', fromsubclass_=False, pretty_print=True): + super(Base, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) pass - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('id', node) if value is not None and 'id' not in already_processed: already_processed.add('id') @@ -6940,36 +9223,45 @@ def buildAttributes(self, node, attrs, already_processed): if value is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') self.extensiontype_ = value - super(Base, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(Base, self).buildChildren(child_, node, nodeName_, True) + super(Base, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(Base, self)._buildChildren(child_, node, nodeName_, True) pass # end class Base class Standalone(Base): - """Elements which can stand alone and be referenced by id, e.g. cell, - morphology.""" + """Standalone -- Elements which can stand alone and be referenced by id, e.g. cell, morphology. 
+ + """ + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('metaid', 'MetaId', 0, 1, {'use': u'optional'}), - MemberSpec_('notes', ['Notes', 'xs:string'], 0, 1, {u'type': u'xs:string', u'name': u'notes', u'minOccurs': u'0'}, None), - MemberSpec_('properties', 'Property', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Property', u'name': u'property', u'minOccurs': u'0'}, None), - MemberSpec_('annotation', 'Annotation', 0, 1, {u'type': u'Annotation', u'name': u'annotation', u'minOccurs': u'0'}, None), + MemberSpec_('metaid', 'MetaId', 0, 1, {'use': 'optional', 'name': 'metaid'}), + MemberSpec_('notes', ['Notes', 'xs:string'], 0, 1, {'minOccurs': '0', 'name': 'notes', 'type': 'xs:string'}, None), + MemberSpec_('properties', 'Property', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'property', 'type': 'Property'}, None), + MemberSpec_('annotation', 'Annotation', 0, 1, {'minOccurs': '0', 'name': 'annotation', 'type': 'Annotation'}, None), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, extensiontype_=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, extensiontype_=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(Standalone, self).__init__(neuro_lex_id, id, extensiontype_, **kwargs_) + self.ns_prefix_ = None + super(globals().get("Standalone"), self).__init__(neuro_lex_id, id, extensiontype_, **kwargs_) self.metaid = _cast(None, metaid) + self.metaid_nsprefix_ = None self.notes = notes self.validate_Notes(self.notes) + self.notes_nsprefix_ = None if properties is None: self.properties = [] else: self.properties = properties + self.properties_nsprefix_ = None self.annotation = annotation + self.annotation_nsprefix_ = None self.extensiontype_ = extensiontype_ def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: @@ -6982,28 +9274,66 @@ def factory(*args_, **kwargs_): else: return Standalone(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_notes(self): + return self.notes + def set_notes(self, notes): + self.notes = notes + def get_property(self): + return self.properties + def set_property(self, properties): + self.properties = properties + def add_property(self, value): + self.properties.append(value) + def insert_property_at(self, index, value): + self.properties.insert(index, value) + def replace_property_at(self, index, value): + self.properties[index] = value + def get_annotation(self): + return self.annotation + def set_annotation(self, annotation): + self.annotation = annotation + def get_metaid(self): + return self.metaid + def set_metaid(self, metaid): + self.metaid = metaid + def get_extensiontype_(self): return self.extensiontype_ + def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ def validate_Notes(self, value): + result = True # Validate type Notes, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False pass + return result def validate_MetaId(self, value): # Validate type MetaId, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_MetaId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_MetaId_patterns_, )) - validate_MetaId_patterns_ = [[u'^[a-zA-Z0-9_]*$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_MetaId_patterns_, )) + validate_MetaId_patterns_ = [['^([a-zA-Z0-9_]*)$']] + def _hasContent(self): if ( self.notes is not None or self.properties or self.annotation is not None or - super(Standalone, self).hasContent_() + super(Standalone, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Standalone', pretty_print=True): + def export(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='Standalone', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('Standalone') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ @@ -7011,49 +9341,62 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='S eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'Standalone': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Standalone') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Standalone') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Standalone', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Standalone', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Standalone'): - super(Standalone, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Standalone') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Standalone'): + super(Standalone, self)._exportAttributes(outfile, level, 
already_processed, namespaceprefix_, name_='Standalone') if self.metaid is not None and 'metaid' not in already_processed: already_processed.add('metaid') - outfile.write(' metaid=%s' % (quote_attrib(self.metaid), )) + outfile.write(' metaid=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.metaid), input_name='metaid')), )) if self.extensiontype_ is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Standalone', fromsubclass_=False, pretty_print=True): - super(Standalone, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get(self.extensiontype_, '') + outfile.write(' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_)) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='Standalone', fromsubclass_=False, pretty_print=True): + super(Standalone, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) if pretty_print: eol_ = '\n' else: eol_ = '' if self.notes is not None: + namespaceprefix_ = self.notes_nsprefix_ + ':' if (UseCapturedNS_ and self.notes_nsprefix_) else '' showIndent(outfile, level, pretty_print) outfile.write('<%snotes>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.notes), input_name='notes')), namespaceprefix_ , eol_)) for property_ in self.properties: + namespaceprefix_ = self.properties_nsprefix_ + ':' if (UseCapturedNS_ and self.properties_nsprefix_) else '' property_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='property', pretty_print=pretty_print) if self.annotation is not None: + namespaceprefix_ = self.annotation_nsprefix_ + ':' if (UseCapturedNS_ and self.annotation_nsprefix_) else '' self.annotation.export(outfile, level, namespaceprefix_, namespacedef_='', name_='annotation', pretty_print=pretty_print) - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('metaid', node) if value is not None and 'metaid' not in already_processed: already_processed.add('metaid') @@ -7063,43 +9406,52 @@ def buildAttributes(self, node, attrs, already_processed): if value is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') self.extensiontype_ = value - super(Standalone, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + super(Standalone, 
self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'notes': - notes_ = child_.text - notes_ = self.gds_validate_string(notes_, node, 'notes') - self.notes = notes_ + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'notes') + value_ = self.gds_validate_string(value_, node, 'notes') + self.notes = value_ + self.notes_nsprefix_ = child_.prefix # validate type Notes self.validate_Notes(self.notes) elif nodeName_ == 'property': obj_ = Property.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.properties.append(obj_) obj_.original_tagname_ = 'property' elif nodeName_ == 'annotation': obj_ = Annotation.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.annotation = obj_ obj_.original_tagname_ = 'annotation' - super(Standalone, self).buildChildren(child_, node, nodeName_, True) + super(Standalone, self)._buildChildren(child_, node, nodeName_, True) # end class Standalone class SpikeSourcePoisson(Standalone): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('start', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('duration', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('rate', 'Nml2Quantity_pertime', 0, 0, {'use': u'required'}), + MemberSpec_('start', 'Nml2Quantity_time', 0, 0, {'use': 'required', 'name': 'start'}), + MemberSpec_('duration', 'Nml2Quantity_time', 0, 0, {'use': 'required', 'name': 'duration'}), + MemberSpec_('rate', 'Nml2Quantity_pertime', 0, 0, {'use': 'required', 'name': 'rate'}), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, start=None, duration=None, rate=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, start=None, duration=None, rate=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(SpikeSourcePoisson, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.ns_prefix_ = None + super(globals().get("SpikeSourcePoisson"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) self.start = _cast(None, start) + self.start_nsprefix_ = None self.duration = _cast(None, duration) + self.duration_nsprefix_ = None self.rate = _cast(None, rate) + self.rate_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -7111,23 +9463,47 @@ def factory(*args_, **kwargs_): else: return SpikeSourcePoisson(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_start(self): + return self.start + def set_start(self, start): + self.start = start + def get_duration(self): + return self.duration + def set_duration(self, duration): + self.duration = duration + def get_rate(self): + return self.rate + def set_rate(self, rate): + self.rate = rate def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_time_patterns_, )) + validate_Nml2Quantity_time_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms))$']] def validate_Nml2Quantity_pertime(self, value): # Validate type Nml2Quantity_pertime, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_pertime_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_pertime_patterns_, )) - validate_Nml2Quantity_pertime_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(per_s|per_ms|Hz)$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_pertime_patterns_, )) + validate_Nml2Quantity_pertime_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(per_s|per_ms|Hz))$']] + def _hasContent(self): if ( - super(SpikeSourcePoisson, self).hasContent_() + super(SpikeSourcePoisson, self)._hasContent() ): return True else: @@ -7140,40 +9516,46 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='S eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'SpikeSourcePoisson': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpikeSourcePoisson') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpikeSourcePoisson') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SpikeSourcePoisson', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SpikeSourcePoisson', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, 
already_processed, namespaceprefix_='', name_='SpikeSourcePoisson'): - super(SpikeSourcePoisson, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpikeSourcePoisson') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SpikeSourcePoisson'): + super(SpikeSourcePoisson, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpikeSourcePoisson') if self.start is not None and 'start' not in already_processed: already_processed.add('start') - outfile.write(' start=%s' % (quote_attrib(self.start), )) + outfile.write(' start=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.start), input_name='start')), )) if self.duration is not None and 'duration' not in already_processed: already_processed.add('duration') - outfile.write(' duration=%s' % (quote_attrib(self.duration), )) + outfile.write(' duration=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.duration), input_name='duration')), )) if self.rate is not None and 'rate' not in already_processed: already_processed.add('rate') - outfile.write(' rate=%s' % (quote_attrib(self.rate), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SpikeSourcePoisson', fromsubclass_=False, pretty_print=True): - super(SpikeSourcePoisson, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write(' rate=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.rate), input_name='rate')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SpikeSourcePoisson', fromsubclass_=False, pretty_print=True): + super(SpikeSourcePoisson, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('start', node) if value is not None and 'start' not in already_processed: already_processed.add('start') @@ -7189,38 +9571,47 @@ def buildAttributes(self, node, attrs, already_processed): already_processed.add('rate') self.rate = value self.validate_Nml2Quantity_pertime(self.rate) # validate type Nml2Quantity_pertime - super(SpikeSourcePoisson, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(SpikeSourcePoisson, self).buildChildren(child_, node, nodeName_, True) + super(SpikeSourcePoisson, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(SpikeSourcePoisson, self)._buildChildren(child_, node, nodeName_, True) pass # end class SpikeSourcePoisson class InputList(Base): - """List of inputs to a population. 
Currents will be provided by the - specified component.""" + """InputList -- List of inputs to a population. Currents will be provided by the specified component. + + """ + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('populations', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('component', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('input', 'Input', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Input', u'name': u'input', u'minOccurs': u'0'}, None), - MemberSpec_('input_ws', 'InputW', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'InputW', u'name': u'inputW', u'minOccurs': u'0'}, None), + MemberSpec_('populations', 'NmlId', 0, 0, {'use': 'required', 'name': 'populations'}), + MemberSpec_('component', 'NmlId', 0, 0, {'use': 'required', 'name': 'component'}), + MemberSpec_('input', 'Input', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'input', 'type': 'Input'}, None), + MemberSpec_('input_ws', 'InputW', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'inputW', 'type': 'InputW'}, None), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, populations=None, component=None, input=None, input_ws=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, populations=None, component=None, input=None, input_ws=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(InputList, self).__init__(neuro_lex_id, id, **kwargs_) + self.ns_prefix_ = None + super(globals().get("InputList"), self).__init__(neuro_lex_id, id, **kwargs_) self.populations = _cast(None, populations) + self.populations_nsprefix_ = None self.component = _cast(None, component) + self.component_nsprefix_ = None if input is None: self.input = [] else: self.input = input + self.input_nsprefix_ = None if input_ws is None: self.input_ws = [] else: self.input_ws = input_ws + self.input_ws_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -7232,23 +9623,59 @@ def factory(*args_, **kwargs_): else: return InputList(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_input(self): + return self.input + def set_input(self, input): + self.input = input + def add_input(self, value): + self.input.append(value) + def insert_input_at(self, index, value): + self.input.insert(index, value) + def replace_input_at(self, index, value): + self.input[index] = value + def get_inputW(self): + return self.input_ws + def set_inputW(self, input_ws): + self.input_ws = input_ws + def add_inputW(self, value): + self.input_ws.append(value) + def insert_inputW_at(self, index, value): + self.input_ws.insert(index, value) + def replace_inputW_at(self, index, value): + self.input_ws[index] = value + def get_population(self): + return self.populations + def set_population(self, populations): + self.populations = populations + def get_component(self): + return self.component + def set_component(self, component): + self.component = component def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_NmlId_patterns_, )) + validate_NmlId_patterns_ = [['^([a-zA-Z_][a-zA-Z0-9_]*)$']] + def _hasContent(self): if ( self.input or self.input_ws or - super(InputList, self).hasContent_() + super(InputList, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='InputList', pretty_print=True): + def export(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='InputList', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('InputList') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ @@ -7256,45 +9683,53 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='I eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'InputList': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='InputList') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='InputList') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='InputList', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='InputList', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='InputList'): - super(InputList, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='InputList') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='InputList'): + super(InputList, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='InputList') if self.populations is not None and 'populations' not in already_processed: already_processed.add('populations') - outfile.write(' population=%s' % (quote_attrib(self.populations), )) + outfile.write(' population=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.populations), input_name='population')), )) if self.component is not None and 'component' not in already_processed: already_processed.add('component') - outfile.write(' component=%s' % 
(quote_attrib(self.component), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='InputList', fromsubclass_=False, pretty_print=True): - super(InputList, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write(' component=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.component), input_name='component')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='InputList', fromsubclass_=False, pretty_print=True): + super(InputList, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) if pretty_print: eol_ = '\n' else: eol_ = '' for input_ in self.input: + namespaceprefix_ = self.input_nsprefix_ + ':' if (UseCapturedNS_ and self.input_nsprefix_) else '' input_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='input', pretty_print=pretty_print) for inputW_ in self.input_ws: + namespaceprefix_ = self.input_ws_nsprefix_ + ':' if (UseCapturedNS_ and self.input_ws_nsprefix_) else '' inputW_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='inputW', pretty_print=pretty_print) - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('population', node) if value is not None and 'population' not in already_processed: already_processed.add('population') @@ -7305,20 +9740,20 @@ def buildAttributes(self, node, attrs, already_processed): already_processed.add('component') self.component = value self.validate_NmlId(self.component) # validate type NmlId - super(InputList, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + super(InputList, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'input': class_obj_ = self.get_class_obj_(child_, Input) obj_ = class_obj_.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.input.append(obj_) obj_.original_tagname_ = 'input' elif nodeName_ == 'inputW': obj_ = InputW.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.input_ws.append(obj_) obj_.original_tagname_ = 'inputW' - super(InputList, self).buildChildren(child_, node, nodeName_, True) + super(InputList, self)._buildChildren(child_, node, nodeName_, True) def exportHdf5(self, h5file, h5Group): """Export to HDF5 file. """ @@ -7380,16 +9815,21 @@ def __str__(self): class BaseConnection(BaseNonNegativeIntegerId): - """Base of all synaptic connections (chemical/electrical/analog, etc.) 
- inside projections""" + """BaseConnection -- Base of all synaptic connections (chemical/electrical/analog, etc.) inside projections + + """ + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ ] subclass = None superclass = BaseNonNegativeIntegerId - def __init__(self, neuro_lex_id=None, id=None, extensiontype_=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, extensiontype_=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(BaseConnection, self).__init__(neuro_lex_id, id, extensiontype_, **kwargs_) + self.ns_prefix_ = None + super(globals().get("BaseConnection"), self).__init__(neuro_lex_id, id, extensiontype_, **kwargs_) self.extensiontype_ = extensiontype_ def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: @@ -7402,9 +9842,15 @@ def factory(*args_, **kwargs_): else: return BaseConnection(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_extensiontype_(self): return self.extensiontype_ + def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ + def _hasContent(self): if ( - super(BaseConnection, self).hasContent_() + super(BaseConnection, self)._hasContent() ): return True else: @@ -7417,61 +9863,78 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='B eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'BaseConnection': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseConnection') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseConnection') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BaseConnection', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BaseConnection', pretty_print=pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BaseConnection'): - super(BaseConnection, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseConnection') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BaseConnection'): + super(BaseConnection, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseConnection') if self.extensiontype_ is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseConnection', fromsubclass_=False, pretty_print=True): - super(BaseConnection, self).exportChildren(outfile, 
level, namespaceprefix_, name_, True, pretty_print=pretty_print) + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get(self.extensiontype_, '') + outfile.write(' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_)) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseConnection', fromsubclass_=False, pretty_print=True): + super(BaseConnection, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) pass - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('xsi:type', node) if value is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') self.extensiontype_ = value - super(BaseConnection, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(BaseConnection, self).buildChildren(child_, node, nodeName_, True) + super(BaseConnection, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(BaseConnection, self)._buildChildren(child_, node, nodeName_, True) pass # end class BaseConnection class BaseProjection(Base): - """Base for projection (set of synaptic connections) between two - populations""" + """BaseProjection -- Base for projection (set of synaptic connections) between two populations + + """ + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('presynaptic_population', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('postsynaptic_population', 'NmlId', 0, 0, {'use': u'required'}), + MemberSpec_('presynaptic_population', 'NmlId', 0, 0, {'use': 'required', 'name': 'presynaptic_population'}), + MemberSpec_('postsynaptic_population', 'NmlId', 0, 0, {'use': 'required', 'name': 'postsynaptic_population'}), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, presynaptic_population=None, postsynaptic_population=None, extensiontype_=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, presynaptic_population=None, postsynaptic_population=None, extensiontype_=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(BaseProjection, self).__init__(neuro_lex_id, id, extensiontype_, **kwargs_) + self.ns_prefix_ = None + super(globals().get("BaseProjection"), self).__init__(neuro_lex_id, id, extensiontype_, **kwargs_) self.presynaptic_population = _cast(None, presynaptic_population) + self.presynaptic_population_nsprefix_ = None self.postsynaptic_population = _cast(None, postsynaptic_population) + 
self.postsynaptic_population_nsprefix_ = None self.extensiontype_ = extensiontype_ def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: @@ -7484,16 +9947,34 @@ def factory(*args_, **kwargs_): else: return BaseProjection(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_presynapticPopulation(self): + return self.presynaptic_population + def set_presynapticPopulation(self, presynaptic_population): + self.presynaptic_population = presynaptic_population + def get_postsynapticPopulation(self): + return self.postsynaptic_population + def set_postsynapticPopulation(self, postsynaptic_population): + self.postsynaptic_population = postsynaptic_population + def get_extensiontype_(self): return self.extensiontype_ + def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_NmlId_patterns_, )) + validate_NmlId_patterns_ = [['^([a-zA-Z_][a-zA-Z0-9_]*)$']] + def _hasContent(self): if ( - super(BaseProjection, self).hasContent_() + super(BaseProjection, self)._hasContent() ): return True else: @@ -7506,41 +9987,51 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='B eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'BaseProjection': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseProjection') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseProjection') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BaseProjection', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BaseProjection', pretty_print=pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BaseProjection'): - super(BaseProjection, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseProjection') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', 
name_='BaseProjection'): + super(BaseProjection, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseProjection') if self.presynaptic_population is not None and 'presynaptic_population' not in already_processed: already_processed.add('presynaptic_population') - outfile.write(' presynapticPopulation=%s' % (quote_attrib(self.presynaptic_population), )) + outfile.write(' presynapticPopulation=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.presynaptic_population), input_name='presynapticPopulation')), )) if self.postsynaptic_population is not None and 'postsynaptic_population' not in already_processed: already_processed.add('postsynaptic_population') - outfile.write(' postsynapticPopulation=%s' % (quote_attrib(self.postsynaptic_population), )) + outfile.write(' postsynapticPopulation=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.postsynaptic_population), input_name='postsynapticPopulation')), )) if self.extensiontype_ is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseProjection', fromsubclass_=False, pretty_print=True): - super(BaseProjection, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get(self.extensiontype_, '') + outfile.write(' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_)) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseProjection', fromsubclass_=False, pretty_print=True): + super(BaseProjection, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) pass - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('presynapticPopulation', node) if value is not None and 'presynapticPopulation' not in already_processed: already_processed.add('presynapticPopulation') @@ -7555,25 +10046,30 @@ def buildAttributes(self, node, attrs, already_processed): if value is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') self.extensiontype_ = value - super(BaseProjection, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(BaseProjection, self).buildChildren(child_, node, nodeName_, True) + super(BaseProjection, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(BaseProjection, 
self)._buildChildren(child_, node, nodeName_, True) pass # end class BaseProjection class CellSet(Base): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('select', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('__ANY__', '__ANY__', 1, 1, {u'maxOccurs': u'unbounded', u'processContents': u'skip', u'minOccurs': u'0'}, None), + MemberSpec_('select', 'xs:string', 0, 0, {'use': 'required', 'name': 'select'}), + MemberSpec_('__ANY__', '__ANY__', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'processContents': 'skip'}, None), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, select=None, anytypeobjs_=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, select=None, anytypeobjs_=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(CellSet, self).__init__(neuro_lex_id, id, **kwargs_) + self.ns_prefix_ = None + super(globals().get("CellSet"), self).__init__(neuro_lex_id, id, **kwargs_) self.select = _cast(None, select) + self.select_nsprefix_ = None if anytypeobjs_ is None: self.anytypeobjs_ = [] else: @@ -7589,15 +10085,27 @@ def factory(*args_, **kwargs_): else: return CellSet(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_anytypeobjs_(self): return self.anytypeobjs_ + def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_ + def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value) + def insert_anytypeobjs_(self, index, value): self._anytypeobjs_[index] = value + def get_select(self): + return self.select + def set_select(self, select): + self.select = select + def _hasContent(self): if ( self.anytypeobjs_ or - super(CellSet, self).hasContent_() + super(CellSet, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CellSet', pretty_print=True): + def export(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='CellSet', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('CellSet') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ @@ -7605,77 +10113,95 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='C eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'CellSet': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CellSet') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CellSet') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CellSet', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CellSet', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) 
outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CellSet'): - super(CellSet, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CellSet') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CellSet'): + super(CellSet, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CellSet') if self.select is not None and 'select' not in already_processed: already_processed.add('select') outfile.write(' select=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.select), input_name='select')), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CellSet', fromsubclass_=False, pretty_print=True): - super(CellSet, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='CellSet', fromsubclass_=False, pretty_print=True): + super(CellSet, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) if pretty_print: eol_ = '\n' else: eol_ = '' - for obj_ in self.anytypeobjs_: - obj_.export(outfile, level, namespaceprefix_, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + if not fromsubclass_: + for obj_ in self.anytypeobjs_: + showIndent(outfile, level, pretty_print) + outfile.write(obj_) + outfile.write('\n') + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('select', node) if value is not None and 'select' not in already_processed: already_processed.add('select') self.select = value - super(CellSet, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - obj_ = self.gds_build_any(child_, 'CellSet') - if obj_ is not None: - self.add_anytypeobjs_(obj_) - super(CellSet, self).buildChildren(child_, node, nodeName_, True) + super(CellSet, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + content_ = self.gds_build_any(child_, 'CellSet') + self.add_anytypeobjs_(content_) + super(CellSet, self)._buildChildren(child_, node, nodeName_, True) # end class CellSet class Population(Standalone): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('component', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('size', 'NonNegativeInteger', 0, 1, {'use': u'optional'}), - MemberSpec_('type', 'populationTypes', 0, 1, {'use': u'optional'}), - MemberSpec_('extracellular_properties', 'NmlId', 0, 1, {'use': u'optional'}), - MemberSpec_('layout', 'Layout', 0, 1, 
{u'type': u'Layout', u'name': u'layout', u'minOccurs': u'0'}, 4), - MemberSpec_('instances', 'Instance', 1, 0, {u'maxOccurs': u'unbounded', u'type': u'Instance', u'name': u'instance'}, 4), + MemberSpec_('component', 'NmlId', 0, 0, {'use': 'required', 'name': 'component'}), + MemberSpec_('size', 'NonNegativeInteger', 0, 1, {'use': 'optional', 'name': 'size'}), + MemberSpec_('type', 'populationTypes', 0, 1, {'use': 'optional', 'name': 'type'}), + MemberSpec_('extracellular_properties', 'NmlId', 0, 1, {'use': 'optional', 'name': 'extracellular_properties'}), + MemberSpec_('layout', 'Layout', 0, 1, {'minOccurs': '0', 'name': 'layout', 'type': 'Layout'}, 4), + MemberSpec_('instances', 'Instance', 1, 0, {'maxOccurs': 'unbounded', 'name': 'instance', 'type': 'Instance'}, 4), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, component=None, size=None, type=None, extracellular_properties=None, layout=None, instances=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, component=None, size=None, type=None, extracellular_properties=None, layout=None, instances=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(Population, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.ns_prefix_ = None + super(globals().get("Population"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) self.component = _cast(None, component) + self.component_nsprefix_ = None self.size = _cast(int, size) + self.size_nsprefix_ = None self.type = _cast(None, type) + self.type_nsprefix_ = None self.extracellular_properties = _cast(None, extracellular_properties) + self.extracellular_properties_nsprefix_ = None self.layout = layout + self.layout_nsprefix_ = None if instances is None: self.instances = [] else: self.instances = instances + self.instances_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -7687,39 +10213,82 @@ def factory(*args_, **kwargs_): else: return Population(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_layout(self): + return self.layout + def set_layout(self, layout): + self.layout = layout + def get_instance(self): + return self.instances + def set_instance(self, instances): + self.instances = instances + def add_instance(self, value): + self.instances.append(value) + def insert_instance_at(self, index, value): + self.instances.insert(index, value) + def replace_instance_at(self, index, value): + self.instances[index] = value + def get_component(self): + return self.component + def set_component(self, component): + self.component = component + def get_size(self): + return self.size + def set_size(self, size): + self.size = size + def get_type(self): + return self.type + def set_type(self, type): + self.type = type + def get_extracellularProperties(self): + return self.extracellular_properties + def set_extracellularProperties(self, extracellular_properties): + self.extracellular_properties = extracellular_properties def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_NmlId_patterns_, )) + validate_NmlId_patterns_ = [['^([a-zA-Z_][a-zA-Z0-9_]*)$']] def validate_NonNegativeInteger(self, value): # Validate type NonNegativeInteger, a restriction on xs:nonNegativeInteger. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, int): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (int)' % {"value": value, "lineno": lineno, }) + return False pass def validate_populationTypes(self, value): # Validate type populationTypes, a restriction on xs:string. - if value is not None and Validate_simpletypes_: - value = str(value) + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False + value = value enumerations = ['population', 'populationList'] - enumeration_respectee = False - for enum in enumerations: - if value == enum: - enumeration_respectee = True - break - if not enumeration_respectee: - warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on populationTypes' % {"value" : value.encode("utf-8")} ) - def hasContent_(self): + if value not in enumerations: + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on populationTypes' % {"value" : encode_str_2_3(value), "lineno": lineno} ) + result = False + def _hasContent(self): if ( self.layout is not None or self.instances or - super(Population, self).hasContent_() + super(Population, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Population', pretty_print=True): + def export(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='Population', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('Population') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ @@ -7727,51 +10296,59 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='P eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'Population': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, 
namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Population') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Population') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Population', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Population', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Population'): - super(Population, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Population') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Population'): + super(Population, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Population') if self.component is not None and 'component' not in already_processed: already_processed.add('component') - outfile.write(' component=%s' % (quote_attrib(self.component), )) + outfile.write(' component=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.component), input_name='component')), )) if self.size is not None and 'size' not in already_processed: already_processed.add('size') - outfile.write(' size=%s' % (quote_attrib(self.size), )) + outfile.write(' size="%s"' % self.gds_format_integer(self.size, input_name='size')) if self.type is not None and 'type' not in already_processed: already_processed.add('type') - outfile.write(' type=%s' % (quote_attrib(self.type), )) + outfile.write(' type=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.type), input_name='type')), )) if self.extracellular_properties is not None and 'extracellular_properties' not in already_processed: already_processed.add('extracellular_properties') - outfile.write(' extracellularProperties=%s' % (quote_attrib(self.extracellular_properties), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Population', fromsubclass_=False, pretty_print=True): - super(Population, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write(' extracellularProperties=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.extracellular_properties), input_name='extracellularProperties')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='Population', fromsubclass_=False, pretty_print=True): + super(Population, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) if pretty_print: eol_ = '\n' else: eol_ = '' if self.layout is not None: + namespaceprefix_ = self.layout_nsprefix_ + ':' if (UseCapturedNS_ and self.layout_nsprefix_) else '' self.layout.export(outfile, level, namespaceprefix_, namespacedef_='', name_='layout', pretty_print=pretty_print) for instance_ in self.instances: + namespaceprefix_ = self.instances_nsprefix_ + ':' if (UseCapturedNS_ and self.instances_nsprefix_) else '' instance_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='instance', pretty_print=pretty_print) - def 
build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('component', node) if value is not None and 'component' not in already_processed: already_processed.add('component') @@ -7780,10 +10357,7 @@ def buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('size', node) if value is not None and 'size' not in already_processed: already_processed.add('size') - try: - self.size = int(value) - except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + self.size = self.gds_parse_integer(value, node, 'size') if self.size < 0: raise_parse_error(node, 'Invalid NonNegativeInteger') self.validate_NonNegativeInteger(self.size) # validate type NonNegativeInteger @@ -7797,19 +10371,19 @@ def buildAttributes(self, node, attrs, already_processed): already_processed.add('extracellularProperties') self.extracellular_properties = value self.validate_NmlId(self.extracellular_properties) # validate type NmlId - super(Population, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + super(Population, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'layout': obj_ = Layout.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.layout = obj_ obj_.original_tagname_ = 'layout' elif nodeName_ == 'instance': obj_ = Instance.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.instances.append(obj_) obj_.original_tagname_ = 'instance' - super(Population, self).buildChildren(child_, node, nodeName_, True) + super(Population, self)._buildChildren(child_, node, nodeName_, True) def exportHdf5(self, h5file, h5Group): """Export to HDF5 file. 
""" @@ -7861,17 +10435,22 @@ def __str__(self): class Region(Base): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('spaces', 'NmlId', 0, 1, {'use': u'optional'}), - MemberSpec_('__ANY__', '__ANY__', 1, 1, {u'maxOccurs': u'unbounded', u'processContents': u'skip', u'minOccurs': u'0'}, None), + MemberSpec_('spaces', 'NmlId', 0, 1, {'use': 'optional', 'name': 'spaces'}), + MemberSpec_('__ANY__', '__ANY__', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'processContents': 'skip'}, None), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, spaces=None, anytypeobjs_=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, spaces=None, anytypeobjs_=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(Region, self).__init__(neuro_lex_id, id, **kwargs_) + self.ns_prefix_ = None + super(globals().get("Region"), self).__init__(neuro_lex_id, id, **kwargs_) self.spaces = _cast(None, spaces) + self.spaces_nsprefix_ = None if anytypeobjs_ is None: self.anytypeobjs_ = [] else: @@ -7887,22 +10466,38 @@ def factory(*args_, **kwargs_): else: return Region(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_anytypeobjs_(self): return self.anytypeobjs_ + def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_ + def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value) + def insert_anytypeobjs_(self, index, value): self._anytypeobjs_[index] = value + def get_space(self): + return self.spaces + def set_space(self, spaces): + self.spaces = spaces def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_NmlId_patterns_, )) + validate_NmlId_patterns_ = [['^([a-zA-Z_][a-zA-Z0-9_]*)$']] + def _hasContent(self): if ( self.anytypeobjs_ or - super(Region, self).hasContent_() + super(Region, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Region', pretty_print=True): + def export(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='Region', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('Region') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ @@ -7910,67 +10505,81 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='R eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'Region': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Region') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Region') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Region', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Region', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Region'): - super(Region, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Region') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Region'): + super(Region, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Region') if self.spaces is not None and 'spaces' not in already_processed: already_processed.add('spaces') - outfile.write(' space=%s' % (quote_attrib(self.spaces), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Region', fromsubclass_=False, pretty_print=True): - super(Region, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write(' space=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.spaces), input_name='space')), )) + 
def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='Region', fromsubclass_=False, pretty_print=True): + super(Region, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) if pretty_print: eol_ = '\n' else: eol_ = '' - for obj_ in self.anytypeobjs_: - obj_.export(outfile, level, namespaceprefix_, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + if not fromsubclass_: + for obj_ in self.anytypeobjs_: + showIndent(outfile, level, pretty_print) + outfile.write(obj_) + outfile.write('\n') + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('space', node) if value is not None and 'space' not in already_processed: already_processed.add('space') self.spaces = value self.validate_NmlId(self.spaces) # validate type NmlId - super(Region, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - obj_ = self.gds_build_any(child_, 'Region') - if obj_ is not None: - self.add_anytypeobjs_(obj_) - super(Region, self).buildChildren(child_, node, nodeName_, True) + super(Region, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + content_ = self.gds_build_any(child_, 'Region') + self.add_anytypeobjs_(content_) + super(Region, self)._buildChildren(child_, node, nodeName_, True) # end class Region class Space(Base): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('based_on', 'allowedSpaces', 0, 1, {'use': u'optional'}), - MemberSpec_('structure', 'SpaceStructure', 0, 1, {u'type': u'SpaceStructure', u'name': u'structure', u'minOccurs': u'0'}, None), + MemberSpec_('based_on', 'allowedSpaces', 0, 1, {'use': 'optional', 'name': 'based_on'}), + MemberSpec_('structure', 'SpaceStructure', 0, 1, {'minOccurs': '0', 'name': 'structure', 'type': 'SpaceStructure'}, None), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, based_on=None, structure=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, based_on=None, structure=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(Space, self).__init__(neuro_lex_id, id, **kwargs_) + self.ns_prefix_ = None + super(globals().get("Space"), self).__init__(neuro_lex_id, id, **kwargs_) self.based_on = _cast(None, based_on) + self.based_on_nsprefix_ = None self.structure = structure + self.structure_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -7982,27 +10591,40 @@ def factory(*args_, **kwargs_): else: return 
Space(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_structure(self): + return self.structure + def set_structure(self, structure): + self.structure = structure + def get_basedOn(self): + return self.based_on + def set_basedOn(self, based_on): + self.based_on = based_on def validate_allowedSpaces(self, value): # Validate type allowedSpaces, a restriction on xs:string. - if value is not None and Validate_simpletypes_: - value = str(value) + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False + value = value enumerations = ['Euclidean_1D', 'Euclidean_2D', 'Euclidean_3D', 'Grid_1D', 'Grid_2D', 'Grid_3D'] - enumeration_respectee = False - for enum in enumerations: - if value == enum: - enumeration_respectee = True - break - if not enumeration_respectee: - warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on allowedSpaces' % {"value" : value.encode("utf-8")} ) - def hasContent_(self): + if value not in enumerations: + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on allowedSpaces' % {"value" : encode_str_2_3(value), "lineno": lineno} ) + result = False + def _hasContent(self): if ( self.structure is not None or - super(Space, self).hasContent_() + super(Space, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Space', pretty_print=True): + def export(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='Space', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('Space') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ @@ -8010,124 +10632,148 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='S eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'Space': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Space') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Space') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Space', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Space', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Space'): - super(Space, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Space') + def _exportAttributes(self, outfile, 
level, already_processed, namespaceprefix_='', name_='Space'): + super(Space, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Space') if self.based_on is not None and 'based_on' not in already_processed: already_processed.add('based_on') - outfile.write(' basedOn=%s' % (quote_attrib(self.based_on), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Space', fromsubclass_=False, pretty_print=True): - super(Space, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write(' basedOn=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.based_on), input_name='basedOn')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='Space', fromsubclass_=False, pretty_print=True): + super(Space, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) if pretty_print: eol_ = '\n' else: eol_ = '' if self.structure is not None: + namespaceprefix_ = self.structure_nsprefix_ + ':' if (UseCapturedNS_ and self.structure_nsprefix_) else '' self.structure.export(outfile, level, namespaceprefix_, namespacedef_='', name_='structure', pretty_print=pretty_print) - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('basedOn', node) if value is not None and 'basedOn' not in already_processed: already_processed.add('basedOn') self.based_on = value self.validate_allowedSpaces(self.based_on) # validate type allowedSpaces - super(Space, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + super(Space, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'structure': obj_ = SpaceStructure.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.structure = obj_ obj_.original_tagname_ = 'structure' - super(Space, self).buildChildren(child_, node, nodeName_, True) + super(Space, self)._buildChildren(child_, node, nodeName_, True) # end class Space class Network(Standalone): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('type', 'networkTypes', 0, 1, {'use': u'optional'}), - MemberSpec_('temperature', 'Nml2Quantity_temperature', 0, 1, {'use': u'optional'}), - MemberSpec_('spaces', 'Space', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Space', u'name': u'space', u'minOccurs': u'0'}, None), - MemberSpec_('regions', 'Region', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Region', u'name': u'region', u'minOccurs': u'0'}, None), - MemberSpec_('extracellular_properties', 'ExtracellularPropertiesLocal', 1, 1, {u'maxOccurs': u'unbounded', u'type': 
u'ExtracellularPropertiesLocal', u'name': u'extracellularProperties', u'minOccurs': u'0'}, None), - MemberSpec_('populations', 'Population', 1, 0, {u'maxOccurs': u'unbounded', u'type': u'Population', u'name': u'population'}, None), - MemberSpec_('cell_sets', 'CellSet', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'CellSet', u'name': u'cellSet', u'minOccurs': u'0'}, None), - MemberSpec_('synaptic_connections', 'SynapticConnection', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'SynapticConnection', u'name': u'synapticConnection', u'minOccurs': u'0'}, None), - MemberSpec_('projections', 'Projection', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Projection', u'name': u'projection', u'minOccurs': u'0'}, None), - MemberSpec_('electrical_projections', 'ElectricalProjection', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ElectricalProjection', u'name': u'electricalProjection', u'minOccurs': u'0'}, None), - MemberSpec_('continuous_projections', 'ContinuousProjection', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ContinuousProjection', u'name': u'continuousProjection', u'minOccurs': u'0'}, None), - MemberSpec_('explicit_inputs', 'ExplicitInput', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ExplicitInput', u'name': u'explicitInput', u'minOccurs': u'0'}, None), - MemberSpec_('input_lists', 'InputList', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'InputList', u'name': u'inputList', u'minOccurs': u'0'}, None), + MemberSpec_('type', 'networkTypes', 0, 1, {'use': 'optional', 'name': 'type'}), + MemberSpec_('temperature', 'Nml2Quantity_temperature', 0, 1, {'use': 'optional', 'name': 'temperature'}), + MemberSpec_('spaces', 'Space', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'space', 'type': 'Space'}, None), + MemberSpec_('regions', 'Region', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'region', 'type': 'Region'}, None), + MemberSpec_('extracellular_properties', 'ExtracellularPropertiesLocal', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'extracellularProperties', 'type': 'ExtracellularPropertiesLocal'}, None), + MemberSpec_('populations', 'Population', 1, 0, {'maxOccurs': 'unbounded', 'name': 'population', 'type': 'Population'}, None), + MemberSpec_('cell_sets', 'CellSet', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'cellSet', 'type': 'CellSet'}, None), + MemberSpec_('synaptic_connections', 'SynapticConnection', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'synapticConnection', 'type': 'SynapticConnection'}, None), + MemberSpec_('projections', 'Projection', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'projection', 'type': 'Projection'}, None), + MemberSpec_('electrical_projections', 'ElectricalProjection', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'electricalProjection', 'type': 'ElectricalProjection'}, None), + MemberSpec_('continuous_projections', 'ContinuousProjection', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'continuousProjection', 'type': 'ContinuousProjection'}, None), + MemberSpec_('explicit_inputs', 'ExplicitInput', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'explicitInput', 'type': 'ExplicitInput'}, None), + MemberSpec_('input_lists', 'InputList', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'inputList', 'type': 'InputList'}, None), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, type=None, temperature=None, spaces=None, regions=None, 
extracellular_properties=None, populations=None, cell_sets=None, synaptic_connections=None, projections=None, electrical_projections=None, continuous_projections=None, explicit_inputs=None, input_lists=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, type=None, temperature=None, spaces=None, regions=None, extracellular_properties=None, populations=None, cell_sets=None, synaptic_connections=None, projections=None, electrical_projections=None, continuous_projections=None, explicit_inputs=None, input_lists=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(Network, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.ns_prefix_ = None + super(globals().get("Network"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) self.type = _cast(None, type) + self.type_nsprefix_ = None self.temperature = _cast(None, temperature) + self.temperature_nsprefix_ = None if spaces is None: self.spaces = [] else: self.spaces = spaces + self.spaces_nsprefix_ = None if regions is None: self.regions = [] else: self.regions = regions + self.regions_nsprefix_ = None if extracellular_properties is None: self.extracellular_properties = [] else: self.extracellular_properties = extracellular_properties + self.extracellular_properties_nsprefix_ = None if populations is None: self.populations = [] else: self.populations = populations + self.populations_nsprefix_ = None if cell_sets is None: self.cell_sets = [] else: self.cell_sets = cell_sets + self.cell_sets_nsprefix_ = None if synaptic_connections is None: self.synaptic_connections = [] else: self.synaptic_connections = synaptic_connections + self.synaptic_connections_nsprefix_ = None if projections is None: self.projections = [] else: self.projections = projections + self.projections_nsprefix_ = None if electrical_projections is None: self.electrical_projections = [] else: self.electrical_projections = electrical_projections + self.electrical_projections_nsprefix_ = None if continuous_projections is None: self.continuous_projections = [] else: self.continuous_projections = continuous_projections + self.continuous_projections_nsprefix_ = None if explicit_inputs is None: self.explicit_inputs = [] else: self.explicit_inputs = explicit_inputs + self.explicit_inputs_nsprefix_ = None if input_lists is None: self.input_lists = [] else: self.input_lists = input_lists + self.input_lists_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -8139,26 +10785,153 @@ def factory(*args_, **kwargs_): else: return Network(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_space(self): + return self.spaces + def set_space(self, spaces): + self.spaces = spaces + def add_space(self, value): + self.spaces.append(value) + def insert_space_at(self, index, value): + self.spaces.insert(index, value) + def replace_space_at(self, index, value): + self.spaces[index] = value + def get_region(self): + return self.regions + def set_region(self, regions): + self.regions = regions + def add_region(self, value): + self.regions.append(value) + def insert_region_at(self, index, value): + 
self.regions.insert(index, value) + def replace_region_at(self, index, value): + self.regions[index] = value + def get_extracellularProperties(self): + return self.extracellular_properties + def set_extracellularProperties(self, extracellular_properties): + self.extracellular_properties = extracellular_properties + def add_extracellularProperties(self, value): + self.extracellular_properties.append(value) + def insert_extracellularProperties_at(self, index, value): + self.extracellular_properties.insert(index, value) + def replace_extracellularProperties_at(self, index, value): + self.extracellular_properties[index] = value + def get_population(self): + return self.populations + def set_population(self, populations): + self.populations = populations + def add_population(self, value): + self.populations.append(value) + def insert_population_at(self, index, value): + self.populations.insert(index, value) + def replace_population_at(self, index, value): + self.populations[index] = value + def get_cellSet(self): + return self.cell_sets + def set_cellSet(self, cell_sets): + self.cell_sets = cell_sets + def add_cellSet(self, value): + self.cell_sets.append(value) + def insert_cellSet_at(self, index, value): + self.cell_sets.insert(index, value) + def replace_cellSet_at(self, index, value): + self.cell_sets[index] = value + def get_synapticConnection(self): + return self.synaptic_connections + def set_synapticConnection(self, synaptic_connections): + self.synaptic_connections = synaptic_connections + def add_synapticConnection(self, value): + self.synaptic_connections.append(value) + def insert_synapticConnection_at(self, index, value): + self.synaptic_connections.insert(index, value) + def replace_synapticConnection_at(self, index, value): + self.synaptic_connections[index] = value + def get_projection(self): + return self.projections + def set_projection(self, projections): + self.projections = projections + def add_projection(self, value): + self.projections.append(value) + def insert_projection_at(self, index, value): + self.projections.insert(index, value) + def replace_projection_at(self, index, value): + self.projections[index] = value + def get_electricalProjection(self): + return self.electrical_projections + def set_electricalProjection(self, electrical_projections): + self.electrical_projections = electrical_projections + def add_electricalProjection(self, value): + self.electrical_projections.append(value) + def insert_electricalProjection_at(self, index, value): + self.electrical_projections.insert(index, value) + def replace_electricalProjection_at(self, index, value): + self.electrical_projections[index] = value + def get_continuousProjection(self): + return self.continuous_projections + def set_continuousProjection(self, continuous_projections): + self.continuous_projections = continuous_projections + def add_continuousProjection(self, value): + self.continuous_projections.append(value) + def insert_continuousProjection_at(self, index, value): + self.continuous_projections.insert(index, value) + def replace_continuousProjection_at(self, index, value): + self.continuous_projections[index] = value + def get_explicitInput(self): + return self.explicit_inputs + def set_explicitInput(self, explicit_inputs): + self.explicit_inputs = explicit_inputs + def add_explicitInput(self, value): + self.explicit_inputs.append(value) + def insert_explicitInput_at(self, index, value): + self.explicit_inputs.insert(index, value) + def replace_explicitInput_at(self, index, value): + 
self.explicit_inputs[index] = value + def get_inputList(self): + return self.input_lists + def set_inputList(self, input_lists): + self.input_lists = input_lists + def add_inputList(self, value): + self.input_lists.append(value) + def insert_inputList_at(self, index, value): + self.input_lists.insert(index, value) + def replace_inputList_at(self, index, value): + self.input_lists[index] = value + def get_type(self): + return self.type + def set_type(self, type): + self.type = type + def get_temperature(self): + return self.temperature + def set_temperature(self, temperature): + self.temperature = temperature def validate_networkTypes(self, value): # Validate type networkTypes, a restriction on xs:string. - if value is not None and Validate_simpletypes_: - value = str(value) + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False + value = value enumerations = ['network', 'networkWithTemperature'] - enumeration_respectee = False - for enum in enumerations: - if value == enum: - enumeration_respectee = True - break - if not enumeration_respectee: - warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on networkTypes' % {"value" : value.encode("utf-8")} ) + if value not in enumerations: + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on networkTypes' % {"value" : encode_str_2_3(value), "lineno": lineno} ) + result = False def validate_Nml2Quantity_temperature(self, value): # Validate type Nml2Quantity_temperature, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_temperature_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_temperature_patterns_, )) - validate_Nml2Quantity_temperature_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(degC)$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_temperature_patterns_, )) + validate_Nml2Quantity_temperature_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(degC))$']] + def _hasContent(self): if ( self.spaces or self.regions or @@ -8171,12 +10944,12 @@ def hasContent_(self): self.continuous_projections or self.explicit_inputs or self.input_lists or - super(Network, self).hasContent_() + super(Network, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Network', pretty_print=True): + def export(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='Network', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('Network') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ @@ -8184,63 +10957,80 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='N eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'Network': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Network') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Network') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Network', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Network', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Network'): - super(Network, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Network') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Network'): + super(Network, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Network') if self.type is not None and 'type' not in already_processed: already_processed.add('type') - outfile.write(' type=%s' % (quote_attrib(self.type), )) + outfile.write(' type=%s' % 
(self.gds_encode(self.gds_format_string(quote_attrib(self.type), input_name='type')), )) if self.temperature is not None and 'temperature' not in already_processed: already_processed.add('temperature') - outfile.write(' temperature=%s' % (quote_attrib(self.temperature), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Network', fromsubclass_=False, pretty_print=True): - super(Network, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write(' temperature=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.temperature), input_name='temperature')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='Network', fromsubclass_=False, pretty_print=True): + super(Network, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) if pretty_print: eol_ = '\n' else: eol_ = '' for space_ in self.spaces: + namespaceprefix_ = self.spaces_nsprefix_ + ':' if (UseCapturedNS_ and self.spaces_nsprefix_) else '' space_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='space', pretty_print=pretty_print) for region_ in self.regions: + namespaceprefix_ = self.regions_nsprefix_ + ':' if (UseCapturedNS_ and self.regions_nsprefix_) else '' region_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='region', pretty_print=pretty_print) for extracellularProperties_ in self.extracellular_properties: + namespaceprefix_ = self.extracellular_properties_nsprefix_ + ':' if (UseCapturedNS_ and self.extracellular_properties_nsprefix_) else '' extracellularProperties_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='extracellularProperties', pretty_print=pretty_print) for population_ in self.populations: + namespaceprefix_ = self.populations_nsprefix_ + ':' if (UseCapturedNS_ and self.populations_nsprefix_) else '' population_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='population', pretty_print=pretty_print) for cellSet_ in self.cell_sets: + namespaceprefix_ = self.cell_sets_nsprefix_ + ':' if (UseCapturedNS_ and self.cell_sets_nsprefix_) else '' cellSet_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='cellSet', pretty_print=pretty_print) for synapticConnection_ in self.synaptic_connections: + namespaceprefix_ = self.synaptic_connections_nsprefix_ + ':' if (UseCapturedNS_ and self.synaptic_connections_nsprefix_) else '' synapticConnection_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='synapticConnection', pretty_print=pretty_print) for projection_ in self.projections: + namespaceprefix_ = self.projections_nsprefix_ + ':' if (UseCapturedNS_ and self.projections_nsprefix_) else '' projection_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='projection', pretty_print=pretty_print) for electricalProjection_ in self.electrical_projections: + namespaceprefix_ = self.electrical_projections_nsprefix_ + ':' if (UseCapturedNS_ and self.electrical_projections_nsprefix_) else '' electricalProjection_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='electricalProjection', pretty_print=pretty_print) for continuousProjection_ in self.continuous_projections: + namespaceprefix_ = self.continuous_projections_nsprefix_ + ':' if (UseCapturedNS_ and self.continuous_projections_nsprefix_) else '' continuousProjection_.export(outfile, level, namespaceprefix_, 
namespacedef_='', name_='continuousProjection', pretty_print=pretty_print) for explicitInput_ in self.explicit_inputs: + namespaceprefix_ = self.explicit_inputs_nsprefix_ + ':' if (UseCapturedNS_ and self.explicit_inputs_nsprefix_) else '' explicitInput_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='explicitInput', pretty_print=pretty_print) for inputList_ in self.input_lists: + namespaceprefix_ = self.input_lists_nsprefix_ + ':' if (UseCapturedNS_ and self.input_lists_nsprefix_) else '' inputList_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='inputList', pretty_print=pretty_print) - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('type', node) if value is not None and 'type' not in already_processed: already_processed.add('type') @@ -8251,64 +11041,64 @@ def buildAttributes(self, node, attrs, already_processed): already_processed.add('temperature') self.temperature = value self.validate_Nml2Quantity_temperature(self.temperature) # validate type Nml2Quantity_temperature - super(Network, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + super(Network, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'space': obj_ = Space.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.spaces.append(obj_) obj_.original_tagname_ = 'space' elif nodeName_ == 'region': obj_ = Region.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.regions.append(obj_) obj_.original_tagname_ = 'region' elif nodeName_ == 'extracellularProperties': obj_ = ExtracellularPropertiesLocal.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.extracellular_properties.append(obj_) obj_.original_tagname_ = 'extracellularProperties' elif nodeName_ == 'population': obj_ = Population.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.populations.append(obj_) obj_.original_tagname_ = 'population' elif nodeName_ == 'cellSet': obj_ = CellSet.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.cell_sets.append(obj_) obj_.original_tagname_ = 'cellSet' elif nodeName_ == 'synapticConnection': obj_ = SynapticConnection.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.synaptic_connections.append(obj_) obj_.original_tagname_ = 'synapticConnection' elif nodeName_ == 'projection': obj_ = Projection.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.projections.append(obj_) 
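(Editor's sketch, not part of the patch.) The regenerated build() methods above now thread a gds_collector_ through parsing so that pattern-validation failures are collected instead of raised as warnings. A minimal sketch of driving that pattern by hand is below; GdsCollector_ is the helper class generateDS normally emits alongside these classes, and the import path is an assumption, not something this diff shows.

    # Assumed: GdsCollector_ is defined in the regenerated module
    from xml.etree import ElementTree as ET
    from neuroml.nml.nml import Network, GdsCollector_

    def build_network_with_messages(xml_path):
        root = ET.parse(xml_path).getroot()
        collector = GdsCollector_()                 # accumulates validation messages
        net = Network.factory()
        net.build(root, gds_collector_=collector)   # new signature shown in this diff
        for msg in collector.get_messages():        # pattern violations end up here
            print("validation:", msg)
        return net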
obj_.original_tagname_ = 'projection' elif nodeName_ == 'electricalProjection': obj_ = ElectricalProjection.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.electrical_projections.append(obj_) obj_.original_tagname_ = 'electricalProjection' elif nodeName_ == 'continuousProjection': obj_ = ContinuousProjection.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.continuous_projections.append(obj_) obj_.original_tagname_ = 'continuousProjection' elif nodeName_ == 'explicitInput': obj_ = ExplicitInput.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.explicit_inputs.append(obj_) obj_.original_tagname_ = 'explicitInput' elif nodeName_ == 'inputList': obj_ = InputList.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.input_lists.append(obj_) obj_.original_tagname_ = 'inputList' - super(Network, self).buildChildren(child_, node, nodeName_, True) + super(Network, self)._buildChildren(child_, node, nodeName_, True) warn_count = 0 def get_by_id(self,id): @@ -8380,24 +11170,33 @@ def exportHdf5(self, h5file, h5Group): class TransientPoissonFiringSynapse(Standalone): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('average_rate', 'Nml2Quantity_pertime', 0, 0, {'use': u'required'}), - MemberSpec_('delay', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('duration', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('synapse', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('spike_target', 'xs:string', 0, 0, {'use': u'required'}), + MemberSpec_('average_rate', 'Nml2Quantity_pertime', 0, 0, {'use': 'required', 'name': 'average_rate'}), + MemberSpec_('delay', 'Nml2Quantity_time', 0, 0, {'use': 'required', 'name': 'delay'}), + MemberSpec_('duration', 'Nml2Quantity_time', 0, 0, {'use': 'required', 'name': 'duration'}), + MemberSpec_('synapse', 'xs:string', 0, 0, {'use': 'required', 'name': 'synapse'}), + MemberSpec_('spike_target', 'xs:string', 0, 0, {'use': 'required', 'name': 'spike_target'}), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, average_rate=None, delay=None, duration=None, synapse=None, spike_target=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, average_rate=None, delay=None, duration=None, synapse=None, spike_target=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(TransientPoissonFiringSynapse, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.ns_prefix_ = None + super(globals().get("TransientPoissonFiringSynapse"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) self.average_rate = _cast(None, average_rate) + self.average_rate_nsprefix_ = None self.delay = _cast(None, delay) + self.delay_nsprefix_ = None self.duration = _cast(None, duration) + self.duration_nsprefix_ = None self.synapse = _cast(None, synapse) + self.synapse_nsprefix_ = None self.spike_target = _cast(None, spike_target) + self.spike_target_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = 
getSubclassFromModule_( @@ -8409,23 +11208,55 @@ def factory(*args_, **kwargs_): else: return TransientPoissonFiringSynapse(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_averageRate(self): + return self.average_rate + def set_averageRate(self, average_rate): + self.average_rate = average_rate + def get_delay(self): + return self.delay + def set_delay(self, delay): + self.delay = delay + def get_duration(self): + return self.duration + def set_duration(self, duration): + self.duration = duration + def get_synapse(self): + return self.synapse + def set_synapse(self, synapse): + self.synapse = synapse + def get_spikeTarget(self): + return self.spike_target + def set_spikeTarget(self, spike_target): + self.spike_target = spike_target def validate_Nml2Quantity_pertime(self, value): # Validate type Nml2Quantity_pertime, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_pertime_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_pertime_patterns_, )) - validate_Nml2Quantity_pertime_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(per_s|per_ms|Hz)$']] + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_pertime_patterns_, )) + validate_Nml2Quantity_pertime_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(per_s|per_ms|Hz))$']] def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. 
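(Editor's sketch, not part of the patch.) The regenerated TransientPoissonFiringSynapse keeps the same constructor keywords but adds camelCase getters/setters. A hedged construction example, assuming the top-level neuroml package re-exports the generated class as it does today; the id and synapse values are placeholders.

    import neuroml

    tpfs = neuroml.TransientPoissonFiringSynapse(
        id="tpfs0",
        average_rate="30 Hz",     # must satisfy the Nml2Quantity_pertime pattern
        delay="50 ms",            # Nml2Quantity_time
        duration="200 ms",        # Nml2Quantity_time
        synapse="syn0",
        spike_target="./syn0",
    )
    print(tpfs.get_averageRate())  # new getter added by this regeneration -> "30 Hz"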
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_time_patterns_, )) + validate_Nml2Quantity_time_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms))$']] + def _hasContent(self): if ( - super(TransientPoissonFiringSynapse, self).hasContent_() + super(TransientPoissonFiringSynapse, self)._hasContent() ): return True else: @@ -8438,46 +11269,52 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='T eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'TransientPoissonFiringSynapse': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='TransientPoissonFiringSynapse') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='TransientPoissonFiringSynapse') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='TransientPoissonFiringSynapse', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='TransientPoissonFiringSynapse', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='TransientPoissonFiringSynapse'): - super(TransientPoissonFiringSynapse, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='TransientPoissonFiringSynapse') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='TransientPoissonFiringSynapse'): + super(TransientPoissonFiringSynapse, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='TransientPoissonFiringSynapse') if self.average_rate is not None and 'average_rate' not in already_processed: already_processed.add('average_rate') - outfile.write(' averageRate=%s' % (quote_attrib(self.average_rate), )) + outfile.write(' averageRate=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.average_rate), input_name='averageRate')), )) if self.delay is not None and 'delay' not in already_processed: already_processed.add('delay') - outfile.write(' delay=%s' % (quote_attrib(self.delay), )) + outfile.write(' delay=%s' % 
(self.gds_encode(self.gds_format_string(quote_attrib(self.delay), input_name='delay')), )) if self.duration is not None and 'duration' not in already_processed: already_processed.add('duration') - outfile.write(' duration=%s' % (quote_attrib(self.duration), )) + outfile.write(' duration=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.duration), input_name='duration')), )) if self.synapse is not None and 'synapse' not in already_processed: already_processed.add('synapse') outfile.write(' synapse=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.synapse), input_name='synapse')), )) if self.spike_target is not None and 'spike_target' not in already_processed: already_processed.add('spike_target') outfile.write(' spikeTarget=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.spike_target), input_name='spikeTarget')), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='TransientPoissonFiringSynapse', fromsubclass_=False, pretty_print=True): - super(TransientPoissonFiringSynapse, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='TransientPoissonFiringSynapse', fromsubclass_=False, pretty_print=True): + super(TransientPoissonFiringSynapse, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('averageRate', node) if value is not None and 'averageRate' not in already_processed: already_processed.add('averageRate') @@ -8501,28 +11338,35 @@ def buildAttributes(self, node, attrs, already_processed): if value is not None and 'spikeTarget' not in already_processed: already_processed.add('spikeTarget') self.spike_target = value - super(TransientPoissonFiringSynapse, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(TransientPoissonFiringSynapse, self).buildChildren(child_, node, nodeName_, True) + super(TransientPoissonFiringSynapse, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(TransientPoissonFiringSynapse, self)._buildChildren(child_, node, nodeName_, True) pass # end class TransientPoissonFiringSynapse class PoissonFiringSynapse(Standalone): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('average_rate', 'Nml2Quantity_pertime', 0, 0, {'use': u'required'}), - MemberSpec_('synapse', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('spike_target', 'xs:string', 0, 0, {'use': u'required'}), + MemberSpec_('average_rate', 'Nml2Quantity_pertime', 0, 0, {'use': 'required', 'name': 
'average_rate'}), + MemberSpec_('synapse', 'xs:string', 0, 0, {'use': 'required', 'name': 'synapse'}), + MemberSpec_('spike_target', 'xs:string', 0, 0, {'use': 'required', 'name': 'spike_target'}), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, average_rate=None, synapse=None, spike_target=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, average_rate=None, synapse=None, spike_target=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(PoissonFiringSynapse, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.ns_prefix_ = None + super(globals().get("PoissonFiringSynapse"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) self.average_rate = _cast(None, average_rate) + self.average_rate_nsprefix_ = None self.synapse = _cast(None, synapse) + self.synapse_nsprefix_ = None self.spike_target = _cast(None, spike_target) + self.spike_target_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -8534,16 +11378,36 @@ def factory(*args_, **kwargs_): else: return PoissonFiringSynapse(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_averageRate(self): + return self.average_rate + def set_averageRate(self, average_rate): + self.average_rate = average_rate + def get_synapse(self): + return self.synapse + def set_synapse(self, synapse): + self.synapse = synapse + def get_spikeTarget(self): + return self.spike_target + def set_spikeTarget(self, spike_target): + self.spike_target = spike_target def validate_Nml2Quantity_pertime(self, value): # Validate type Nml2Quantity_pertime, a restriction on xs:string. 
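(Editor's note, not part of the patch.) The regenerated validators also anchor the quantity patterns with an explicit group, e.g. '^(-?...(per_s|per_ms|Hz))$' for Nml2Quantity_pertime. The small standalone check below uses that exact pattern, copied from the diff, to show which strings it accepts.

    import re

    # Regenerated pattern for Nml2Quantity_pertime, copied from the diff above:
    PER_TIME = r'^(-?([0-9]*(\.[0-9]+)?)([eE]-?[0-9]+)?[\s]*(per_s|per_ms|Hz))$'

    for value in ("50 Hz", "0.5 per_ms", "1e-3per_s", "50 kHz"):
        ok = re.match(PER_TIME, value) is not None
        print("%-10s -> %s" % (value, "valid" if ok else "invalid"))
    # "50 kHz" is rejected: kHz is not one of the allowed units (per_s, per_ms, Hz)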
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_pertime_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_pertime_patterns_, )) - validate_Nml2Quantity_pertime_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(per_s|per_ms|Hz)$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_pertime_patterns_, )) + validate_Nml2Quantity_pertime_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(per_s|per_ms|Hz))$']] + def _hasContent(self): if ( - super(PoissonFiringSynapse, self).hasContent_() + super(PoissonFiringSynapse, self)._hasContent() ): return True else: @@ -8556,40 +11420,46 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='P eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'PoissonFiringSynapse': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PoissonFiringSynapse') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PoissonFiringSynapse') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PoissonFiringSynapse', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PoissonFiringSynapse', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PoissonFiringSynapse'): - super(PoissonFiringSynapse, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PoissonFiringSynapse') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PoissonFiringSynapse'): + super(PoissonFiringSynapse, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PoissonFiringSynapse') if self.average_rate is not None and 'average_rate' not in already_processed: already_processed.add('average_rate') - outfile.write(' averageRate=%s' % (quote_attrib(self.average_rate), )) + outfile.write(' averageRate=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.average_rate), input_name='averageRate')), )) if self.synapse is not None and 'synapse' not in already_processed: already_processed.add('synapse') outfile.write(' synapse=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.synapse), input_name='synapse')), )) if self.spike_target is not None and 'spike_target' not in 
already_processed: already_processed.add('spike_target') outfile.write(' spikeTarget=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.spike_target), input_name='spikeTarget')), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PoissonFiringSynapse', fromsubclass_=False, pretty_print=True): - super(PoissonFiringSynapse, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PoissonFiringSynapse', fromsubclass_=False, pretty_print=True): + super(PoissonFiringSynapse, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('averageRate', node) if value is not None and 'averageRate' not in already_processed: already_processed.add('averageRate') @@ -8603,24 +11473,29 @@ def buildAttributes(self, node, attrs, already_processed): if value is not None and 'spikeTarget' not in already_processed: already_processed.add('spikeTarget') self.spike_target = value - super(PoissonFiringSynapse, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(PoissonFiringSynapse, self).buildChildren(child_, node, nodeName_, True) + super(PoissonFiringSynapse, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(PoissonFiringSynapse, self)._buildChildren(child_, node, nodeName_, True) pass # end class PoissonFiringSynapse class SpikeGeneratorPoisson(Standalone): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('average_rate', 'Nml2Quantity_pertime', 0, 0, {'use': u'required'}), + MemberSpec_('average_rate', 'Nml2Quantity_pertime', 0, 0, {'use': 'required', 'name': 'average_rate'}), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, average_rate=None, extensiontype_=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, average_rate=None, extensiontype_=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(SpikeGeneratorPoisson, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, extensiontype_, **kwargs_) + self.ns_prefix_ = None + super(globals().get("SpikeGeneratorPoisson"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, extensiontype_, **kwargs_) self.average_rate = _cast(None, 
average_rate) + self.average_rate_nsprefix_ = None self.extensiontype_ = extensiontype_ def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: @@ -8633,16 +11508,30 @@ def factory(*args_, **kwargs_): else: return SpikeGeneratorPoisson(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_averageRate(self): + return self.average_rate + def set_averageRate(self, average_rate): + self.average_rate = average_rate + def get_extensiontype_(self): return self.extensiontype_ + def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ def validate_Nml2Quantity_pertime(self, value): # Validate type Nml2Quantity_pertime, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_pertime_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_pertime_patterns_, )) - validate_Nml2Quantity_pertime_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(per_s|per_ms|Hz)$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_pertime_patterns_, )) + validate_Nml2Quantity_pertime_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(per_s|per_ms|Hz))$']] + def _hasContent(self): if ( - super(SpikeGeneratorPoisson, self).hasContent_() + super(SpikeGeneratorPoisson, self)._hasContent() ): return True else: @@ -8655,38 +11544,48 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='S eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'SpikeGeneratorPoisson': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpikeGeneratorPoisson') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpikeGeneratorPoisson') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SpikeGeneratorPoisson', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SpikeGeneratorPoisson', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SpikeGeneratorPoisson'): - super(SpikeGeneratorPoisson, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpikeGeneratorPoisson') + def _exportAttributes(self, outfile, level, 
already_processed, namespaceprefix_='', name_='SpikeGeneratorPoisson'): + super(SpikeGeneratorPoisson, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpikeGeneratorPoisson') if self.average_rate is not None and 'average_rate' not in already_processed: already_processed.add('average_rate') - outfile.write(' averageRate=%s' % (quote_attrib(self.average_rate), )) + outfile.write(' averageRate=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.average_rate), input_name='averageRate')), )) if self.extensiontype_ is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SpikeGeneratorPoisson', fromsubclass_=False, pretty_print=True): - super(SpikeGeneratorPoisson, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get(self.extensiontype_, '') + outfile.write(' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_)) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SpikeGeneratorPoisson', fromsubclass_=False, pretty_print=True): + super(SpikeGeneratorPoisson, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('averageRate', node) if value is not None and 'averageRate' not in already_processed: already_processed.add('averageRate') @@ -8696,26 +11595,32 @@ def buildAttributes(self, node, attrs, already_processed): if value is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') self.extensiontype_ = value - super(SpikeGeneratorPoisson, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(SpikeGeneratorPoisson, self).buildChildren(child_, node, nodeName_, True) + super(SpikeGeneratorPoisson, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(SpikeGeneratorPoisson, self)._buildChildren(child_, node, nodeName_, True) pass # end class SpikeGeneratorPoisson class SpikeGeneratorRandom(Standalone): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('max_isi', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('min_isi', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), + MemberSpec_('max_isi', 'Nml2Quantity_time', 0, 0, 
{'use': 'required', 'name': 'max_isi'}), + MemberSpec_('min_isi', 'Nml2Quantity_time', 0, 0, {'use': 'required', 'name': 'min_isi'}), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, max_isi=None, min_isi=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, max_isi=None, min_isi=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(SpikeGeneratorRandom, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.ns_prefix_ = None + super(globals().get("SpikeGeneratorRandom"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) self.max_isi = _cast(None, max_isi) + self.max_isi_nsprefix_ = None self.min_isi = _cast(None, min_isi) + self.min_isi_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -8727,16 +11632,32 @@ def factory(*args_, **kwargs_): else: return SpikeGeneratorRandom(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_maxISI(self): + return self.max_isi + def set_maxISI(self, max_isi): + self.max_isi = max_isi + def get_minISI(self): + return self.min_isi + def set_minISI(self, min_isi): + self.min_isi = min_isi def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_time_patterns_, )) + validate_Nml2Quantity_time_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms))$']] + def _hasContent(self): if ( - super(SpikeGeneratorRandom, self).hasContent_() + super(SpikeGeneratorRandom, self)._hasContent() ): return True else: @@ -8749,37 +11670,43 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='S eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'SpikeGeneratorRandom': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpikeGeneratorRandom') - if 
self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpikeGeneratorRandom') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SpikeGeneratorRandom', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SpikeGeneratorRandom', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SpikeGeneratorRandom'): - super(SpikeGeneratorRandom, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpikeGeneratorRandom') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SpikeGeneratorRandom'): + super(SpikeGeneratorRandom, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpikeGeneratorRandom') if self.max_isi is not None and 'max_isi' not in already_processed: already_processed.add('max_isi') - outfile.write(' maxISI=%s' % (quote_attrib(self.max_isi), )) + outfile.write(' maxISI=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.max_isi), input_name='maxISI')), )) if self.min_isi is not None and 'min_isi' not in already_processed: already_processed.add('min_isi') - outfile.write(' minISI=%s' % (quote_attrib(self.min_isi), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SpikeGeneratorRandom', fromsubclass_=False, pretty_print=True): - super(SpikeGeneratorRandom, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write(' minISI=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.min_isi), input_name='minISI')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SpikeGeneratorRandom', fromsubclass_=False, pretty_print=True): + super(SpikeGeneratorRandom, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('maxISI', node) if value is not None and 'maxISI' not in already_processed: already_processed.add('maxISI') @@ -8790,24 +11717,29 @@ def buildAttributes(self, node, attrs, already_processed): already_processed.add('minISI') self.min_isi = value self.validate_Nml2Quantity_time(self.min_isi) # validate type Nml2Quantity_time - super(SpikeGeneratorRandom, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(SpikeGeneratorRandom, self).buildChildren(child_, node, nodeName_, True) + 
super(SpikeGeneratorRandom, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(SpikeGeneratorRandom, self)._buildChildren(child_, node, nodeName_, True) pass # end class SpikeGeneratorRandom class SpikeGenerator(Standalone): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('period', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), + MemberSpec_('period', 'Nml2Quantity_time', 0, 0, {'use': 'required', 'name': 'period'}), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, period=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, period=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(SpikeGenerator, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.ns_prefix_ = None + super(globals().get("SpikeGenerator"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) self.period = _cast(None, period) + self.period_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -8819,16 +11751,28 @@ def factory(*args_, **kwargs_): else: return SpikeGenerator(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_period(self): + return self.period + def set_period(self, period): + self.period = period def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. 
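(Editor's sketch, not part of the patch.) The spike-source classes keep their snake_case constructor keywords (period, min_isi, max_isi) while gaining camelCase accessors. A hedged example, assuming the usual top-level neuroml re-export; ids are placeholders.

    import neuroml

    regular = neuroml.SpikeGenerator(id="regular20ms", period="20 ms")
    jittered = neuroml.SpikeGeneratorRandom(id="jittered",
                                            min_isi="10 ms", max_isi="30 ms")
    # new getters added by this regeneration
    print(regular.get_period(), jittered.get_minISI(), jittered.get_maxISI())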
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_time_patterns_, )) + validate_Nml2Quantity_time_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms))$']] + def _hasContent(self): if ( - super(SpikeGenerator, self).hasContent_() + super(SpikeGenerator, self)._hasContent() ): return True else: @@ -8841,64 +11785,77 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='S eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'SpikeGenerator': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpikeGenerator') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpikeGenerator') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SpikeGenerator', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SpikeGenerator', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SpikeGenerator'): - super(SpikeGenerator, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpikeGenerator') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SpikeGenerator'): + super(SpikeGenerator, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpikeGenerator') if self.period is not None and 'period' not in already_processed: already_processed.add('period') - outfile.write(' period=%s' % (quote_attrib(self.period), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SpikeGenerator', fromsubclass_=False, pretty_print=True): - super(SpikeGenerator, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write(' period=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.period), input_name='period')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', 
namespacedef_='', name_='SpikeGenerator', fromsubclass_=False, pretty_print=True): + super(SpikeGenerator, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('period', node) if value is not None and 'period' not in already_processed: already_processed.add('period') self.period = value self.validate_Nml2Quantity_time(self.period) # validate type Nml2Quantity_time - super(SpikeGenerator, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(SpikeGenerator, self).buildChildren(child_, node, nodeName_, True) + super(SpikeGenerator, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(SpikeGenerator, self)._buildChildren(child_, node, nodeName_, True) pass # end class SpikeGenerator class TimedSynapticInput(Standalone): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('synapse', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('spike_target', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('spikes', 'Spike', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Spike', u'name': u'spike', u'minOccurs': u'0'}, None), + MemberSpec_('synapse', 'NmlId', 0, 0, {'use': 'required', 'name': 'synapse'}), + MemberSpec_('spike_target', 'xs:string', 0, 0, {'use': 'required', 'name': 'spike_target'}), + MemberSpec_('spikes', 'Spike', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'spike', 'type': 'Spike'}, None), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, synapse=None, spike_target=None, spikes=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, synapse=None, spike_target=None, spikes=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(TimedSynapticInput, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.ns_prefix_ = None + super(globals().get("TimedSynapticInput"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) self.synapse = _cast(None, synapse) + self.synapse_nsprefix_ = None self.spike_target = _cast(None, spike_target) + self.spike_target_nsprefix_ = None if spikes is None: self.spikes = [] else: self.spikes = spikes + self.spikes_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -8910,22 +11867,48 @@ def factory(*args_, **kwargs_): else: return TimedSynapticInput(*args_, **kwargs_) factory = staticmethod(factory) + def 
get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_spike(self): + return self.spikes + def set_spike(self, spikes): + self.spikes = spikes + def add_spike(self, value): + self.spikes.append(value) + def insert_spike_at(self, index, value): + self.spikes.insert(index, value) + def replace_spike_at(self, index, value): + self.spikes[index] = value + def get_synapse(self): + return self.synapse + def set_synapse(self, synapse): + self.synapse = synapse + def get_spikeTarget(self): + return self.spike_target + def set_spikeTarget(self, spike_target): + self.spike_target = spike_target def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_NmlId_patterns_, )) + validate_NmlId_patterns_ = [['^([a-zA-Z_][a-zA-Z0-9_]*)$']] + def _hasContent(self): if ( self.spikes or - super(TimedSynapticInput, self).hasContent_() + super(TimedSynapticInput, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='TimedSynapticInput', pretty_print=True): + def export(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='TimedSynapticInput', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('TimedSynapticInput') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ @@ -8933,43 +11916,50 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='T eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'TimedSynapticInput': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='TimedSynapticInput') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='TimedSynapticInput') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='TimedSynapticInput', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='TimedSynapticInput', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', 
name_='TimedSynapticInput'): - super(TimedSynapticInput, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='TimedSynapticInput') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='TimedSynapticInput'): + super(TimedSynapticInput, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='TimedSynapticInput') if self.synapse is not None and 'synapse' not in already_processed: already_processed.add('synapse') - outfile.write(' synapse=%s' % (quote_attrib(self.synapse), )) + outfile.write(' synapse=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.synapse), input_name='synapse')), )) if self.spike_target is not None and 'spike_target' not in already_processed: already_processed.add('spike_target') outfile.write(' spikeTarget=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.spike_target), input_name='spikeTarget')), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='TimedSynapticInput', fromsubclass_=False, pretty_print=True): - super(TimedSynapticInput, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='TimedSynapticInput', fromsubclass_=False, pretty_print=True): + super(TimedSynapticInput, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) if pretty_print: eol_ = '\n' else: eol_ = '' for spike_ in self.spikes: + namespaceprefix_ = self.spikes_nsprefix_ + ':' if (UseCapturedNS_ and self.spikes_nsprefix_) else '' spike_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='spike', pretty_print=pretty_print) - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('synapse', node) if value is not None and 'synapse' not in already_processed: already_processed.add('synapse') @@ -8979,31 +11969,36 @@ def buildAttributes(self, node, attrs, already_processed): if value is not None and 'spikeTarget' not in already_processed: already_processed.add('spikeTarget') self.spike_target = value - super(TimedSynapticInput, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + super(TimedSynapticInput, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'spike': obj_ = Spike.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.spikes.append(obj_) obj_.original_tagname_ = 'spike' - super(TimedSynapticInput, self).buildChildren(child_, node, nodeName_, True) + super(TimedSynapticInput, self)._buildChildren(child_, 
node, nodeName_, True) # end class TimedSynapticInput class SpikeArray(Standalone): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('spikes', 'Spike', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Spike', u'name': u'spike', u'minOccurs': u'0'}, None), + MemberSpec_('spikes', 'Spike', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'spike', 'type': 'Spike'}, None), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, spikes=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, spikes=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(SpikeArray, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.ns_prefix_ = None + super(globals().get("SpikeArray"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) if spikes is None: self.spikes = [] else: self.spikes = spikes + self.spikes_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -9015,15 +12010,29 @@ def factory(*args_, **kwargs_): else: return SpikeArray(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_spike(self): + return self.spikes + def set_spike(self, spikes): + self.spikes = spikes + def add_spike(self, value): + self.spikes.append(value) + def insert_spike_at(self, index, value): + self.spikes.insert(index, value) + def replace_spike_at(self, index, value): + self.spikes[index] = value + def _hasContent(self): if ( self.spikes or - super(SpikeArray, self).hasContent_() + super(SpikeArray, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SpikeArray', pretty_print=True): + def export(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='SpikeArray', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('SpikeArray') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ @@ -9031,59 +12040,71 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='S eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'SpikeArray': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpikeArray') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpikeArray') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SpikeArray', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SpikeArray', pretty_print=pretty_print) 
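(Editor's sketch, not part of the patch.) SpikeArray still takes its child Spike objects through the spikes keyword, and the generated export(outfile, level, ...) signature shown above is unchanged apart from the default namespacedef_. A hedged round-trip example; the lowercase element name passed as name_ is an assumption about how this element is normally serialized.

    import sys
    import neuroml

    spikes = [neuroml.Spike(id=i, time="%d ms" % t) for i, t in enumerate((5, 12, 27))]
    arr = neuroml.SpikeArray(id="sa0", spikes=spikes)

    # export(outfile, level, ...) writes the element and its child <spike> elements
    arr.export(sys.stdout, 0, name_="spikeArray")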
showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SpikeArray'): - super(SpikeArray, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpikeArray') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SpikeArray', fromsubclass_=False, pretty_print=True): - super(SpikeArray, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SpikeArray'): + super(SpikeArray, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpikeArray') + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='SpikeArray', fromsubclass_=False, pretty_print=True): + super(SpikeArray, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) if pretty_print: eol_ = '\n' else: eol_ = '' for spike_ in self.spikes: + namespaceprefix_ = self.spikes_nsprefix_ + ':' if (UseCapturedNS_ and self.spikes_nsprefix_) else '' spike_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='spike', pretty_print=pretty_print) - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - super(SpikeArray, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + def _buildAttributes(self, node, attrs, already_processed): + super(SpikeArray, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'spike': obj_ = Spike.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.spikes.append(obj_) obj_.original_tagname_ = 'spike' - super(SpikeArray, self).buildChildren(child_, node, nodeName_, True) + super(SpikeArray, self)._buildChildren(child_, node, nodeName_, True) # end class SpikeArray class Spike(BaseNonNegativeIntegerId): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('time', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), + MemberSpec_('time', 'Nml2Quantity_time', 0, 0, {'use': 'required', 'name': 'time'}), ] subclass = None superclass = BaseNonNegativeIntegerId - def __init__(self, neuro_lex_id=None, id=None, time=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, time=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(Spike, self).__init__(neuro_lex_id, id, **kwargs_) + self.ns_prefix_ = None + 
super(globals().get("Spike"), self).__init__(neuro_lex_id, id, **kwargs_) self.time = _cast(None, time) + self.time_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -9095,16 +12116,28 @@ def factory(*args_, **kwargs_): else: return Spike(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_time(self): + return self.time + def set_time(self, time): + self.time = time def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_time_patterns_, )) + validate_Nml2Quantity_time_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms))$']] + def _hasContent(self): if ( - super(Spike, self).hasContent_() + super(Spike, self)._hasContent() ): return True else: @@ -9117,69 +12150,86 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='S eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'Spike': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Spike') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Spike') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Spike', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Spike', pretty_print=pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Spike'): - super(Spike, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Spike') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Spike'): + super(Spike, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Spike') if self.time is not None and 'time' not in already_processed: already_processed.add('time') - outfile.write(' time=%s' % (quote_attrib(self.time), )) - def exportChildren(self, outfile, level, namespaceprefix_='', 
namespacedef_='', name_='Spike', fromsubclass_=False, pretty_print=True): - super(Spike, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write(' time=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.time), input_name='time')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Spike', fromsubclass_=False, pretty_print=True): + super(Spike, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) pass - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('time', node) if value is not None and 'time' not in already_processed: already_processed.add('time') self.time = value self.validate_Nml2Quantity_time(self.time) # validate type Nml2Quantity_time - super(Spike, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(Spike, self).buildChildren(child_, node, nodeName_, True) + super(Spike, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(Spike, self)._buildChildren(child_, node, nodeName_, True) pass # end class Spike class VoltageClampTriple(Standalone): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('active', 'ZeroOrOne', 0, 0, {'use': u'required'}), - MemberSpec_('delay', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('duration', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('conditioning_voltage', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('testing_voltage', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('return_voltage', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('simple_series_resistance', 'Nml2Quantity_resistance', 0, 0, {'use': u'required'}), + MemberSpec_('active', 'ZeroOrOne', 0, 0, {'use': 'required', 'name': 'active'}), + MemberSpec_('delay', 'Nml2Quantity_time', 0, 0, {'use': 'required', 'name': 'delay'}), + MemberSpec_('duration', 'Nml2Quantity_time', 0, 0, {'use': 'required', 'name': 'duration'}), + MemberSpec_('conditioning_voltage', 'Nml2Quantity_voltage', 0, 0, {'use': 'required', 'name': 'conditioning_voltage'}), + MemberSpec_('testing_voltage', 'Nml2Quantity_voltage', 0, 0, {'use': 'required', 'name': 'testing_voltage'}), + MemberSpec_('return_voltage', 'Nml2Quantity_voltage', 0, 0, {'use': 'required', 'name': 'return_voltage'}), + MemberSpec_('simple_series_resistance', 'Nml2Quantity_resistance', 0, 0, {'use': 'required', 'name': 'simple_series_resistance'}), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, active=None, delay=None, duration=None, 
conditioning_voltage=None, testing_voltage=None, return_voltage=None, simple_series_resistance=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, active=None, delay=None, duration=None, conditioning_voltage=None, testing_voltage=None, return_voltage=None, simple_series_resistance=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(VoltageClampTriple, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.ns_prefix_ = None + super(globals().get("VoltageClampTriple"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) self.active = _cast(float, active) + self.active_nsprefix_ = None self.delay = _cast(None, delay) + self.delay_nsprefix_ = None self.duration = _cast(None, duration) + self.duration_nsprefix_ = None self.conditioning_voltage = _cast(None, conditioning_voltage) + self.conditioning_voltage_nsprefix_ = None self.testing_voltage = _cast(None, testing_voltage) + self.testing_voltage_nsprefix_ = None self.return_voltage = _cast(None, return_voltage) + self.return_voltage_nsprefix_ = None self.simple_series_resistance = _cast(None, simple_series_resistance) + self.simple_series_resistance_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -9191,42 +12241,87 @@ def factory(*args_, **kwargs_): else: return VoltageClampTriple(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_active(self): + return self.active + def set_active(self, active): + self.active = active + def get_delay(self): + return self.delay + def set_delay(self, delay): + self.delay = delay + def get_duration(self): + return self.duration + def set_duration(self, duration): + self.duration = duration + def get_conditioningVoltage(self): + return self.conditioning_voltage + def set_conditioningVoltage(self, conditioning_voltage): + self.conditioning_voltage = conditioning_voltage + def get_testingVoltage(self): + return self.testing_voltage + def set_testingVoltage(self, testing_voltage): + self.testing_voltage = testing_voltage + def get_returnVoltage(self): + return self.return_voltage + def set_returnVoltage(self, return_voltage): + self.return_voltage = return_voltage + def get_simpleSeriesResistance(self): + return self.simple_series_resistance + def set_simpleSeriesResistance(self, simple_series_resistance): + self.simple_series_resistance = simple_series_resistance def validate_ZeroOrOne(self, value): # Validate type ZeroOrOne, a restriction on xs:double. 
- if value is not None and Validate_simpletypes_: - value = str(value) - enumerations = ['0', '1'] - enumeration_respectee = False - for enum in enumerations: - if value == enum: - enumeration_respectee = True - break - if not enumeration_respectee: - warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on ZeroOrOne' % {"value" : value.encode("utf-8")} ) + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, float): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (float)' % {"value": value, "lineno": lineno, }) + return False + value = value + enumerations = [0.0, 1.0] + if value not in enumerations: + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on ZeroOrOne' % {"value" : encode_str_2_3(value), "lineno": lineno} ) + result = False def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_time_patterns_, )) + validate_Nml2Quantity_time_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms))$']] def validate_Nml2Quantity_voltage(self, value): # Validate type Nml2Quantity_voltage, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_voltage_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_voltage_patterns_, )) - validate_Nml2Quantity_voltage_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$']] + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_voltage_patterns_, )) + validate_Nml2Quantity_voltage_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV))$']] def validate_Nml2Quantity_resistance(self, value): # Validate type Nml2Quantity_resistance, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_resistance_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_resistance_patterns_, )) - validate_Nml2Quantity_resistance_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(ohm|kohm|Mohm)$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_resistance_patterns_, )) + validate_Nml2Quantity_resistance_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(ohm|kohm|Mohm))$']] + def _hasContent(self): if ( - super(VoltageClampTriple, self).hasContent_() + super(VoltageClampTriple, self)._hasContent() ): return True else: @@ -9239,59 +12334,63 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='V eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'VoltageClampTriple': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='VoltageClampTriple') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='VoltageClampTriple') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='VoltageClampTriple', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='VoltageClampTriple', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='VoltageClampTriple'): - super(VoltageClampTriple, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='VoltageClampTriple') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='VoltageClampTriple'): + super(VoltageClampTriple, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='VoltageClampTriple') if self.active is not None and 'active' not in already_processed: already_processed.add('active') - outfile.write(' active=%s' % (quote_attrib(self.active), )) + outfile.write(' active="%s"' % self.gds_format_double(self.active, input_name='active')) if self.delay is not None and 'delay' not in already_processed: already_processed.add('delay') - outfile.write(' delay=%s' % (quote_attrib(self.delay), )) + outfile.write(' delay=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.delay), input_name='delay')), )) if self.duration is not None and 'duration' not in already_processed: already_processed.add('duration') - 
outfile.write(' duration=%s' % (quote_attrib(self.duration), )) + outfile.write(' duration=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.duration), input_name='duration')), )) if self.conditioning_voltage is not None and 'conditioning_voltage' not in already_processed: already_processed.add('conditioning_voltage') - outfile.write(' conditioningVoltage=%s' % (quote_attrib(self.conditioning_voltage), )) + outfile.write(' conditioningVoltage=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.conditioning_voltage), input_name='conditioningVoltage')), )) if self.testing_voltage is not None and 'testing_voltage' not in already_processed: already_processed.add('testing_voltage') - outfile.write(' testingVoltage=%s' % (quote_attrib(self.testing_voltage), )) + outfile.write(' testingVoltage=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.testing_voltage), input_name='testingVoltage')), )) if self.return_voltage is not None and 'return_voltage' not in already_processed: already_processed.add('return_voltage') - outfile.write(' returnVoltage=%s' % (quote_attrib(self.return_voltage), )) + outfile.write(' returnVoltage=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.return_voltage), input_name='returnVoltage')), )) if self.simple_series_resistance is not None and 'simple_series_resistance' not in already_processed: already_processed.add('simple_series_resistance') - outfile.write(' simpleSeriesResistance=%s' % (quote_attrib(self.simple_series_resistance), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='VoltageClampTriple', fromsubclass_=False, pretty_print=True): - super(VoltageClampTriple, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write(' simpleSeriesResistance=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.simple_series_resistance), input_name='simpleSeriesResistance')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='VoltageClampTriple', fromsubclass_=False, pretty_print=True): + super(VoltageClampTriple, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('active', node) if value is not None and 'active' not in already_processed: already_processed.add('active') - try: - self.active = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (active): %s' % exp) + value = self.gds_parse_double(value, node, 'active') + self.active = value self.validate_ZeroOrOne(self.active) # validate type ZeroOrOne value = find_attr_value_('delay', node) if value is not None and 'delay' not in already_processed: @@ -9323,30 +12422,38 @@ def buildAttributes(self, node, attrs, 
already_processed): already_processed.add('simpleSeriesResistance') self.simple_series_resistance = value self.validate_Nml2Quantity_resistance(self.simple_series_resistance) # validate type Nml2Quantity_resistance - super(VoltageClampTriple, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(VoltageClampTriple, self).buildChildren(child_, node, nodeName_, True) + super(VoltageClampTriple, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(VoltageClampTriple, self)._buildChildren(child_, node, nodeName_, True) pass # end class VoltageClampTriple class VoltageClamp(Standalone): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('delay', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('duration', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('target_voltage', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('simple_series_resistance', 'Nml2Quantity_resistance', 0, 0, {'use': u'required'}), + MemberSpec_('delay', 'Nml2Quantity_time', 0, 0, {'use': 'required', 'name': 'delay'}), + MemberSpec_('duration', 'Nml2Quantity_time', 0, 0, {'use': 'required', 'name': 'duration'}), + MemberSpec_('target_voltage', 'Nml2Quantity_voltage', 0, 0, {'use': 'required', 'name': 'target_voltage'}), + MemberSpec_('simple_series_resistance', 'Nml2Quantity_resistance', 0, 0, {'use': 'required', 'name': 'simple_series_resistance'}), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, delay=None, duration=None, target_voltage=None, simple_series_resistance=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, delay=None, duration=None, target_voltage=None, simple_series_resistance=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(VoltageClamp, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.ns_prefix_ = None + super(globals().get("VoltageClamp"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) self.delay = _cast(None, delay) + self.delay_nsprefix_ = None self.duration = _cast(None, duration) + self.duration_nsprefix_ = None self.target_voltage = _cast(None, target_voltage) + self.target_voltage_nsprefix_ = None self.simple_series_resistance = _cast(None, simple_series_resistance) + self.simple_series_resistance_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -9358,30 +12465,62 @@ def factory(*args_, **kwargs_): else: return VoltageClamp(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_delay(self): + return self.delay + def set_delay(self, delay): + self.delay = delay + def get_duration(self): + return self.duration + def set_duration(self, duration): + self.duration = duration + def get_targetVoltage(self): + return self.target_voltage + def set_targetVoltage(self, target_voltage): + self.target_voltage = target_voltage + def 
get_simpleSeriesResistance(self): + return self.simple_series_resistance + def set_simpleSeriesResistance(self, simple_series_resistance): + self.simple_series_resistance = simple_series_resistance def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_time_patterns_, )) + validate_Nml2Quantity_time_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms))$']] def validate_Nml2Quantity_voltage(self, value): # Validate type Nml2Quantity_voltage, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_voltage_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_voltage_patterns_, )) - validate_Nml2Quantity_voltage_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$']] + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_voltage_patterns_, )) + validate_Nml2Quantity_voltage_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV))$']] def validate_Nml2Quantity_resistance(self, value): # Validate type Nml2Quantity_resistance, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_resistance_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_resistance_patterns_, )) - validate_Nml2Quantity_resistance_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(ohm|kohm|Mohm)$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_resistance_patterns_, )) + validate_Nml2Quantity_resistance_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(ohm|kohm|Mohm))$']] + def _hasContent(self): if ( - super(VoltageClamp, self).hasContent_() + super(VoltageClamp, self)._hasContent() ): return True else: @@ -9394,43 +12533,49 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='V eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'VoltageClamp': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='VoltageClamp') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='VoltageClamp') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='VoltageClamp', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='VoltageClamp', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='VoltageClamp'): - super(VoltageClamp, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='VoltageClamp') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='VoltageClamp'): + super(VoltageClamp, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='VoltageClamp') if self.delay is not None and 'delay' not in already_processed: already_processed.add('delay') - outfile.write(' delay=%s' % (quote_attrib(self.delay), )) + outfile.write(' delay=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.delay), input_name='delay')), )) if self.duration is not None and 'duration' not in already_processed: already_processed.add('duration') - outfile.write(' duration=%s' % (quote_attrib(self.duration), )) + outfile.write(' duration=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.duration), input_name='duration')), )) if self.target_voltage is not None and 'target_voltage' not in already_processed: already_processed.add('target_voltage') - outfile.write(' 
targetVoltage=%s' % (quote_attrib(self.target_voltage), )) + outfile.write(' targetVoltage=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.target_voltage), input_name='targetVoltage')), )) if self.simple_series_resistance is not None and 'simple_series_resistance' not in already_processed: already_processed.add('simple_series_resistance') - outfile.write(' simpleSeriesResistance=%s' % (quote_attrib(self.simple_series_resistance), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='VoltageClamp', fromsubclass_=False, pretty_print=True): - super(VoltageClamp, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write(' simpleSeriesResistance=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.simple_series_resistance), input_name='simpleSeriesResistance')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='VoltageClamp', fromsubclass_=False, pretty_print=True): + super(VoltageClamp, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('delay', node) if value is not None and 'delay' not in already_processed: already_processed.add('delay') @@ -9451,37 +12596,44 @@ def buildAttributes(self, node, attrs, already_processed): already_processed.add('simpleSeriesResistance') self.simple_series_resistance = value self.validate_Nml2Quantity_resistance(self.simple_series_resistance) # validate type Nml2Quantity_resistance - super(VoltageClamp, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(VoltageClamp, self).buildChildren(child_, node, nodeName_, True) + super(VoltageClamp, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(VoltageClamp, self)._buildChildren(child_, node, nodeName_, True) pass # end class VoltageClamp class CompoundInputDL(Standalone): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('pulse_generator_dls', 'PulseGeneratorDL', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'PulseGeneratorDL', u'name': u'pulseGeneratorDL', u'minOccurs': u'0'}, None), - MemberSpec_('sine_generator_dls', 'SineGeneratorDL', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'SineGeneratorDL', u'name': u'sineGeneratorDL', u'minOccurs': u'0'}, None), - MemberSpec_('ramp_generator_dls', 'RampGeneratorDL', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'RampGeneratorDL', u'name': u'rampGeneratorDL', u'minOccurs': u'0'}, None), + MemberSpec_('pulse_generator_dls', 'PulseGeneratorDL', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'pulseGeneratorDL', 
'type': 'PulseGeneratorDL'}, None), + MemberSpec_('sine_generator_dls', 'SineGeneratorDL', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'sineGeneratorDL', 'type': 'SineGeneratorDL'}, None), + MemberSpec_('ramp_generator_dls', 'RampGeneratorDL', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'rampGeneratorDL', 'type': 'RampGeneratorDL'}, None), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, pulse_generator_dls=None, sine_generator_dls=None, ramp_generator_dls=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, pulse_generator_dls=None, sine_generator_dls=None, ramp_generator_dls=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(CompoundInputDL, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.ns_prefix_ = None + super(globals().get("CompoundInputDL"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) if pulse_generator_dls is None: self.pulse_generator_dls = [] else: self.pulse_generator_dls = pulse_generator_dls + self.pulse_generator_dls_nsprefix_ = None if sine_generator_dls is None: self.sine_generator_dls = [] else: self.sine_generator_dls = sine_generator_dls + self.sine_generator_dls_nsprefix_ = None if ramp_generator_dls is None: self.ramp_generator_dls = [] else: self.ramp_generator_dls = ramp_generator_dls + self.ramp_generator_dls_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -9493,17 +12645,51 @@ def factory(*args_, **kwargs_): else: return CompoundInputDL(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_pulseGeneratorDL(self): + return self.pulse_generator_dls + def set_pulseGeneratorDL(self, pulse_generator_dls): + self.pulse_generator_dls = pulse_generator_dls + def add_pulseGeneratorDL(self, value): + self.pulse_generator_dls.append(value) + def insert_pulseGeneratorDL_at(self, index, value): + self.pulse_generator_dls.insert(index, value) + def replace_pulseGeneratorDL_at(self, index, value): + self.pulse_generator_dls[index] = value + def get_sineGeneratorDL(self): + return self.sine_generator_dls + def set_sineGeneratorDL(self, sine_generator_dls): + self.sine_generator_dls = sine_generator_dls + def add_sineGeneratorDL(self, value): + self.sine_generator_dls.append(value) + def insert_sineGeneratorDL_at(self, index, value): + self.sine_generator_dls.insert(index, value) + def replace_sineGeneratorDL_at(self, index, value): + self.sine_generator_dls[index] = value + def get_rampGeneratorDL(self): + return self.ramp_generator_dls + def set_rampGeneratorDL(self, ramp_generator_dls): + self.ramp_generator_dls = ramp_generator_dls + def add_rampGeneratorDL(self, value): + self.ramp_generator_dls.append(value) + def insert_rampGeneratorDL_at(self, index, value): + self.ramp_generator_dls.insert(index, value) + def replace_rampGeneratorDL_at(self, index, value): + self.ramp_generator_dls[index] = value + def _hasContent(self): if ( self.pulse_generator_dls or self.sine_generator_dls or self.ramp_generator_dls or - 
super(CompoundInputDL, self).hasContent_() + super(CompoundInputDL, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CompoundInputDL', pretty_print=True): + def export(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='CompoundInputDL', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('CompoundInputDL') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ @@ -9511,86 +12697,102 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='C eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'CompoundInputDL': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CompoundInputDL') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CompoundInputDL') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CompoundInputDL', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CompoundInputDL', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CompoundInputDL'): - super(CompoundInputDL, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CompoundInputDL') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CompoundInputDL', fromsubclass_=False, pretty_print=True): - super(CompoundInputDL, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CompoundInputDL'): + super(CompoundInputDL, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CompoundInputDL') + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='CompoundInputDL', fromsubclass_=False, pretty_print=True): + super(CompoundInputDL, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) if pretty_print: eol_ = '\n' else: eol_ = '' for pulseGeneratorDL_ in self.pulse_generator_dls: + namespaceprefix_ = self.pulse_generator_dls_nsprefix_ + ':' if (UseCapturedNS_ and self.pulse_generator_dls_nsprefix_) else '' pulseGeneratorDL_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='pulseGeneratorDL', pretty_print=pretty_print) for sineGeneratorDL_ in self.sine_generator_dls: + namespaceprefix_ = self.sine_generator_dls_nsprefix_ + ':' if (UseCapturedNS_ and self.sine_generator_dls_nsprefix_) else '' sineGeneratorDL_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='sineGeneratorDL', pretty_print=pretty_print) for rampGeneratorDL_ in self.ramp_generator_dls: + 
namespaceprefix_ = self.ramp_generator_dls_nsprefix_ + ':' if (UseCapturedNS_ and self.ramp_generator_dls_nsprefix_) else '' rampGeneratorDL_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='rampGeneratorDL', pretty_print=pretty_print) - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - super(CompoundInputDL, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + def _buildAttributes(self, node, attrs, already_processed): + super(CompoundInputDL, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'pulseGeneratorDL': obj_ = PulseGeneratorDL.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.pulse_generator_dls.append(obj_) obj_.original_tagname_ = 'pulseGeneratorDL' elif nodeName_ == 'sineGeneratorDL': obj_ = SineGeneratorDL.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.sine_generator_dls.append(obj_) obj_.original_tagname_ = 'sineGeneratorDL' elif nodeName_ == 'rampGeneratorDL': obj_ = RampGeneratorDL.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.ramp_generator_dls.append(obj_) obj_.original_tagname_ = 'rampGeneratorDL' - super(CompoundInputDL, self).buildChildren(child_, node, nodeName_, True) + super(CompoundInputDL, self)._buildChildren(child_, node, nodeName_, True) # end class CompoundInputDL class CompoundInput(Standalone): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('pulse_generators', 'PulseGenerator', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'PulseGenerator', u'name': u'pulseGenerator', u'minOccurs': u'0'}, None), - MemberSpec_('sine_generators', 'SineGenerator', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'SineGenerator', u'name': u'sineGenerator', u'minOccurs': u'0'}, None), - MemberSpec_('ramp_generators', 'RampGenerator', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'RampGenerator', u'name': u'rampGenerator', u'minOccurs': u'0'}, None), + MemberSpec_('pulse_generators', 'PulseGenerator', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'pulseGenerator', 'type': 'PulseGenerator'}, None), + MemberSpec_('sine_generators', 'SineGenerator', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'sineGenerator', 'type': 'SineGenerator'}, None), + MemberSpec_('ramp_generators', 'RampGenerator', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'rampGenerator', 'type': 'RampGenerator'}, None), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, pulse_generators=None, sine_generators=None, ramp_generators=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, 
notes=None, properties=None, annotation=None, pulse_generators=None, sine_generators=None, ramp_generators=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(CompoundInput, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.ns_prefix_ = None + super(globals().get("CompoundInput"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) if pulse_generators is None: self.pulse_generators = [] else: self.pulse_generators = pulse_generators + self.pulse_generators_nsprefix_ = None if sine_generators is None: self.sine_generators = [] else: self.sine_generators = sine_generators + self.sine_generators_nsprefix_ = None if ramp_generators is None: self.ramp_generators = [] else: self.ramp_generators = ramp_generators + self.ramp_generators_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -9602,17 +12804,51 @@ def factory(*args_, **kwargs_): else: return CompoundInput(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_pulseGenerator(self): + return self.pulse_generators + def set_pulseGenerator(self, pulse_generators): + self.pulse_generators = pulse_generators + def add_pulseGenerator(self, value): + self.pulse_generators.append(value) + def insert_pulseGenerator_at(self, index, value): + self.pulse_generators.insert(index, value) + def replace_pulseGenerator_at(self, index, value): + self.pulse_generators[index] = value + def get_sineGenerator(self): + return self.sine_generators + def set_sineGenerator(self, sine_generators): + self.sine_generators = sine_generators + def add_sineGenerator(self, value): + self.sine_generators.append(value) + def insert_sineGenerator_at(self, index, value): + self.sine_generators.insert(index, value) + def replace_sineGenerator_at(self, index, value): + self.sine_generators[index] = value + def get_rampGenerator(self): + return self.ramp_generators + def set_rampGenerator(self, ramp_generators): + self.ramp_generators = ramp_generators + def add_rampGenerator(self, value): + self.ramp_generators.append(value) + def insert_rampGenerator_at(self, index, value): + self.ramp_generators.insert(index, value) + def replace_rampGenerator_at(self, index, value): + self.ramp_generators[index] = value + def _hasContent(self): if ( self.pulse_generators or self.sine_generators or self.ramp_generators or - super(CompoundInput, self).hasContent_() + super(CompoundInput, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CompoundInput', pretty_print=True): + def export(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='CompoundInput', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('CompoundInput') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ @@ -9620,81 +12856,99 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='C eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'CompoundInput': name_ = self.original_tagname_ + if UseCapturedNS_ and 
self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CompoundInput') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CompoundInput') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CompoundInput', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CompoundInput', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CompoundInput'): - super(CompoundInput, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CompoundInput') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CompoundInput', fromsubclass_=False, pretty_print=True): - super(CompoundInput, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CompoundInput'): + super(CompoundInput, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CompoundInput') + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='CompoundInput', fromsubclass_=False, pretty_print=True): + super(CompoundInput, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) if pretty_print: eol_ = '\n' else: eol_ = '' for pulseGenerator_ in self.pulse_generators: + namespaceprefix_ = self.pulse_generators_nsprefix_ + ':' if (UseCapturedNS_ and self.pulse_generators_nsprefix_) else '' pulseGenerator_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='pulseGenerator', pretty_print=pretty_print) for sineGenerator_ in self.sine_generators: + namespaceprefix_ = self.sine_generators_nsprefix_ + ':' if (UseCapturedNS_ and self.sine_generators_nsprefix_) else '' sineGenerator_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='sineGenerator', pretty_print=pretty_print) for rampGenerator_ in self.ramp_generators: + namespaceprefix_ = self.ramp_generators_nsprefix_ + ':' if (UseCapturedNS_ and self.ramp_generators_nsprefix_) else '' rampGenerator_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='rampGenerator', pretty_print=pretty_print) - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - super(CompoundInput, self).buildAttributes(node, attrs, 
already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + def _buildAttributes(self, node, attrs, already_processed): + super(CompoundInput, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'pulseGenerator': obj_ = PulseGenerator.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.pulse_generators.append(obj_) obj_.original_tagname_ = 'pulseGenerator' elif nodeName_ == 'sineGenerator': obj_ = SineGenerator.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.sine_generators.append(obj_) obj_.original_tagname_ = 'sineGenerator' elif nodeName_ == 'rampGenerator': obj_ = RampGenerator.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.ramp_generators.append(obj_) obj_.original_tagname_ = 'rampGenerator' - super(CompoundInput, self).buildChildren(child_, node, nodeName_, True) + super(CompoundInput, self)._buildChildren(child_, node, nodeName_, True) # end class CompoundInput class RampGeneratorDL(Standalone): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('delay', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('duration', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('start_amplitude', 'Nml2Quantity_none', 0, 0, {'use': u'required'}), - MemberSpec_('finish_amplitude', 'Nml2Quantity_none', 0, 0, {'use': u'required'}), - MemberSpec_('baseline_amplitude', 'Nml2Quantity_none', 0, 0, {'use': u'required'}), + MemberSpec_('delay', 'Nml2Quantity_time', 0, 0, {'use': 'required', 'name': 'delay'}), + MemberSpec_('duration', 'Nml2Quantity_time', 0, 0, {'use': 'required', 'name': 'duration'}), + MemberSpec_('start_amplitude', 'Nml2Quantity_none', 0, 0, {'use': 'required', 'name': 'start_amplitude'}), + MemberSpec_('finish_amplitude', 'Nml2Quantity_none', 0, 0, {'use': 'required', 'name': 'finish_amplitude'}), + MemberSpec_('baseline_amplitude', 'Nml2Quantity_none', 0, 0, {'use': 'required', 'name': 'baseline_amplitude'}), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, delay=None, duration=None, start_amplitude=None, finish_amplitude=None, baseline_amplitude=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, delay=None, duration=None, start_amplitude=None, finish_amplitude=None, baseline_amplitude=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(RampGeneratorDL, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.ns_prefix_ = None + super(globals().get("RampGeneratorDL"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) self.delay = _cast(None, delay) + self.delay_nsprefix_ = None self.duration = _cast(None, duration) + self.duration_nsprefix_ = None self.start_amplitude = _cast(None, start_amplitude) + self.start_amplitude_nsprefix_ = None self.finish_amplitude = _cast(None, finish_amplitude) + self.finish_amplitude_nsprefix_ = None self.baseline_amplitude = _cast(None, baseline_amplitude) + self.baseline_amplitude_nsprefix_ = None 
def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -9706,23 +12960,55 @@ def factory(*args_, **kwargs_): else: return RampGeneratorDL(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_delay(self): + return self.delay + def set_delay(self, delay): + self.delay = delay + def get_duration(self): + return self.duration + def set_duration(self, duration): + self.duration = duration + def get_startAmplitude(self): + return self.start_amplitude + def set_startAmplitude(self, start_amplitude): + self.start_amplitude = start_amplitude + def get_finishAmplitude(self): + return self.finish_amplitude + def set_finishAmplitude(self, finish_amplitude): + self.finish_amplitude = finish_amplitude + def get_baselineAmplitude(self): + return self.baseline_amplitude + def set_baselineAmplitude(self, baseline_amplitude): + self.baseline_amplitude = baseline_amplitude def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_time_patterns_, )) + validate_Nml2Quantity_time_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms))$']] def validate_Nml2Quantity_none(self, value): # Validate type Nml2Quantity_none, a restriction on xs:string. 
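    # Illustrative note, not part of the generated code: Nml2Quantity_none covers
    # dimensionless values, so the pattern above accepts plain numbers such as "1",
    # "-0.5" or "3e-2" and rejects anything carrying a unit, e.g. "0.5 mV".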
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_none_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_none_patterns_, )) - validate_Nml2Quantity_none_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_none_patterns_, )) + validate_Nml2Quantity_none_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?)$']] + def _hasContent(self): if ( - super(RampGeneratorDL, self).hasContent_() + super(RampGeneratorDL, self)._hasContent() ): return True else: @@ -9735,46 +13021,52 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='R eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'RampGeneratorDL': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='RampGeneratorDL') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='RampGeneratorDL') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='RampGeneratorDL', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='RampGeneratorDL', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='RampGeneratorDL'): - super(RampGeneratorDL, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='RampGeneratorDL') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='RampGeneratorDL'): + super(RampGeneratorDL, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='RampGeneratorDL') if self.delay is not None and 'delay' not in already_processed: already_processed.add('delay') - outfile.write(' delay=%s' % (quote_attrib(self.delay), )) + outfile.write(' delay=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.delay), input_name='delay')), )) if self.duration is not None and 'duration' not in already_processed: already_processed.add('duration') - outfile.write(' duration=%s' % (quote_attrib(self.duration), )) + outfile.write(' duration=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.duration), input_name='duration')), )) if self.start_amplitude is not None and 'start_amplitude' not in already_processed: already_processed.add('start_amplitude') - outfile.write(' startAmplitude=%s' % 
(quote_attrib(self.start_amplitude), )) + outfile.write(' startAmplitude=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.start_amplitude), input_name='startAmplitude')), )) if self.finish_amplitude is not None and 'finish_amplitude' not in already_processed: already_processed.add('finish_amplitude') - outfile.write(' finishAmplitude=%s' % (quote_attrib(self.finish_amplitude), )) + outfile.write(' finishAmplitude=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.finish_amplitude), input_name='finishAmplitude')), )) if self.baseline_amplitude is not None and 'baseline_amplitude' not in already_processed: already_processed.add('baseline_amplitude') - outfile.write(' baselineAmplitude=%s' % (quote_attrib(self.baseline_amplitude), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='RampGeneratorDL', fromsubclass_=False, pretty_print=True): - super(RampGeneratorDL, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write(' baselineAmplitude=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.baseline_amplitude), input_name='baselineAmplitude')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='RampGeneratorDL', fromsubclass_=False, pretty_print=True): + super(RampGeneratorDL, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('delay', node) if value is not None and 'delay' not in already_processed: already_processed.add('delay') @@ -9800,32 +13092,41 @@ def buildAttributes(self, node, attrs, already_processed): already_processed.add('baselineAmplitude') self.baseline_amplitude = value self.validate_Nml2Quantity_none(self.baseline_amplitude) # validate type Nml2Quantity_none - super(RampGeneratorDL, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(RampGeneratorDL, self).buildChildren(child_, node, nodeName_, True) + super(RampGeneratorDL, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(RampGeneratorDL, self)._buildChildren(child_, node, nodeName_, True) pass # end class RampGeneratorDL class RampGenerator(Standalone): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('delay', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('duration', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('start_amplitude', 'Nml2Quantity_current', 0, 0, {'use': u'required'}), - MemberSpec_('finish_amplitude', 'Nml2Quantity_current', 0, 0, {'use': u'required'}), - MemberSpec_('baseline_amplitude', 
'Nml2Quantity_current', 0, 0, {'use': u'required'}), + MemberSpec_('delay', 'Nml2Quantity_time', 0, 0, {'use': 'required', 'name': 'delay'}), + MemberSpec_('duration', 'Nml2Quantity_time', 0, 0, {'use': 'required', 'name': 'duration'}), + MemberSpec_('start_amplitude', 'Nml2Quantity_current', 0, 0, {'use': 'required', 'name': 'start_amplitude'}), + MemberSpec_('finish_amplitude', 'Nml2Quantity_current', 0, 0, {'use': 'required', 'name': 'finish_amplitude'}), + MemberSpec_('baseline_amplitude', 'Nml2Quantity_current', 0, 0, {'use': 'required', 'name': 'baseline_amplitude'}), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, delay=None, duration=None, start_amplitude=None, finish_amplitude=None, baseline_amplitude=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, delay=None, duration=None, start_amplitude=None, finish_amplitude=None, baseline_amplitude=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(RampGenerator, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.ns_prefix_ = None + super(globals().get("RampGenerator"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) self.delay = _cast(None, delay) + self.delay_nsprefix_ = None self.duration = _cast(None, duration) + self.duration_nsprefix_ = None self.start_amplitude = _cast(None, start_amplitude) + self.start_amplitude_nsprefix_ = None self.finish_amplitude = _cast(None, finish_amplitude) + self.finish_amplitude_nsprefix_ = None self.baseline_amplitude = _cast(None, baseline_amplitude) + self.baseline_amplitude_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -9837,23 +13138,55 @@ def factory(*args_, **kwargs_): else: return RampGenerator(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_delay(self): + return self.delay + def set_delay(self, delay): + self.delay = delay + def get_duration(self): + return self.duration + def set_duration(self, duration): + self.duration = duration + def get_startAmplitude(self): + return self.start_amplitude + def set_startAmplitude(self, start_amplitude): + self.start_amplitude = start_amplitude + def get_finishAmplitude(self): + return self.finish_amplitude + def set_finishAmplitude(self, finish_amplitude): + self.finish_amplitude = finish_amplitude + def get_baselineAmplitude(self): + return self.baseline_amplitude + def set_baselineAmplitude(self, baseline_amplitude): + self.baseline_amplitude = baseline_amplitude def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. 
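    # Illustrative note, not part of the generated code: the tightened time pattern
    # accepts values such as "500ms", "0.02 s" or "-60e-3 s", and rejects strings
    # like "20 sec" or "5 us", since Nml2Quantity_time only allows the units "s"
    # and "ms".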
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_time_patterns_, )) + validate_Nml2Quantity_time_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms))$']] def validate_Nml2Quantity_current(self, value): # Validate type Nml2Quantity_current, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_current_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_current_patterns_, )) - validate_Nml2Quantity_current_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(A|uA|nA|pA)$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_current_patterns_, )) + validate_Nml2Quantity_current_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(A|uA|nA|pA))$']] + def _hasContent(self): if ( - super(RampGenerator, self).hasContent_() + super(RampGenerator, self)._hasContent() ): return True else: @@ -9866,46 +13199,52 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='R eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'RampGenerator': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='RampGenerator') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='RampGenerator') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='RampGenerator', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='RampGenerator', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', 
name_='RampGenerator'): - super(RampGenerator, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='RampGenerator') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='RampGenerator'): + super(RampGenerator, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='RampGenerator') if self.delay is not None and 'delay' not in already_processed: already_processed.add('delay') - outfile.write(' delay=%s' % (quote_attrib(self.delay), )) + outfile.write(' delay=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.delay), input_name='delay')), )) if self.duration is not None and 'duration' not in already_processed: already_processed.add('duration') - outfile.write(' duration=%s' % (quote_attrib(self.duration), )) + outfile.write(' duration=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.duration), input_name='duration')), )) if self.start_amplitude is not None and 'start_amplitude' not in already_processed: already_processed.add('start_amplitude') - outfile.write(' startAmplitude=%s' % (quote_attrib(self.start_amplitude), )) + outfile.write(' startAmplitude=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.start_amplitude), input_name='startAmplitude')), )) if self.finish_amplitude is not None and 'finish_amplitude' not in already_processed: already_processed.add('finish_amplitude') - outfile.write(' finishAmplitude=%s' % (quote_attrib(self.finish_amplitude), )) + outfile.write(' finishAmplitude=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.finish_amplitude), input_name='finishAmplitude')), )) if self.baseline_amplitude is not None and 'baseline_amplitude' not in already_processed: already_processed.add('baseline_amplitude') - outfile.write(' baselineAmplitude=%s' % (quote_attrib(self.baseline_amplitude), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='RampGenerator', fromsubclass_=False, pretty_print=True): - super(RampGenerator, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write(' baselineAmplitude=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.baseline_amplitude), input_name='baselineAmplitude')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='RampGenerator', fromsubclass_=False, pretty_print=True): + super(RampGenerator, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('delay', node) if value is not None and 'delay' not in already_processed: already_processed.add('delay') @@ -9931,32 +13270,41 @@ def buildAttributes(self, node, attrs, already_processed): 
already_processed.add('baselineAmplitude') self.baseline_amplitude = value self.validate_Nml2Quantity_current(self.baseline_amplitude) # validate type Nml2Quantity_current - super(RampGenerator, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(RampGenerator, self).buildChildren(child_, node, nodeName_, True) + super(RampGenerator, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(RampGenerator, self)._buildChildren(child_, node, nodeName_, True) pass # end class RampGenerator class SineGeneratorDL(Standalone): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('delay', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('phase', 'Nml2Quantity_none', 0, 0, {'use': u'required'}), - MemberSpec_('duration', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('amplitude', 'Nml2Quantity_none', 0, 0, {'use': u'required'}), - MemberSpec_('period', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), + MemberSpec_('delay', 'Nml2Quantity_time', 0, 0, {'use': 'required', 'name': 'delay'}), + MemberSpec_('phase', 'Nml2Quantity_none', 0, 0, {'use': 'required', 'name': 'phase'}), + MemberSpec_('duration', 'Nml2Quantity_time', 0, 0, {'use': 'required', 'name': 'duration'}), + MemberSpec_('amplitude', 'Nml2Quantity_none', 0, 0, {'use': 'required', 'name': 'amplitude'}), + MemberSpec_('period', 'Nml2Quantity_time', 0, 0, {'use': 'required', 'name': 'period'}), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, delay=None, phase=None, duration=None, amplitude=None, period=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, delay=None, phase=None, duration=None, amplitude=None, period=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(SineGeneratorDL, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.ns_prefix_ = None + super(globals().get("SineGeneratorDL"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) self.delay = _cast(None, delay) + self.delay_nsprefix_ = None self.phase = _cast(None, phase) + self.phase_nsprefix_ = None self.duration = _cast(None, duration) + self.duration_nsprefix_ = None self.amplitude = _cast(None, amplitude) + self.amplitude_nsprefix_ = None self.period = _cast(None, period) + self.period_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -9968,23 +13316,55 @@ def factory(*args_, **kwargs_): else: return SineGeneratorDL(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_delay(self): + return self.delay + def set_delay(self, delay): + self.delay = delay + def get_phase(self): + return self.phase + def set_phase(self, phase): + self.phase = phase + def get_duration(self): + return self.duration + def set_duration(self, duration): + self.duration = duration + def get_amplitude(self): + return self.amplitude + def set_amplitude(self, amplitude): + 
self.amplitude = amplitude + def get_period(self): + return self.period + def set_period(self, period): + self.period = period def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_time_patterns_, )) + validate_Nml2Quantity_time_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms))$']] def validate_Nml2Quantity_none(self, value): # Validate type Nml2Quantity_none, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_none_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_none_patterns_, )) - validate_Nml2Quantity_none_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_none_patterns_, )) + validate_Nml2Quantity_none_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?)$']] + def _hasContent(self): if ( - super(SineGeneratorDL, self).hasContent_() + super(SineGeneratorDL, self)._hasContent() ): return True else: @@ -9997,46 +13377,52 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='S eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'SineGeneratorDL': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SineGeneratorDL') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SineGeneratorDL') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SineGeneratorDL', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SineGeneratorDL', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) 
outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SineGeneratorDL'): - super(SineGeneratorDL, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SineGeneratorDL') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SineGeneratorDL'): + super(SineGeneratorDL, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SineGeneratorDL') if self.delay is not None and 'delay' not in already_processed: already_processed.add('delay') - outfile.write(' delay=%s' % (quote_attrib(self.delay), )) + outfile.write(' delay=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.delay), input_name='delay')), )) if self.phase is not None and 'phase' not in already_processed: already_processed.add('phase') - outfile.write(' phase=%s' % (quote_attrib(self.phase), )) + outfile.write(' phase=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.phase), input_name='phase')), )) if self.duration is not None and 'duration' not in already_processed: already_processed.add('duration') - outfile.write(' duration=%s' % (quote_attrib(self.duration), )) + outfile.write(' duration=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.duration), input_name='duration')), )) if self.amplitude is not None and 'amplitude' not in already_processed: already_processed.add('amplitude') - outfile.write(' amplitude=%s' % (quote_attrib(self.amplitude), )) + outfile.write(' amplitude=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.amplitude), input_name='amplitude')), )) if self.period is not None and 'period' not in already_processed: already_processed.add('period') - outfile.write(' period=%s' % (quote_attrib(self.period), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SineGeneratorDL', fromsubclass_=False, pretty_print=True): - super(SineGeneratorDL, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write(' period=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.period), input_name='period')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SineGeneratorDL', fromsubclass_=False, pretty_print=True): + super(SineGeneratorDL, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('delay', node) if value is not None and 'delay' not in already_processed: already_processed.add('delay') @@ -10062,32 +13448,41 @@ def buildAttributes(self, node, attrs, already_processed): already_processed.add('period') self.period = value 
self.validate_Nml2Quantity_time(self.period) # validate type Nml2Quantity_time - super(SineGeneratorDL, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(SineGeneratorDL, self).buildChildren(child_, node, nodeName_, True) + super(SineGeneratorDL, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(SineGeneratorDL, self)._buildChildren(child_, node, nodeName_, True) pass # end class SineGeneratorDL class SineGenerator(Standalone): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('delay', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('phase', 'Nml2Quantity_none', 0, 0, {'use': u'required'}), - MemberSpec_('duration', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('amplitude', 'Nml2Quantity_current', 0, 0, {'use': u'required'}), - MemberSpec_('period', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), + MemberSpec_('delay', 'Nml2Quantity_time', 0, 0, {'use': 'required', 'name': 'delay'}), + MemberSpec_('phase', 'Nml2Quantity_none', 0, 0, {'use': 'required', 'name': 'phase'}), + MemberSpec_('duration', 'Nml2Quantity_time', 0, 0, {'use': 'required', 'name': 'duration'}), + MemberSpec_('amplitude', 'Nml2Quantity_current', 0, 0, {'use': 'required', 'name': 'amplitude'}), + MemberSpec_('period', 'Nml2Quantity_time', 0, 0, {'use': 'required', 'name': 'period'}), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, delay=None, phase=None, duration=None, amplitude=None, period=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, delay=None, phase=None, duration=None, amplitude=None, period=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(SineGenerator, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.ns_prefix_ = None + super(globals().get("SineGenerator"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) self.delay = _cast(None, delay) + self.delay_nsprefix_ = None self.phase = _cast(None, phase) + self.phase_nsprefix_ = None self.duration = _cast(None, duration) + self.duration_nsprefix_ = None self.amplitude = _cast(None, amplitude) + self.amplitude_nsprefix_ = None self.period = _cast(None, period) + self.period_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -10099,30 +13494,66 @@ def factory(*args_, **kwargs_): else: return SineGenerator(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_delay(self): + return self.delay + def set_delay(self, delay): + self.delay = delay + def get_phase(self): + return self.phase + def set_phase(self, phase): + self.phase = phase + def get_duration(self): + return self.duration + def set_duration(self, duration): + self.duration = duration + def get_amplitude(self): + return self.amplitude + def set_amplitude(self, amplitude): + self.amplitude = amplitude + def get_period(self): + return self.period + def 
set_period(self, period): + self.period = period def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_time_patterns_, )) + validate_Nml2Quantity_time_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms))$']] def validate_Nml2Quantity_none(self, value): # Validate type Nml2Quantity_none, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_none_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_none_patterns_, )) - validate_Nml2Quantity_none_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?$']] + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_none_patterns_, )) + validate_Nml2Quantity_none_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?)$']] def validate_Nml2Quantity_current(self, value): # Validate type Nml2Quantity_current, a restriction on xs:string. 
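    # Illustrative note, not part of the generated code: Nml2Quantity_current only
    # permits the units A, uA, nA and pA, so "0.5nA", "10 pA" and "-1.2e-2 uA"
    # validate while "10 mA" does not.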
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_current_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_current_patterns_, )) - validate_Nml2Quantity_current_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(A|uA|nA|pA)$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_current_patterns_, )) + validate_Nml2Quantity_current_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(A|uA|nA|pA))$']] + def _hasContent(self): if ( - super(SineGenerator, self).hasContent_() + super(SineGenerator, self)._hasContent() ): return True else: @@ -10135,46 +13566,52 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='S eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'SineGenerator': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SineGenerator') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SineGenerator') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SineGenerator', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SineGenerator', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SineGenerator'): - super(SineGenerator, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SineGenerator') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SineGenerator'): + super(SineGenerator, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SineGenerator') if self.delay is not None and 'delay' not in already_processed: already_processed.add('delay') - outfile.write(' delay=%s' % (quote_attrib(self.delay), )) + outfile.write(' delay=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.delay), input_name='delay')), )) if self.phase is not None and 'phase' not in already_processed: already_processed.add('phase') - outfile.write(' phase=%s' % (quote_attrib(self.phase), )) + outfile.write(' phase=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.phase), input_name='phase')), )) if self.duration is not None and 'duration' not in already_processed: already_processed.add('duration') - outfile.write(' duration=%s' % (quote_attrib(self.duration), )) + 
outfile.write(' duration=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.duration), input_name='duration')), )) if self.amplitude is not None and 'amplitude' not in already_processed: already_processed.add('amplitude') - outfile.write(' amplitude=%s' % (quote_attrib(self.amplitude), )) + outfile.write(' amplitude=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.amplitude), input_name='amplitude')), )) if self.period is not None and 'period' not in already_processed: already_processed.add('period') - outfile.write(' period=%s' % (quote_attrib(self.period), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SineGenerator', fromsubclass_=False, pretty_print=True): - super(SineGenerator, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write(' period=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.period), input_name='period')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SineGenerator', fromsubclass_=False, pretty_print=True): + super(SineGenerator, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('delay', node) if value is not None and 'delay' not in already_processed: already_processed.add('delay') @@ -10200,30 +13637,38 @@ def buildAttributes(self, node, attrs, already_processed): already_processed.add('period') self.period = value self.validate_Nml2Quantity_time(self.period) # validate type Nml2Quantity_time - super(SineGenerator, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(SineGenerator, self).buildChildren(child_, node, nodeName_, True) + super(SineGenerator, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(SineGenerator, self)._buildChildren(child_, node, nodeName_, True) pass # end class SineGenerator class PulseGeneratorDL(Standalone): - """Generates a constant current pulse of a certain amplitude (non - dimensional) for a specified duration after a delay.""" + """PulseGeneratorDL -- Generates a constant current pulse of a certain amplitude (non dimensional) for a specified duration after a delay. 
+ + """ + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('delay', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('duration', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('amplitude', 'Nml2Quantity_none', 0, 0, {'use': u'required'}), + MemberSpec_('delay', 'Nml2Quantity_time', 0, 0, {'use': 'required', 'name': 'delay'}), + MemberSpec_('duration', 'Nml2Quantity_time', 0, 0, {'use': 'required', 'name': 'duration'}), + MemberSpec_('amplitude', 'Nml2Quantity_none', 0, 0, {'use': 'required', 'name': 'amplitude'}), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, delay=None, duration=None, amplitude=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, delay=None, duration=None, amplitude=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(PulseGeneratorDL, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.ns_prefix_ = None + super(globals().get("PulseGeneratorDL"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) self.delay = _cast(None, delay) + self.delay_nsprefix_ = None self.duration = _cast(None, duration) + self.duration_nsprefix_ = None self.amplitude = _cast(None, amplitude) + self.amplitude_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -10235,23 +13680,47 @@ def factory(*args_, **kwargs_): else: return PulseGeneratorDL(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_delay(self): + return self.delay + def set_delay(self, delay): + self.delay = delay + def get_duration(self): + return self.duration + def set_duration(self, duration): + self.duration = duration + def get_amplitude(self): + return self.amplitude + def set_amplitude(self, amplitude): + self.amplitude = amplitude def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_time_patterns_, )) + validate_Nml2Quantity_time_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms))$']] def validate_Nml2Quantity_none(self, value): # Validate type Nml2Quantity_none, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_none_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_none_patterns_, )) - validate_Nml2Quantity_none_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_none_patterns_, )) + validate_Nml2Quantity_none_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?)$']] + def _hasContent(self): if ( - super(PulseGeneratorDL, self).hasContent_() + super(PulseGeneratorDL, self)._hasContent() ): return True else: @@ -10264,40 +13733,46 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='P eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'PulseGeneratorDL': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PulseGeneratorDL') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PulseGeneratorDL') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PulseGeneratorDL', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PulseGeneratorDL', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PulseGeneratorDL'): - super(PulseGeneratorDL, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PulseGeneratorDL') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PulseGeneratorDL'): + super(PulseGeneratorDL, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PulseGeneratorDL') if self.delay is not None and 'delay' not in already_processed: already_processed.add('delay') - outfile.write(' delay=%s' % (quote_attrib(self.delay), )) + outfile.write(' delay=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.delay), input_name='delay')), )) if self.duration is not None and 'duration' not in already_processed: already_processed.add('duration') - outfile.write(' duration=%s' % (quote_attrib(self.duration), )) + outfile.write(' duration=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.duration), input_name='duration')), )) if self.amplitude is not None and 'amplitude' not in already_processed: already_processed.add('amplitude') - outfile.write(' amplitude=%s' % 
(quote_attrib(self.amplitude), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PulseGeneratorDL', fromsubclass_=False, pretty_print=True): - super(PulseGeneratorDL, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write(' amplitude=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.amplitude), input_name='amplitude')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PulseGeneratorDL', fromsubclass_=False, pretty_print=True): + super(PulseGeneratorDL, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('delay', node) if value is not None and 'delay' not in already_processed: already_processed.add('delay') @@ -10313,30 +13788,38 @@ def buildAttributes(self, node, attrs, already_processed): already_processed.add('amplitude') self.amplitude = value self.validate_Nml2Quantity_none(self.amplitude) # validate type Nml2Quantity_none - super(PulseGeneratorDL, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(PulseGeneratorDL, self).buildChildren(child_, node, nodeName_, True) + super(PulseGeneratorDL, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(PulseGeneratorDL, self)._buildChildren(child_, node, nodeName_, True) pass # end class PulseGeneratorDL class PulseGenerator(Standalone): - """Generates a constant current pulse of a certain amplitude (with - dimensions for current) for a specified duration after a delay.""" + """PulseGenerator -- Generates a constant current pulse of a certain amplitude (with dimensions for current) for a specified duration after a delay. 
+ + """ + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('delay', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('duration', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('amplitude', 'Nml2Quantity_current', 0, 0, {'use': u'required'}), + MemberSpec_('delay', 'Nml2Quantity_time', 0, 0, {'use': 'required', 'name': 'delay'}), + MemberSpec_('duration', 'Nml2Quantity_time', 0, 0, {'use': 'required', 'name': 'duration'}), + MemberSpec_('amplitude', 'Nml2Quantity_current', 0, 0, {'use': 'required', 'name': 'amplitude'}), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, delay=None, duration=None, amplitude=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, delay=None, duration=None, amplitude=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(PulseGenerator, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.ns_prefix_ = None + super(globals().get("PulseGenerator"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) self.delay = _cast(None, delay) + self.delay_nsprefix_ = None self.duration = _cast(None, duration) + self.duration_nsprefix_ = None self.amplitude = _cast(None, amplitude) + self.amplitude_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -10348,23 +13831,47 @@ def factory(*args_, **kwargs_): else: return PulseGenerator(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_delay(self): + return self.delay + def set_delay(self, delay): + self.delay = delay + def get_duration(self): + return self.duration + def set_duration(self, duration): + self.duration = duration + def get_amplitude(self): + return self.amplitude + def set_amplitude(self, amplitude): + self.amplitude = amplitude def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_time_patterns_, )) + validate_Nml2Quantity_time_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms))$']] def validate_Nml2Quantity_current(self, value): # Validate type Nml2Quantity_current, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_current_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_current_patterns_, )) - validate_Nml2Quantity_current_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(A|uA|nA|pA)$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_current_patterns_, )) + validate_Nml2Quantity_current_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(A|uA|nA|pA))$']] + def _hasContent(self): if ( - super(PulseGenerator, self).hasContent_() + super(PulseGenerator, self)._hasContent() ): return True else: @@ -10377,40 +13884,46 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='P eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'PulseGenerator': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PulseGenerator') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PulseGenerator') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PulseGenerator', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PulseGenerator', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PulseGenerator'): - super(PulseGenerator, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PulseGenerator') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PulseGenerator'): + super(PulseGenerator, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PulseGenerator') if self.delay is not None and 'delay' not in already_processed: already_processed.add('delay') - outfile.write(' delay=%s' % (quote_attrib(self.delay), )) + outfile.write(' delay=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.delay), input_name='delay')), )) if self.duration is not None and 'duration' not in already_processed: already_processed.add('duration') - outfile.write(' duration=%s' % (quote_attrib(self.duration), )) + outfile.write(' duration=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.duration), input_name='duration')), )) if self.amplitude is not None and 'amplitude' not in already_processed: already_processed.add('amplitude') - outfile.write(' amplitude=%s' % 
(quote_attrib(self.amplitude), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PulseGenerator', fromsubclass_=False, pretty_print=True): - super(PulseGenerator, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write(' amplitude=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.amplitude), input_name='amplitude')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PulseGenerator', fromsubclass_=False, pretty_print=True): + super(PulseGenerator, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('delay', node) if value is not None and 'delay' not in already_processed: already_processed.add('delay') @@ -10426,27 +13939,33 @@ def buildAttributes(self, node, attrs, already_processed): already_processed.add('amplitude') self.amplitude = value self.validate_Nml2Quantity_current(self.amplitude) # validate type Nml2Quantity_current - super(PulseGenerator, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(PulseGenerator, self).buildChildren(child_, node, nodeName_, True) + super(PulseGenerator, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(PulseGenerator, self)._buildChildren(child_, node, nodeName_, True) pass # end class PulseGenerator class ReactionScheme(Base): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('source', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('type', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('__ANY__', '__ANY__', 1, 1, {u'maxOccurs': u'unbounded', u'processContents': u'skip', u'minOccurs': u'0'}, None), + MemberSpec_('source', 'xs:string', 0, 0, {'use': 'required', 'name': 'source'}), + MemberSpec_('type', 'xs:string', 0, 0, {'use': 'required', 'name': 'type'}), + MemberSpec_('__ANY__', '__ANY__', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'processContents': 'skip'}, None), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, source=None, type=None, anytypeobjs_=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, source=None, type=None, anytypeobjs_=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(ReactionScheme, self).__init__(neuro_lex_id, id, **kwargs_) + self.ns_prefix_ = None + super(globals().get("ReactionScheme"), self).__init__(neuro_lex_id, id, **kwargs_) self.source = 
_cast(None, source) + self.source_nsprefix_ = None self.type = _cast(None, type) + self.type_nsprefix_ = None if anytypeobjs_ is None: self.anytypeobjs_ = [] else: @@ -10462,15 +13981,31 @@ def factory(*args_, **kwargs_): else: return ReactionScheme(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_anytypeobjs_(self): return self.anytypeobjs_ + def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_ + def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value) + def insert_anytypeobjs_(self, index, value): self._anytypeobjs_[index] = value + def get_source(self): + return self.source + def set_source(self, source): + self.source = source + def get_type(self): + return self.type + def set_type(self, type): + self.type = type + def _hasContent(self): if ( self.anytypeobjs_ or - super(ReactionScheme, self).hasContent_() + super(ReactionScheme, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ReactionScheme', pretty_print=True): + def export(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='ReactionScheme', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('ReactionScheme') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ @@ -10478,43 +14013,52 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='R eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'ReactionScheme': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ReactionScheme') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ReactionScheme') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ReactionScheme', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ReactionScheme', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ReactionScheme'): - super(ReactionScheme, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ReactionScheme') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ReactionScheme'): + super(ReactionScheme, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ReactionScheme') if self.source is not None and 'source' not in already_processed: already_processed.add('source') outfile.write(' source=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.source), input_name='source')), )) if self.type is not None and 'type' not in already_processed: already_processed.add('type') outfile.write(' type=%s' % 
(self.gds_encode(self.gds_format_string(quote_attrib(self.type), input_name='type')), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ReactionScheme', fromsubclass_=False, pretty_print=True): - super(ReactionScheme, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='ReactionScheme', fromsubclass_=False, pretty_print=True): + super(ReactionScheme, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) if pretty_print: eol_ = '\n' else: eol_ = '' - for obj_ in self.anytypeobjs_: - obj_.export(outfile, level, namespaceprefix_, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + if not fromsubclass_: + for obj_ in self.anytypeobjs_: + showIndent(outfile, level, pretty_print) + outfile.write(obj_) + outfile.write('\n') + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('source', node) if value is not None and 'source' not in already_processed: already_processed.add('source') @@ -10523,29 +14067,33 @@ def buildAttributes(self, node, attrs, already_processed): if value is not None and 'type' not in already_processed: already_processed.add('type') self.type = value - super(ReactionScheme, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - obj_ = self.gds_build_any(child_, 'ReactionScheme') - if obj_ is not None: - self.add_anytypeobjs_(obj_) - super(ReactionScheme, self).buildChildren(child_, node, nodeName_, True) + super(ReactionScheme, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + content_ = self.gds_build_any(child_, 'ReactionScheme') + self.add_anytypeobjs_(content_) + super(ReactionScheme, self)._buildChildren(child_, node, nodeName_, True) # end class ReactionScheme class ExtracellularProperties(Base): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('species', 'Species', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Species', u'name': u'species', u'minOccurs': u'0'}, None), + MemberSpec_('species', 'Species', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'species', 'type': 'Species'}, None), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, species=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, species=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(ExtracellularProperties, self).__init__(neuro_lex_id, id, **kwargs_) + 
self.ns_prefix_ = None + super(globals().get("ExtracellularProperties"), self).__init__(neuro_lex_id, id, **kwargs_) if species is None: self.species = [] else: self.species = species + self.species_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -10557,15 +14105,29 @@ def factory(*args_, **kwargs_): else: return ExtracellularProperties(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_species(self): + return self.species + def set_species(self, species): + self.species = species + def add_species(self, value): + self.species.append(value) + def insert_species_at(self, index, value): + self.species.insert(index, value) + def replace_species_at(self, index, value): + self.species[index] = value + def _hasContent(self): if ( self.species or - super(ExtracellularProperties, self).hasContent_() + super(ExtracellularProperties, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExtracellularProperties', pretty_print=True): + def export(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='ExtracellularProperties', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('ExtracellularProperties') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ @@ -10573,76 +14135,91 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='E eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'ExtracellularProperties': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExtracellularProperties') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExtracellularProperties') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ExtracellularProperties', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ExtracellularProperties', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ExtracellularProperties'): - super(ExtracellularProperties, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExtracellularProperties') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExtracellularProperties', fromsubclass_=False, pretty_print=True): - super(ExtracellularProperties, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ExtracellularProperties'): + super(ExtracellularProperties, self)._exportAttributes(outfile, 
level, already_processed, namespaceprefix_, name_='ExtracellularProperties') + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='ExtracellularProperties', fromsubclass_=False, pretty_print=True): + super(ExtracellularProperties, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) if pretty_print: eol_ = '\n' else: eol_ = '' for species_ in self.species: + namespaceprefix_ = self.species_nsprefix_ + ':' if (UseCapturedNS_ and self.species_nsprefix_) else '' species_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='species', pretty_print=pretty_print) - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - super(ExtracellularProperties, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + def _buildAttributes(self, node, attrs, already_processed): + super(ExtracellularProperties, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'species': obj_ = Species.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.species.append(obj_) obj_.original_tagname_ = 'species' - super(ExtracellularProperties, self).buildChildren(child_, node, nodeName_, True) + super(ExtracellularProperties, self)._buildChildren(child_, node, nodeName_, True) # end class ExtracellularProperties class ChannelDensityGHK2(Base): - """Specifying the ion here again is redundant, this will be set in - ionChannel definition. It is added here TEMPORARILY since - selecting all ca or na conducting channel populations/densities - in a cell would be difficult otherwise. Also, it will make it - easier to set the correct native simulator value for erev (e.g. - ek for ion = k in NEURON). Currently a required attribute. It - should be removed in the longer term, due to possible - inconsistencies in this value and that in the ionChannel - element. TODO: remove.""" + """ion -- Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here + TEMPORARILY since selecting all ca or na conducting channel populations/densities in a cell would be difficult otherwise. + Also, it will make it easier to set the correct native simulator value for erev (e.g. ek for ion = k in NEURON). + Currently a required attribute. + It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel + element. TODO: remove. 
+ + """ + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('ion_channel', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('cond_density', 'Nml2Quantity_conductanceDensity', 0, 1, {'use': u'optional'}), - MemberSpec_('segment_groups', 'NmlId', 0, 1, {'use': u'optional'}), - MemberSpec_('segments', 'NmlId', 0, 1, {'use': u'optional'}), - MemberSpec_('ion', 'NmlId', 0, 0, {'use': u'required'}), + MemberSpec_('ion_channel', 'NmlId', 0, 0, {'use': 'required', 'name': 'ion_channel'}), + MemberSpec_('cond_density', 'Nml2Quantity_conductanceDensity', 0, 1, {'use': 'optional', 'name': 'cond_density'}), + MemberSpec_('segment_groups', 'NmlId', 0, 1, {'use': 'optional', 'name': 'segment_groups'}), + MemberSpec_('segments', 'NmlId', 0, 1, {'use': 'optional', 'name': 'segments'}), + MemberSpec_('ion', 'NmlId', 0, 0, {'use': 'required', 'name': 'ion'}), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, ion_channel=None, cond_density=None, segment_groups='all', segments=None, ion=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, ion_channel=None, cond_density=None, segment_groups='all', segments=None, ion=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(ChannelDensityGHK2, self).__init__(neuro_lex_id, id, **kwargs_) + self.ns_prefix_ = None + super(globals().get("ChannelDensityGHK2"), self).__init__(neuro_lex_id, id, **kwargs_) self.ion_channel = _cast(None, ion_channel) + self.ion_channel_nsprefix_ = None self.cond_density = _cast(None, cond_density) + self.cond_density_nsprefix_ = None self.segment_groups = _cast(None, segment_groups) + self.segment_groups_nsprefix_ = None self.segments = _cast(None, segments) + self.segments_nsprefix_ = None self.ion = _cast(None, ion) + self.ion_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -10654,23 +14231,55 @@ def factory(*args_, **kwargs_): else: return ChannelDensityGHK2(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ionChannel(self): + return self.ion_channel + def set_ionChannel(self, ion_channel): + self.ion_channel = ion_channel + def get_condDensity(self): + return self.cond_density + def set_condDensity(self, cond_density): + self.cond_density = cond_density + def get_segmentGroup(self): + return self.segment_groups + def set_segmentGroup(self, segment_groups): + self.segment_groups = segment_groups + def get_segment(self): + return self.segments + def set_segment(self, segments): + self.segments = segments + def get_ion(self): + return self.ion + def set_ion(self, ion): + self.ion = ion def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_NmlId_patterns_, )) + validate_NmlId_patterns_ = [['^([a-zA-Z_][a-zA-Z0-9_]*)$']] def validate_Nml2Quantity_conductanceDensity(self, value): # Validate type Nml2Quantity_conductanceDensity, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_conductanceDensity_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_conductanceDensity_patterns_, )) - validate_Nml2Quantity_conductanceDensity_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S_per_m2|mS_per_cm2|S_per_cm2)$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_conductanceDensity_patterns_, )) + validate_Nml2Quantity_conductanceDensity_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S_per_m2|mS_per_cm2|S_per_cm2))$']] + def _hasContent(self): if ( - super(ChannelDensityGHK2, self).hasContent_() + super(ChannelDensityGHK2, self)._hasContent() ): return True else: @@ -10683,46 +14292,52 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='C eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'ChannelDensityGHK2': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelDensityGHK2') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelDensityGHK2') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ChannelDensityGHK2', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ChannelDensityGHK2', pretty_print=pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', 
name_='ChannelDensityGHK2'): - super(ChannelDensityGHK2, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelDensityGHK2') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ChannelDensityGHK2'): + super(ChannelDensityGHK2, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelDensityGHK2') if self.ion_channel is not None and 'ion_channel' not in already_processed: already_processed.add('ion_channel') - outfile.write(' ionChannel=%s' % (quote_attrib(self.ion_channel), )) + outfile.write(' ionChannel=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.ion_channel), input_name='ionChannel')), )) if self.cond_density is not None and 'cond_density' not in already_processed: already_processed.add('cond_density') - outfile.write(' condDensity=%s' % (quote_attrib(self.cond_density), )) + outfile.write(' condDensity=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.cond_density), input_name='condDensity')), )) if self.segment_groups != "all" and 'segment_groups' not in already_processed: already_processed.add('segment_groups') - outfile.write(' segmentGroup=%s' % (quote_attrib(self.segment_groups), )) + outfile.write(' segmentGroup=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.segment_groups), input_name='segmentGroup')), )) if self.segments is not None and 'segments' not in already_processed: already_processed.add('segments') - outfile.write(' segment=%s' % (quote_attrib(self.segments), )) + outfile.write(' segment=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.segments), input_name='segment')), )) if self.ion is not None and 'ion' not in already_processed: already_processed.add('ion') - outfile.write(' ion=%s' % (quote_attrib(self.ion), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ChannelDensityGHK2', fromsubclass_=False, pretty_print=True): - super(ChannelDensityGHK2, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write(' ion=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.ion), input_name='ion')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ChannelDensityGHK2', fromsubclass_=False, pretty_print=True): + super(ChannelDensityGHK2, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) pass - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('ionChannel', node) if value is not None and 'ionChannel' not in already_processed: already_processed.add('ionChannel') @@ -10748,41 +14363,49 @@ def buildAttributes(self, node, attrs, already_processed): already_processed.add('ion') self.ion = value self.validate_NmlId(self.ion) # validate type NmlId - 
super(ChannelDensityGHK2, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(ChannelDensityGHK2, self).buildChildren(child_, node, nodeName_, True) + super(ChannelDensityGHK2, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(ChannelDensityGHK2, self)._buildChildren(child_, node, nodeName_, True) pass # end class ChannelDensityGHK2 class ChannelDensityGHK(Base): - """Specifying the ion here again is redundant, this will be set in - ionChannel definition. It is added here TEMPORARILY since - selecting all ca or na conducting channel populations/densities - in a cell would be difficult otherwise. Also, it will make it - easier to set the correct native simulator value for erev (e.g. - ek for ion = k in NEURON). Currently a required attribute. It - should be removed in the longer term, due to possible - inconsistencies in this value and that in the ionChannel - element. TODO: remove.""" + """ion -- Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here + TEMPORARILY since selecting all ca or na conducting channel populations/densities in a cell would be difficult otherwise. + Also, it will make it easier to set the correct native simulator value for erev (e.g. ek for ion = k in NEURON). + Currently a required attribute. + It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel + element. TODO: remove. + + """ + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('ion_channel', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('permeability', 'Nml2Quantity_permeability', 0, 0, {'use': u'required'}), - MemberSpec_('segment_groups', 'NmlId', 0, 1, {'use': u'optional'}), - MemberSpec_('segments', 'NmlId', 0, 1, {'use': u'optional'}), - MemberSpec_('ion', 'NmlId', 0, 0, {'use': u'required'}), + MemberSpec_('ion_channel', 'NmlId', 0, 0, {'use': 'required', 'name': 'ion_channel'}), + MemberSpec_('permeability', 'Nml2Quantity_permeability', 0, 0, {'use': 'required', 'name': 'permeability'}), + MemberSpec_('segment_groups', 'NmlId', 0, 1, {'use': 'optional', 'name': 'segment_groups'}), + MemberSpec_('segments', 'NmlId', 0, 1, {'use': 'optional', 'name': 'segments'}), + MemberSpec_('ion', 'NmlId', 0, 0, {'use': 'required', 'name': 'ion'}), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, ion_channel=None, permeability=None, segment_groups='all', segments=None, ion=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, ion_channel=None, permeability=None, segment_groups='all', segments=None, ion=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(ChannelDensityGHK, self).__init__(neuro_lex_id, id, **kwargs_) + self.ns_prefix_ = None + super(globals().get("ChannelDensityGHK"), self).__init__(neuro_lex_id, id, **kwargs_) self.ion_channel = _cast(None, ion_channel) + self.ion_channel_nsprefix_ = None self.permeability = _cast(None, permeability) + self.permeability_nsprefix_ = None self.segment_groups = _cast(None, segment_groups) + self.segment_groups_nsprefix_ = None self.segments = _cast(None, segments) + self.segments_nsprefix_ = None self.ion = _cast(None, ion) + self.ion_nsprefix_ = None 
def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -10794,23 +14417,55 @@ def factory(*args_, **kwargs_): else: return ChannelDensityGHK(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ionChannel(self): + return self.ion_channel + def set_ionChannel(self, ion_channel): + self.ion_channel = ion_channel + def get_permeability(self): + return self.permeability + def set_permeability(self, permeability): + self.permeability = permeability + def get_segmentGroup(self): + return self.segment_groups + def set_segmentGroup(self, segment_groups): + self.segment_groups = segment_groups + def get_segment(self): + return self.segments + def set_segment(self, segments): + self.segments = segments + def get_ion(self): + return self.ion + def set_ion(self, ion): + self.ion = ion def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_NmlId_patterns_, )) + validate_NmlId_patterns_ = [['^([a-zA-Z_][a-zA-Z0-9_]*)$']] def validate_Nml2Quantity_permeability(self, value): # Validate type Nml2Quantity_permeability, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_permeability_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_permeability_patterns_, )) - validate_Nml2Quantity_permeability_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(m_per_s| um_per_ms|cm_per_s|cm_per_ms)$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_permeability_patterns_, )) + validate_Nml2Quantity_permeability_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(m_per_s| um_per_ms|cm_per_s|cm_per_ms))$']] + def _hasContent(self): if ( - super(ChannelDensityGHK, self).hasContent_() + super(ChannelDensityGHK, self)._hasContent() ): return True else: @@ -10823,46 +14478,52 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='C eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'ChannelDensityGHK': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelDensityGHK') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelDensityGHK') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ChannelDensityGHK', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ChannelDensityGHK', pretty_print=pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ChannelDensityGHK'): - super(ChannelDensityGHK, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelDensityGHK') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ChannelDensityGHK'): + super(ChannelDensityGHK, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelDensityGHK') if self.ion_channel is not None and 'ion_channel' not in already_processed: already_processed.add('ion_channel') - outfile.write(' ionChannel=%s' % (quote_attrib(self.ion_channel), )) + outfile.write(' ionChannel=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.ion_channel), input_name='ionChannel')), )) if self.permeability is not None and 'permeability' not in already_processed: already_processed.add('permeability') - outfile.write(' permeability=%s' % (quote_attrib(self.permeability), )) + outfile.write(' permeability=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.permeability), 
input_name='permeability')), )) if self.segment_groups != "all" and 'segment_groups' not in already_processed: already_processed.add('segment_groups') - outfile.write(' segmentGroup=%s' % (quote_attrib(self.segment_groups), )) + outfile.write(' segmentGroup=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.segment_groups), input_name='segmentGroup')), )) if self.segments is not None and 'segments' not in already_processed: already_processed.add('segments') - outfile.write(' segment=%s' % (quote_attrib(self.segments), )) + outfile.write(' segment=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.segments), input_name='segment')), )) if self.ion is not None and 'ion' not in already_processed: already_processed.add('ion') - outfile.write(' ion=%s' % (quote_attrib(self.ion), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ChannelDensityGHK', fromsubclass_=False, pretty_print=True): - super(ChannelDensityGHK, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write(' ion=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.ion), input_name='ion')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ChannelDensityGHK', fromsubclass_=False, pretty_print=True): + super(ChannelDensityGHK, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) pass - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('ionChannel', node) if value is not None and 'ionChannel' not in already_processed: already_processed.add('ionChannel') @@ -10888,46 +14549,55 @@ def buildAttributes(self, node, attrs, already_processed): already_processed.add('ion') self.ion = value self.validate_NmlId(self.ion) # validate type NmlId - super(ChannelDensityGHK, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(ChannelDensityGHK, self).buildChildren(child_, node, nodeName_, True) + super(ChannelDensityGHK, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(ChannelDensityGHK, self)._buildChildren(child_, node, nodeName_, True) pass # end class ChannelDensityGHK class ChannelDensityNernst(Base): - """Specifying the ion here again is redundant, this will be set in - ionChannel definition. It is added here TEMPORARILY since - selecting all ca or na conducting channel populations/densities - in a cell would be difficult otherwise. Also, it will make it - easier to set the correct native simulator value for erev (e.g. - ek for ion = k in NEURON). Currently a required attribute. 
It - should be removed in the longer term, due to possible - inconsistencies in this value and that in the ionChannel - element. TODO: remove.""" + """ion -- Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here + TEMPORARILY since selecting all ca or na conducting channel populations/densities in a cell would be difficult otherwise. + Also, it will make it easier to set the correct native simulator value for erev (e.g. ek for ion = k in NEURON). + Currently a required attribute. + It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel + element. TODO: remove. + + """ + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('ion_channel', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('cond_density', 'Nml2Quantity_conductanceDensity', 0, 1, {'use': u'optional'}), - MemberSpec_('segment_groups', 'NmlId', 0, 1, {'use': u'optional'}), - MemberSpec_('segments', 'NmlId', 0, 1, {'use': u'optional'}), - MemberSpec_('ion', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('variable_parameters', 'VariableParameter', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'VariableParameter', u'name': u'variableParameter', u'minOccurs': u'0'}, None), + MemberSpec_('ion_channel', 'NmlId', 0, 0, {'use': 'required', 'name': 'ion_channel'}), + MemberSpec_('cond_density', 'Nml2Quantity_conductanceDensity', 0, 1, {'use': 'optional', 'name': 'cond_density'}), + MemberSpec_('segment_groups', 'NmlId', 0, 1, {'use': 'optional', 'name': 'segment_groups'}), + MemberSpec_('segments', 'NmlId', 0, 1, {'use': 'optional', 'name': 'segments'}), + MemberSpec_('ion', 'NmlId', 0, 0, {'use': 'required', 'name': 'ion'}), + MemberSpec_('variable_parameters', 'VariableParameter', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'variableParameter', 'type': 'VariableParameter'}, None), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, ion_channel=None, cond_density=None, segment_groups='all', segments=None, ion=None, variable_parameters=None, extensiontype_=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, ion_channel=None, cond_density=None, segment_groups='all', segments=None, ion=None, variable_parameters=None, extensiontype_=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(ChannelDensityNernst, self).__init__(neuro_lex_id, id, extensiontype_, **kwargs_) + self.ns_prefix_ = None + super(globals().get("ChannelDensityNernst"), self).__init__(neuro_lex_id, id, extensiontype_, **kwargs_) self.ion_channel = _cast(None, ion_channel) + self.ion_channel_nsprefix_ = None self.cond_density = _cast(None, cond_density) + self.cond_density_nsprefix_ = None self.segment_groups = _cast(None, segment_groups) + self.segment_groups_nsprefix_ = None self.segments = _cast(None, segments) + self.segments_nsprefix_ = None self.ion = _cast(None, ion) + self.ion_nsprefix_ = None if variable_parameters is None: self.variable_parameters = [] else: self.variable_parameters = variable_parameters + self.variable_parameters_nsprefix_ = None self.extensiontype_ = extensiontype_ def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: @@ -10940,29 +14610,73 @@ def factory(*args_, **kwargs_): else: return ChannelDensityNernst(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + 
return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_variableParameter(self): + return self.variable_parameters + def set_variableParameter(self, variable_parameters): + self.variable_parameters = variable_parameters + def add_variableParameter(self, value): + self.variable_parameters.append(value) + def insert_variableParameter_at(self, index, value): + self.variable_parameters.insert(index, value) + def replace_variableParameter_at(self, index, value): + self.variable_parameters[index] = value + def get_ionChannel(self): + return self.ion_channel + def set_ionChannel(self, ion_channel): + self.ion_channel = ion_channel + def get_condDensity(self): + return self.cond_density + def set_condDensity(self, cond_density): + self.cond_density = cond_density + def get_segmentGroup(self): + return self.segment_groups + def set_segmentGroup(self, segment_groups): + self.segment_groups = segment_groups + def get_segment(self): + return self.segments + def set_segment(self, segments): + self.segments = segments + def get_ion(self): + return self.ion + def set_ion(self, ion): + self.ion = ion + def get_extensiontype_(self): return self.extensiontype_ + def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_NmlId_patterns_, )) + validate_NmlId_patterns_ = [['^([a-zA-Z_][a-zA-Z0-9_]*)$']] def validate_Nml2Quantity_conductanceDensity(self, value): # Validate type Nml2Quantity_conductanceDensity, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_conductanceDensity_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_conductanceDensity_patterns_, )) - validate_Nml2Quantity_conductanceDensity_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S_per_m2|mS_per_cm2|S_per_cm2)$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_conductanceDensity_patterns_, )) + validate_Nml2Quantity_conductanceDensity_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S_per_m2|mS_per_cm2|S_per_cm2))$']] + def _hasContent(self): if ( self.variable_parameters or - super(ChannelDensityNernst, self).hasContent_() + super(ChannelDensityNernst, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ChannelDensityNernst', pretty_print=True): + def export(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='ChannelDensityNernst', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('ChannelDensityNernst') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ @@ -10970,56 +14684,67 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='C eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'ChannelDensityNernst': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelDensityNernst') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelDensityNernst') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ChannelDensityNernst', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ChannelDensityNernst', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ChannelDensityNernst'): - super(ChannelDensityNernst, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelDensityNernst') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ChannelDensityNernst'): + super(ChannelDensityNernst, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelDensityNernst') if self.ion_channel is not None and 'ion_channel' not in 
already_processed: already_processed.add('ion_channel') - outfile.write(' ionChannel=%s' % (quote_attrib(self.ion_channel), )) + outfile.write(' ionChannel=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.ion_channel), input_name='ionChannel')), )) if self.cond_density is not None and 'cond_density' not in already_processed: already_processed.add('cond_density') - outfile.write(' condDensity=%s' % (quote_attrib(self.cond_density), )) + outfile.write(' condDensity=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.cond_density), input_name='condDensity')), )) if self.segment_groups != "all" and 'segment_groups' not in already_processed: already_processed.add('segment_groups') - outfile.write(' segmentGroup=%s' % (quote_attrib(self.segment_groups), )) + outfile.write(' segmentGroup=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.segment_groups), input_name='segmentGroup')), )) if self.segments is not None and 'segments' not in already_processed: already_processed.add('segments') - outfile.write(' segment=%s' % (quote_attrib(self.segments), )) + outfile.write(' segment=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.segments), input_name='segment')), )) if self.ion is not None and 'ion' not in already_processed: already_processed.add('ion') - outfile.write(' ion=%s' % (quote_attrib(self.ion), )) + outfile.write(' ion=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.ion), input_name='ion')), )) if self.extensiontype_ is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ChannelDensityNernst', fromsubclass_=False, pretty_print=True): - super(ChannelDensityNernst, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get(self.extensiontype_, '') + outfile.write(' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_)) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='ChannelDensityNernst', fromsubclass_=False, pretty_print=True): + super(ChannelDensityNernst, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) if pretty_print: eol_ = '\n' else: eol_ = '' for variableParameter_ in self.variable_parameters: + namespaceprefix_ = self.variable_parameters_nsprefix_ + ':' if (UseCapturedNS_ and self.variable_parameters_nsprefix_) else '' variableParameter_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='variableParameter', pretty_print=pretty_print) - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - 
def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('ionChannel', node) if value is not None and 'ionChannel' not in already_processed: already_processed.add('ionChannel') @@ -11049,52 +14774,62 @@ def buildAttributes(self, node, attrs, already_processed): if value is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') self.extensiontype_ = value - super(ChannelDensityNernst, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + super(ChannelDensityNernst, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'variableParameter': obj_ = VariableParameter.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.variable_parameters.append(obj_) obj_.original_tagname_ = 'variableParameter' - super(ChannelDensityNernst, self).buildChildren(child_, node, nodeName_, True) + super(ChannelDensityNernst, self)._buildChildren(child_, node, nodeName_, True) # end class ChannelDensityNernst class ChannelDensity(Base): - """Specifying the ion here again is redundant, this will be set in - ionChannel definition. It is added here TEMPORARILY since - selecting all ca or na conducting channel populations/densities - in a cell would be difficult otherwise. Also, it will make it - easier to set the correct native simulator value for erev (e.g. - ek for ion = k in NEURON). Currently a required attribute. It - should be removed in the longer term, due to possible - inconsistencies in this value and that in the ionChannel - element. TODO: remove.""" + """ion -- Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here + TEMPORARILY since selecting all ca or na conducting channel populations/densities in a cell would be difficult otherwise. + Also, it will make it easier to set the correct native simulator value for erev (e.g. ek for ion = k in NEURON). + Currently a required attribute. + It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel + element. TODO: remove. 
+ + """ + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('ion_channel', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('cond_density', 'Nml2Quantity_conductanceDensity', 0, 1, {'use': u'optional'}), - MemberSpec_('erev', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('segment_groups', 'NmlId', 0, 1, {'use': u'optional'}), - MemberSpec_('segments', 'NonNegativeInteger', 0, 1, {'use': u'optional'}), - MemberSpec_('ion', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('variable_parameters', 'VariableParameter', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'VariableParameter', u'name': u'variableParameter', u'minOccurs': u'0'}, None), + MemberSpec_('ion_channel', 'NmlId', 0, 0, {'use': 'required', 'name': 'ion_channel'}), + MemberSpec_('cond_density', 'Nml2Quantity_conductanceDensity', 0, 1, {'use': 'optional', 'name': 'cond_density'}), + MemberSpec_('erev', 'Nml2Quantity_voltage', 0, 0, {'use': 'required', 'name': 'erev'}), + MemberSpec_('segment_groups', 'NmlId', 0, 1, {'use': 'optional', 'name': 'segment_groups'}), + MemberSpec_('segments', 'NonNegativeInteger', 0, 1, {'use': 'optional', 'name': 'segments'}), + MemberSpec_('ion', 'NmlId', 0, 0, {'use': 'required', 'name': 'ion'}), + MemberSpec_('variable_parameters', 'VariableParameter', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'variableParameter', 'type': 'VariableParameter'}, None), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, ion_channel=None, cond_density=None, erev=None, segment_groups='all', segments=None, ion=None, variable_parameters=None, extensiontype_=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, ion_channel=None, cond_density=None, erev=None, segment_groups='all', segments=None, ion=None, variable_parameters=None, extensiontype_=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(ChannelDensity, self).__init__(neuro_lex_id, id, extensiontype_, **kwargs_) + self.ns_prefix_ = None + super(globals().get("ChannelDensity"), self).__init__(neuro_lex_id, id, extensiontype_, **kwargs_) self.ion_channel = _cast(None, ion_channel) + self.ion_channel_nsprefix_ = None self.cond_density = _cast(None, cond_density) + self.cond_density_nsprefix_ = None self.erev = _cast(None, erev) + self.erev_nsprefix_ = None self.segment_groups = _cast(None, segment_groups) + self.segment_groups_nsprefix_ = None self.segments = _cast(int, segments) + self.segments_nsprefix_ = None self.ion = _cast(None, ion) + self.ion_nsprefix_ = None if variable_parameters is None: self.variable_parameters = [] else: self.variable_parameters = variable_parameters + self.variable_parameters_nsprefix_ = None self.extensiontype_ = extensiontype_ def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: @@ -11107,40 +14842,96 @@ def factory(*args_, **kwargs_): else: return ChannelDensity(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_variableParameter(self): + return self.variable_parameters + def set_variableParameter(self, variable_parameters): + self.variable_parameters = variable_parameters + def add_variableParameter(self, value): + self.variable_parameters.append(value) + def insert_variableParameter_at(self, index, value): + 
self.variable_parameters.insert(index, value) + def replace_variableParameter_at(self, index, value): + self.variable_parameters[index] = value + def get_ionChannel(self): + return self.ion_channel + def set_ionChannel(self, ion_channel): + self.ion_channel = ion_channel + def get_condDensity(self): + return self.cond_density + def set_condDensity(self, cond_density): + self.cond_density = cond_density + def get_erev(self): + return self.erev + def set_erev(self, erev): + self.erev = erev + def get_segmentGroup(self): + return self.segment_groups + def set_segmentGroup(self, segment_groups): + self.segment_groups = segment_groups + def get_segment(self): + return self.segments + def set_segment(self, segments): + self.segments = segments + def get_ion(self): + return self.ion + def set_ion(self, ion): + self.ion = ion + def get_extensiontype_(self): return self.extensiontype_ + def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_NmlId_patterns_, )) + validate_NmlId_patterns_ = [['^([a-zA-Z_][a-zA-Z0-9_]*)$']] def validate_Nml2Quantity_conductanceDensity(self, value): # Validate type Nml2Quantity_conductanceDensity, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_conductanceDensity_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_conductanceDensity_patterns_, )) - validate_Nml2Quantity_conductanceDensity_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S_per_m2|mS_per_cm2|S_per_cm2)$']] + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_conductanceDensity_patterns_, )) + validate_Nml2Quantity_conductanceDensity_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S_per_m2|mS_per_cm2|S_per_cm2))$']] def validate_Nml2Quantity_voltage(self, value): # Validate type Nml2Quantity_voltage, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_voltage_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_voltage_patterns_, )) - validate_Nml2Quantity_voltage_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$']] + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_voltage_patterns_, )) + validate_Nml2Quantity_voltage_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV))$']] def validate_NonNegativeInteger(self, value): # Validate type NonNegativeInteger, a restriction on xs:nonNegativeInteger. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, int): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (int)' % {"value": value, "lineno": lineno, }) + return False pass - def hasContent_(self): + def _hasContent(self): if ( self.variable_parameters or - super(ChannelDensity, self).hasContent_() + super(ChannelDensity, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ChannelDensity', pretty_print=True): + def export(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='ChannelDensity', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('ChannelDensity') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ @@ -11148,59 +14939,70 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='C eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'ChannelDensity': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelDensity') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelDensity') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ChannelDensity', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ChannelDensity', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ChannelDensity'): - super(ChannelDensity, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelDensity') + 
def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ChannelDensity'): + super(ChannelDensity, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelDensity') if self.ion_channel is not None and 'ion_channel' not in already_processed: already_processed.add('ion_channel') - outfile.write(' ionChannel=%s' % (quote_attrib(self.ion_channel), )) + outfile.write(' ionChannel=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.ion_channel), input_name='ionChannel')), )) if self.cond_density is not None and 'cond_density' not in already_processed: already_processed.add('cond_density') - outfile.write(' condDensity=%s' % (quote_attrib(self.cond_density), )) + outfile.write(' condDensity=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.cond_density), input_name='condDensity')), )) if self.erev is not None and 'erev' not in already_processed: already_processed.add('erev') - outfile.write(' erev=%s' % (quote_attrib(self.erev), )) + outfile.write(' erev=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.erev), input_name='erev')), )) if self.segment_groups != "all" and 'segment_groups' not in already_processed: already_processed.add('segment_groups') - outfile.write(' segmentGroup=%s' % (quote_attrib(self.segment_groups), )) + outfile.write(' segmentGroup=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.segment_groups), input_name='segmentGroup')), )) if self.segments is not None and 'segments' not in already_processed: already_processed.add('segments') - outfile.write(' segment=%s' % (quote_attrib(self.segments), )) + outfile.write(' segment="%s"' % self.gds_format_integer(self.segments, input_name='segment')) if self.ion is not None and 'ion' not in already_processed: already_processed.add('ion') - outfile.write(' ion=%s' % (quote_attrib(self.ion), )) + outfile.write(' ion=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.ion), input_name='ion')), )) if self.extensiontype_ is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ChannelDensity', fromsubclass_=False, pretty_print=True): - super(ChannelDensity, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get(self.extensiontype_, '') + outfile.write(' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_)) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='ChannelDensity', fromsubclass_=False, pretty_print=True): + super(ChannelDensity, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) if pretty_print: eol_ = '\n' else: eol_ = '' for variableParameter_ in self.variable_parameters: + namespaceprefix_ = self.variable_parameters_nsprefix_ + ':' if (UseCapturedNS_ and self.variable_parameters_nsprefix_) else '' variableParameter_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='variableParameter', pretty_print=pretty_print) - def build(self, node): + def build(self, node, 
gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('ionChannel', node) if value is not None and 'ionChannel' not in already_processed: already_processed.add('ionChannel') @@ -11224,10 +15026,7 @@ def buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('segment', node) if value is not None and 'segment' not in already_processed: already_processed.add('segment') - try: - self.segments = int(value) - except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + self.segments = self.gds_parse_integer(value, node, 'segment') if self.segments < 0: raise_parse_error(node, 'Invalid NonNegativeInteger') self.validate_NonNegativeInteger(self.segments) # validate type NonNegativeInteger @@ -11240,44 +15039,50 @@ def buildAttributes(self, node, attrs, already_processed): if value is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') self.extensiontype_ = value - super(ChannelDensity, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + super(ChannelDensity, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'variableParameter': obj_ = VariableParameter.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.variable_parameters.append(obj_) obj_.original_tagname_ = 'variableParameter' - super(ChannelDensity, self).buildChildren(child_, node, nodeName_, True) + super(ChannelDensity, self)._buildChildren(child_, node, nodeName_, True) # end class ChannelDensity class ChannelDensityNonUniformGHK(Base): - """Specifying the ion here again is redundant, this will be set in - ionChannel definition. It is added here TEMPORARILY since - selecting all ca or na conducting channel populations/densities - in a cell would be difficult otherwise. Also, it will make it - easier to set the correct native simulator value for erev (e.g. - ek for ion = k in NEURON). Currently a required attribute. It - should be removed in the longer term, due to possible - inconsistencies in this value and that in the ionChannel - element. TODO: remove.""" + """ion -- Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here + TEMPORARILY since selecting all ca or na conducting channel populations/densities in a cell would be difficult otherwise. + Also, it will make it easier to set the correct native simulator value for erev (e.g. ek for ion = k in NEURON). + Currently a required attribute. + It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel + element. TODO: remove. 
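The build and validation methods above now thread a gds_collector_ through parsing and report pattern violations to it rather than emitting warnings. A sketch of driving that by hand, assuming the regenerated module defines the usual generateDS GdsCollector_ helper and is importable as neuroml.nml.nml; the XML snippet is made up:

    from lxml import etree
    from neuroml.nml.nml import ChannelDensity, GdsCollector_

    xml = ('<channelDensity id="kChans" ionChannel="Kv" '
           'condDensity="36 mS_per_cm2" erev="-77mV" ion="k"/>')
    node = etree.fromstring(xml)

    collector = GdsCollector_()
    cd = ChannelDensity()
    cd.build(node, gds_collector_=collector)

    # Messages are only added when an attribute fails its XSD pattern check,
    # so a clean element leaves the collector empty.
    for message in collector.get_messages():
        print(message)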
+ + """ + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('ion_channel', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('ion', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('variable_parameters', 'VariableParameter', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'VariableParameter', u'name': u'variableParameter', u'minOccurs': u'0'}, None), + MemberSpec_('ion_channel', 'NmlId', 0, 0, {'use': 'required', 'name': 'ion_channel'}), + MemberSpec_('ion', 'NmlId', 0, 0, {'use': 'required', 'name': 'ion'}), + MemberSpec_('variable_parameters', 'VariableParameter', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'variableParameter', 'type': 'VariableParameter'}, None), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, ion_channel=None, ion=None, variable_parameters=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, ion_channel=None, ion=None, variable_parameters=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(ChannelDensityNonUniformGHK, self).__init__(neuro_lex_id, id, **kwargs_) + self.ns_prefix_ = None + super(globals().get("ChannelDensityNonUniformGHK"), self).__init__(neuro_lex_id, id, **kwargs_) self.ion_channel = _cast(None, ion_channel) + self.ion_channel_nsprefix_ = None self.ion = _cast(None, ion) + self.ion_nsprefix_ = None if variable_parameters is None: self.variable_parameters = [] else: self.variable_parameters = variable_parameters + self.variable_parameters_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -11289,22 +15094,48 @@ def factory(*args_, **kwargs_): else: return ChannelDensityNonUniformGHK(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_variableParameter(self): + return self.variable_parameters + def set_variableParameter(self, variable_parameters): + self.variable_parameters = variable_parameters + def add_variableParameter(self, value): + self.variable_parameters.append(value) + def insert_variableParameter_at(self, index, value): + self.variable_parameters.insert(index, value) + def replace_variableParameter_at(self, index, value): + self.variable_parameters[index] = value + def get_ionChannel(self): + return self.ion_channel + def set_ionChannel(self, ion_channel): + self.ion_channel = ion_channel + def get_ion(self): + return self.ion + def set_ion(self, ion): + self.ion = ion def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_NmlId_patterns_, )) + validate_NmlId_patterns_ = [['^([a-zA-Z_][a-zA-Z0-9_]*)$']] + def _hasContent(self): if ( self.variable_parameters or - super(ChannelDensityNonUniformGHK, self).hasContent_() + super(ChannelDensityNonUniformGHK, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ChannelDensityNonUniformGHK', pretty_print=True): + def export(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='ChannelDensityNonUniformGHK', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('ChannelDensityNonUniformGHK') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ @@ -11312,43 +15143,50 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='C eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'ChannelDensityNonUniformGHK': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelDensityNonUniformGHK') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelDensityNonUniformGHK') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ChannelDensityNonUniformGHK', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ChannelDensityNonUniformGHK', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ChannelDensityNonUniformGHK'): - super(ChannelDensityNonUniformGHK, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelDensityNonUniformGHK') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ChannelDensityNonUniformGHK'): + super(ChannelDensityNonUniformGHK, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelDensityNonUniformGHK') if self.ion_channel is not None and 'ion_channel' not in already_processed: already_processed.add('ion_channel') - outfile.write(' ionChannel=%s' % (quote_attrib(self.ion_channel), )) + 
outfile.write(' ionChannel=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.ion_channel), input_name='ionChannel')), )) if self.ion is not None and 'ion' not in already_processed: already_processed.add('ion') - outfile.write(' ion=%s' % (quote_attrib(self.ion), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ChannelDensityNonUniformGHK', fromsubclass_=False, pretty_print=True): - super(ChannelDensityNonUniformGHK, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write(' ion=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.ion), input_name='ion')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='ChannelDensityNonUniformGHK', fromsubclass_=False, pretty_print=True): + super(ChannelDensityNonUniformGHK, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) if pretty_print: eol_ = '\n' else: eol_ = '' for variableParameter_ in self.variable_parameters: + namespaceprefix_ = self.variable_parameters_nsprefix_ + ':' if (UseCapturedNS_ and self.variable_parameters_nsprefix_) else '' variableParameter_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='variableParameter', pretty_print=pretty_print) - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('ionChannel', node) if value is not None and 'ionChannel' not in already_processed: already_processed.add('ionChannel') @@ -11359,44 +15197,50 @@ def buildAttributes(self, node, attrs, already_processed): already_processed.add('ion') self.ion = value self.validate_NmlId(self.ion) # validate type NmlId - super(ChannelDensityNonUniformGHK, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + super(ChannelDensityNonUniformGHK, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'variableParameter': obj_ = VariableParameter.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.variable_parameters.append(obj_) obj_.original_tagname_ = 'variableParameter' - super(ChannelDensityNonUniformGHK, self).buildChildren(child_, node, nodeName_, True) + super(ChannelDensityNonUniformGHK, self)._buildChildren(child_, node, nodeName_, True) # end class ChannelDensityNonUniformGHK class ChannelDensityNonUniformNernst(Base): - """Specifying the ion here again is redundant, this will be set in - ionChannel definition. It is added here TEMPORARILY since - selecting all ca or na conducting channel populations/densities - in a cell would be difficult otherwise. 
Also, it will make it - easier to set the correct native simulator value for erev (e.g. - ek for ion = k in NEURON). Currently a required attribute. It - should be removed in the longer term, due to possible - inconsistencies in this value and that in the ionChannel - element. TODO: remove.""" + """ion -- Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here + TEMPORARILY since selecting all ca or na conducting channel populations/densities in a cell would be difficult otherwise. + Also, it will make it easier to set the correct native simulator value for erev (e.g. ek for ion = k in NEURON). + Currently a required attribute. + It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel + element. TODO: remove. + + """ + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('ion_channel', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('ion', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('variable_parameters', 'VariableParameter', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'VariableParameter', u'name': u'variableParameter', u'minOccurs': u'0'}, None), + MemberSpec_('ion_channel', 'NmlId', 0, 0, {'use': 'required', 'name': 'ion_channel'}), + MemberSpec_('ion', 'NmlId', 0, 0, {'use': 'required', 'name': 'ion'}), + MemberSpec_('variable_parameters', 'VariableParameter', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'variableParameter', 'type': 'VariableParameter'}, None), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, ion_channel=None, ion=None, variable_parameters=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, ion_channel=None, ion=None, variable_parameters=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(ChannelDensityNonUniformNernst, self).__init__(neuro_lex_id, id, **kwargs_) + self.ns_prefix_ = None + super(globals().get("ChannelDensityNonUniformNernst"), self).__init__(neuro_lex_id, id, **kwargs_) self.ion_channel = _cast(None, ion_channel) + self.ion_channel_nsprefix_ = None self.ion = _cast(None, ion) + self.ion_nsprefix_ = None if variable_parameters is None: self.variable_parameters = [] else: self.variable_parameters = variable_parameters + self.variable_parameters_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -11408,22 +15252,48 @@ def factory(*args_, **kwargs_): else: return ChannelDensityNonUniformNernst(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_variableParameter(self): + return self.variable_parameters + def set_variableParameter(self, variable_parameters): + self.variable_parameters = variable_parameters + def add_variableParameter(self, value): + self.variable_parameters.append(value) + def insert_variableParameter_at(self, index, value): + self.variable_parameters.insert(index, value) + def replace_variableParameter_at(self, index, value): + self.variable_parameters[index] = value + def get_ionChannel(self): + return self.ion_channel + def set_ionChannel(self, ion_channel): + self.ion_channel = ion_channel + def get_ion(self): + return self.ion + def set_ion(self, ion): + self.ion = ion def 
validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_NmlId_patterns_, )) + validate_NmlId_patterns_ = [['^([a-zA-Z_][a-zA-Z0-9_]*)$']] + def _hasContent(self): if ( self.variable_parameters or - super(ChannelDensityNonUniformNernst, self).hasContent_() + super(ChannelDensityNonUniformNernst, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ChannelDensityNonUniformNernst', pretty_print=True): + def export(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='ChannelDensityNonUniformNernst', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('ChannelDensityNonUniformNernst') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ @@ -11431,43 +15301,50 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='C eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'ChannelDensityNonUniformNernst': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelDensityNonUniformNernst') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelDensityNonUniformNernst') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ChannelDensityNonUniformNernst', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ChannelDensityNonUniformNernst', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ChannelDensityNonUniformNernst'): - super(ChannelDensityNonUniformNernst, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelDensityNonUniformNernst') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ChannelDensityNonUniformNernst'): + super(ChannelDensityNonUniformNernst, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelDensityNonUniformNernst') if self.ion_channel is not None and 'ion_channel' not in 
already_processed: already_processed.add('ion_channel') - outfile.write(' ionChannel=%s' % (quote_attrib(self.ion_channel), )) + outfile.write(' ionChannel=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.ion_channel), input_name='ionChannel')), )) if self.ion is not None and 'ion' not in already_processed: already_processed.add('ion') - outfile.write(' ion=%s' % (quote_attrib(self.ion), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ChannelDensityNonUniformNernst', fromsubclass_=False, pretty_print=True): - super(ChannelDensityNonUniformNernst, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write(' ion=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.ion), input_name='ion')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='ChannelDensityNonUniformNernst', fromsubclass_=False, pretty_print=True): + super(ChannelDensityNonUniformNernst, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) if pretty_print: eol_ = '\n' else: eol_ = '' for variableParameter_ in self.variable_parameters: + namespaceprefix_ = self.variable_parameters_nsprefix_ + ':' if (UseCapturedNS_ and self.variable_parameters_nsprefix_) else '' variableParameter_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='variableParameter', pretty_print=pretty_print) - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('ionChannel', node) if value is not None and 'ionChannel' not in already_processed: already_processed.add('ionChannel') @@ -11478,46 +15355,53 @@ def buildAttributes(self, node, attrs, already_processed): already_processed.add('ion') self.ion = value self.validate_NmlId(self.ion) # validate type NmlId - super(ChannelDensityNonUniformNernst, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + super(ChannelDensityNonUniformNernst, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'variableParameter': obj_ = VariableParameter.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.variable_parameters.append(obj_) obj_.original_tagname_ = 'variableParameter' - super(ChannelDensityNonUniformNernst, self).buildChildren(child_, node, nodeName_, True) + super(ChannelDensityNonUniformNernst, self)._buildChildren(child_, node, nodeName_, True) # end class ChannelDensityNonUniformNernst class ChannelDensityNonUniform(Base): - """Specifying the ion here again is redundant, this will be set in - ionChannel definition. 
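The non-uniform channel density classes also gain list accessors for their variableParameter children (get_/set_/add_variableParameter, insert_/replace_variableParameter_at). A short sketch; the VariableParameter keyword names (parameter, segment_groups) and the top-level re-exports are assumptions here:

    from neuroml import ChannelDensityNonUniformNernst, VariableParameter

    cdn = ChannelDensityNonUniformNernst(id="ca_chans_nonuniform",
                                         ion_channel="CaHVA",
                                         ion="ca")
    vp = VariableParameter(parameter="condDensity",
                           segment_groups="dendrite_group")
    cdn.add_variableParameter(vp)
    assert cdn.get_variableParameter()[0] is vp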
It is added here TEMPORARILY since - selecting all ca or na conducting channel populations/densities - in a cell would be difficult otherwise. Also, it will make it - easier to set the correct native simulator value for erev (e.g. - ek for ion = k in NEURON). Currently a required attribute. It - should be removed in the longer term, due to possible - inconsistencies in this value and that in the ionChannel - element. TODO: remove.""" + """ion -- Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here + TEMPORARILY since selecting all ca or na conducting channel populations/densities in a cell would be difficult otherwise. + Also, it will make it easier to set the correct native simulator value for erev (e.g. ek for ion = k in NEURON). + Currently a required attribute. + It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel + element. TODO: remove. + + """ + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('ion_channel', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('erev', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('ion', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('variable_parameters', 'VariableParameter', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'VariableParameter', u'name': u'variableParameter', u'minOccurs': u'0'}, None), + MemberSpec_('ion_channel', 'NmlId', 0, 0, {'use': 'required', 'name': 'ion_channel'}), + MemberSpec_('erev', 'Nml2Quantity_voltage', 0, 0, {'use': 'required', 'name': 'erev'}), + MemberSpec_('ion', 'NmlId', 0, 0, {'use': 'required', 'name': 'ion'}), + MemberSpec_('variable_parameters', 'VariableParameter', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'variableParameter', 'type': 'VariableParameter'}, None), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, ion_channel=None, erev=None, ion=None, variable_parameters=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, ion_channel=None, erev=None, ion=None, variable_parameters=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(ChannelDensityNonUniform, self).__init__(neuro_lex_id, id, **kwargs_) + self.ns_prefix_ = None + super(globals().get("ChannelDensityNonUniform"), self).__init__(neuro_lex_id, id, **kwargs_) self.ion_channel = _cast(None, ion_channel) + self.ion_channel_nsprefix_ = None self.erev = _cast(None, erev) + self.erev_nsprefix_ = None self.ion = _cast(None, ion) + self.ion_nsprefix_ = None if variable_parameters is None: self.variable_parameters = [] else: self.variable_parameters = variable_parameters + self.variable_parameters_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -11529,29 +15413,63 @@ def factory(*args_, **kwargs_): else: return ChannelDensityNonUniform(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_variableParameter(self): + return self.variable_parameters + def set_variableParameter(self, variable_parameters): + self.variable_parameters = variable_parameters + def add_variableParameter(self, value): + self.variable_parameters.append(value) + def insert_variableParameter_at(self, index, value): + 
self.variable_parameters.insert(index, value) + def replace_variableParameter_at(self, index, value): + self.variable_parameters[index] = value + def get_ionChannel(self): + return self.ion_channel + def set_ionChannel(self, ion_channel): + self.ion_channel = ion_channel + def get_erev(self): + return self.erev + def set_erev(self, erev): + self.erev = erev + def get_ion(self): + return self.ion + def set_ion(self, ion): + self.ion = ion def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_NmlId_patterns_, )) + validate_NmlId_patterns_ = [['^([a-zA-Z_][a-zA-Z0-9_]*)$']] def validate_Nml2Quantity_voltage(self, value): # Validate type Nml2Quantity_voltage, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_voltage_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_voltage_patterns_, )) - validate_Nml2Quantity_voltage_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_voltage_patterns_, )) + validate_Nml2Quantity_voltage_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV))$']] + def _hasContent(self): if ( self.variable_parameters or - super(ChannelDensityNonUniform, self).hasContent_() + super(ChannelDensityNonUniform, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ChannelDensityNonUniform', pretty_print=True): + def export(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='ChannelDensityNonUniform', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('ChannelDensityNonUniform') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ @@ -11559,46 +15477,53 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='C eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'ChannelDensityNonUniform': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' 
showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelDensityNonUniform') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelDensityNonUniform') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ChannelDensityNonUniform', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ChannelDensityNonUniform', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ChannelDensityNonUniform'): - super(ChannelDensityNonUniform, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelDensityNonUniform') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ChannelDensityNonUniform'): + super(ChannelDensityNonUniform, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelDensityNonUniform') if self.ion_channel is not None and 'ion_channel' not in already_processed: already_processed.add('ion_channel') - outfile.write(' ionChannel=%s' % (quote_attrib(self.ion_channel), )) + outfile.write(' ionChannel=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.ion_channel), input_name='ionChannel')), )) if self.erev is not None and 'erev' not in already_processed: already_processed.add('erev') - outfile.write(' erev=%s' % (quote_attrib(self.erev), )) + outfile.write(' erev=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.erev), input_name='erev')), )) if self.ion is not None and 'ion' not in already_processed: already_processed.add('ion') - outfile.write(' ion=%s' % (quote_attrib(self.ion), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ChannelDensityNonUniform', fromsubclass_=False, pretty_print=True): - super(ChannelDensityNonUniform, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write(' ion=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.ion), input_name='ion')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='ChannelDensityNonUniform', fromsubclass_=False, pretty_print=True): + super(ChannelDensityNonUniform, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) if pretty_print: eol_ = '\n' else: eol_ = '' for variableParameter_ in self.variable_parameters: + namespaceprefix_ = self.variable_parameters_nsprefix_ + ':' if (UseCapturedNS_ and self.variable_parameters_nsprefix_) else '' variableParameter_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='variableParameter', pretty_print=pretty_print) - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + 
self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('ionChannel', node) if value is not None and 'ionChannel' not in already_processed: already_processed.add('ionChannel') @@ -11614,52 +15539,62 @@ def buildAttributes(self, node, attrs, already_processed): already_processed.add('ion') self.ion = value self.validate_NmlId(self.ion) # validate type NmlId - super(ChannelDensityNonUniform, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + super(ChannelDensityNonUniform, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'variableParameter': obj_ = VariableParameter.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.variable_parameters.append(obj_) obj_.original_tagname_ = 'variableParameter' - super(ChannelDensityNonUniform, self).buildChildren(child_, node, nodeName_, True) + super(ChannelDensityNonUniform, self)._buildChildren(child_, node, nodeName_, True) # end class ChannelDensityNonUniform class ChannelPopulation(Base): - """Specifying the ion here again is redundant, this will be set in - ionChannel definition. It is added here TEMPORARILY since - selecting all ca or na conducting channel populations/densities - in a cell would be difficult otherwise. Also, it will make it - easier to set the correct native simulator value for erev (e.g. - ek for ion = k in NEURON). Currently a required attribute. It - should be removed in the longer term, due to possible - inconsistencies in this value and that in the ionChannel - element. TODO: remove.""" + """ion -- Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here + TEMPORARILY since selecting all ca or na conducting channel populations/densities in a cell would be difficult otherwise. + Also, it will make it easier to set the correct native simulator value for erev (e.g. ek for ion = k in NEURON). + Currently a required attribute. + It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel + element. TODO: remove. 
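ChannelPopulation follows the same pattern; its integer attributes (number, segment) are now parsed with gds_parse_integer and serialized with gds_format_integer. An illustrative construction, with made-up values and the usual top-level re-export assumed:

    import sys
    from neuroml import ChannelPopulation

    pop = ChannelPopulation(id="naChanPop",
                            ion_channel="NaConductance",
                            number=120000,
                            erev="50 mV",
                            ion="na",
                            segments=0)
    pop.export(sys.stdout, 0, name_="channelPopulation")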
+ + """ + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('ion_channel', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('number', 'NonNegativeInteger', 0, 0, {'use': u'required'}), - MemberSpec_('erev', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('segment_groups', 'NmlId', 0, 1, {'use': u'optional'}), - MemberSpec_('segments', 'NonNegativeInteger', 0, 1, {'use': u'optional'}), - MemberSpec_('ion', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('variable_parameters', 'VariableParameter', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'VariableParameter', u'name': u'variableParameter', u'minOccurs': u'0'}, None), + MemberSpec_('ion_channel', 'NmlId', 0, 0, {'use': 'required', 'name': 'ion_channel'}), + MemberSpec_('number', 'NonNegativeInteger', 0, 0, {'use': 'required', 'name': 'number'}), + MemberSpec_('erev', 'Nml2Quantity_voltage', 0, 0, {'use': 'required', 'name': 'erev'}), + MemberSpec_('segment_groups', 'NmlId', 0, 1, {'use': 'optional', 'name': 'segment_groups'}), + MemberSpec_('segments', 'NonNegativeInteger', 0, 1, {'use': 'optional', 'name': 'segments'}), + MemberSpec_('ion', 'NmlId', 0, 0, {'use': 'required', 'name': 'ion'}), + MemberSpec_('variable_parameters', 'VariableParameter', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'variableParameter', 'type': 'VariableParameter'}, None), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, ion_channel=None, number=None, erev=None, segment_groups='all', segments=None, ion=None, variable_parameters=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, ion_channel=None, number=None, erev=None, segment_groups='all', segments=None, ion=None, variable_parameters=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(ChannelPopulation, self).__init__(neuro_lex_id, id, **kwargs_) + self.ns_prefix_ = None + super(globals().get("ChannelPopulation"), self).__init__(neuro_lex_id, id, **kwargs_) self.ion_channel = _cast(None, ion_channel) + self.ion_channel_nsprefix_ = None self.number = _cast(int, number) + self.number_nsprefix_ = None self.erev = _cast(None, erev) + self.erev_nsprefix_ = None self.segment_groups = _cast(None, segment_groups) + self.segment_groups_nsprefix_ = None self.segments = _cast(int, segments) + self.segments_nsprefix_ = None self.ion = _cast(None, ion) + self.ion_nsprefix_ = None if variable_parameters is None: self.variable_parameters = [] else: self.variable_parameters = variable_parameters + self.variable_parameters_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -11671,33 +15606,83 @@ def factory(*args_, **kwargs_): else: return ChannelPopulation(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_variableParameter(self): + return self.variable_parameters + def set_variableParameter(self, variable_parameters): + self.variable_parameters = variable_parameters + def add_variableParameter(self, value): + self.variable_parameters.append(value) + def insert_variableParameter_at(self, index, value): + self.variable_parameters.insert(index, value) + def replace_variableParameter_at(self, index, value): + self.variable_parameters[index] = value + def 
get_ionChannel(self): + return self.ion_channel + def set_ionChannel(self, ion_channel): + self.ion_channel = ion_channel + def get_number(self): + return self.number + def set_number(self, number): + self.number = number + def get_erev(self): + return self.erev + def set_erev(self, erev): + self.erev = erev + def get_segmentGroup(self): + return self.segment_groups + def set_segmentGroup(self, segment_groups): + self.segment_groups = segment_groups + def get_segment(self): + return self.segments + def set_segment(self, segments): + self.segments = segments + def get_ion(self): + return self.ion + def set_ion(self, ion): + self.ion = ion def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_NmlId_patterns_, )) + validate_NmlId_patterns_ = [['^([a-zA-Z_][a-zA-Z0-9_]*)$']] def validate_NonNegativeInteger(self, value): # Validate type NonNegativeInteger, a restriction on xs:nonNegativeInteger. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, int): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (int)' % {"value": value, "lineno": lineno, }) + return False pass def validate_Nml2Quantity_voltage(self, value): # Validate type Nml2Quantity_voltage, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_voltage_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_voltage_patterns_, )) - validate_Nml2Quantity_voltage_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_voltage_patterns_, )) + validate_Nml2Quantity_voltage_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV))$']] + def _hasContent(self): if ( self.variable_parameters or - super(ChannelPopulation, self).hasContent_() + super(ChannelPopulation, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ChannelPopulation', pretty_print=True): + def export(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='ChannelPopulation', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('ChannelPopulation') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ @@ -11705,55 +15690,62 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='C eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'ChannelPopulation': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelPopulation') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelPopulation') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ChannelPopulation', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ChannelPopulation', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ChannelPopulation'): - super(ChannelPopulation, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelPopulation') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ChannelPopulation'): + super(ChannelPopulation, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelPopulation') if self.ion_channel is not None and 'ion_channel' not in already_processed: already_processed.add('ion_channel') - outfile.write(' ionChannel=%s' % (quote_attrib(self.ion_channel), )) + outfile.write(' ionChannel=%s' % 
(self.gds_encode(self.gds_format_string(quote_attrib(self.ion_channel), input_name='ionChannel')), )) if self.number is not None and 'number' not in already_processed: already_processed.add('number') - outfile.write(' number=%s' % (quote_attrib(self.number), )) + outfile.write(' number="%s"' % self.gds_format_integer(self.number, input_name='number')) if self.erev is not None and 'erev' not in already_processed: already_processed.add('erev') - outfile.write(' erev=%s' % (quote_attrib(self.erev), )) + outfile.write(' erev=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.erev), input_name='erev')), )) if self.segment_groups != "all" and 'segment_groups' not in already_processed: already_processed.add('segment_groups') - outfile.write(' segmentGroup=%s' % (quote_attrib(self.segment_groups), )) + outfile.write(' segmentGroup=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.segment_groups), input_name='segmentGroup')), )) if self.segments is not None and 'segments' not in already_processed: already_processed.add('segments') - outfile.write(' segment=%s' % (quote_attrib(self.segments), )) + outfile.write(' segment="%s"' % self.gds_format_integer(self.segments, input_name='segment')) if self.ion is not None and 'ion' not in already_processed: already_processed.add('ion') - outfile.write(' ion=%s' % (quote_attrib(self.ion), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ChannelPopulation', fromsubclass_=False, pretty_print=True): - super(ChannelPopulation, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write(' ion=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.ion), input_name='ion')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='ChannelPopulation', fromsubclass_=False, pretty_print=True): + super(ChannelPopulation, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) if pretty_print: eol_ = '\n' else: eol_ = '' for variableParameter_ in self.variable_parameters: + namespaceprefix_ = self.variable_parameters_nsprefix_ + ':' if (UseCapturedNS_ and self.variable_parameters_nsprefix_) else '' variableParameter_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='variableParameter', pretty_print=pretty_print) - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('ionChannel', node) if value is not None and 'ionChannel' not in already_processed: already_processed.add('ionChannel') @@ -11762,10 +15754,7 @@ def buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('number', node) if value is not None and 'number' not in already_processed: already_processed.add('number') - try: - self.number = int(value) - except 
ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + self.number = self.gds_parse_integer(value, node, 'number') if self.number < 0: raise_parse_error(node, 'Invalid NonNegativeInteger') self.validate_NonNegativeInteger(self.number) # validate type NonNegativeInteger @@ -11782,10 +15771,7 @@ def buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('segment', node) if value is not None and 'segment' not in already_processed: already_processed.add('segment') - try: - self.segments = int(value) - except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + self.segments = self.gds_parse_integer(value, node, 'segment') if self.segments < 0: raise_parse_error(node, 'Invalid NonNegativeInteger') self.validate_NonNegativeInteger(self.segments) # validate type NonNegativeInteger @@ -11794,34 +15780,43 @@ def buildAttributes(self, node, attrs, already_processed): already_processed.add('ion') self.ion = value self.validate_NmlId(self.ion) # validate type NmlId - super(ChannelPopulation, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + super(ChannelPopulation, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'variableParameter': obj_ = VariableParameter.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.variable_parameters.append(obj_) obj_.original_tagname_ = 'variableParameter' - super(ChannelPopulation, self).buildChildren(child_, node, nodeName_, True) + super(ChannelPopulation, self)._buildChildren(child_, node, nodeName_, True) # end class ChannelPopulation class BiophysicalProperties2CaPools(Standalone): - """Standalone element which is usually inside a single cell, but could - be outside and referenced by id.""" + """BiophysicalProperties2CaPools -- Standalone element which is usually inside a single cell, but could be outside and + referenced by id. 
+ + """ + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('membrane_properties2_ca_pools', 'MembraneProperties2CaPools', 0, 0, {u'type': u'MembraneProperties2CaPools', u'name': u'membraneProperties2CaPools'}, None), - MemberSpec_('intracellular_properties2_ca_pools', 'IntracellularProperties2CaPools', 0, 1, {u'type': u'IntracellularProperties2CaPools', u'name': u'intracellularProperties2CaPools', u'minOccurs': u'0'}, None), - MemberSpec_('extracellular_properties', 'ExtracellularProperties', 0, 1, {u'type': u'ExtracellularProperties', u'name': u'extracellularProperties', u'minOccurs': u'0'}, None), + MemberSpec_('membrane_properties2_ca_pools', 'MembraneProperties2CaPools', 0, 0, {'name': 'membraneProperties2CaPools', 'type': 'MembraneProperties2CaPools'}, None), + MemberSpec_('intracellular_properties2_ca_pools', 'IntracellularProperties2CaPools', 0, 1, {'minOccurs': '0', 'name': 'intracellularProperties2CaPools', 'type': 'IntracellularProperties2CaPools'}, None), + MemberSpec_('extracellular_properties', 'ExtracellularProperties', 0, 1, {'minOccurs': '0', 'name': 'extracellularProperties', 'type': 'ExtracellularProperties'}, None), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, membrane_properties2_ca_pools=None, intracellular_properties2_ca_pools=None, extracellular_properties=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, membrane_properties2_ca_pools=None, intracellular_properties2_ca_pools=None, extracellular_properties=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(BiophysicalProperties2CaPools, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.ns_prefix_ = None + super(globals().get("BiophysicalProperties2CaPools"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) self.membrane_properties2_ca_pools = membrane_properties2_ca_pools + self.membrane_properties2_ca_pools_nsprefix_ = None self.intracellular_properties2_ca_pools = intracellular_properties2_ca_pools + self.intracellular_properties2_ca_pools_nsprefix_ = None self.extracellular_properties = extracellular_properties + self.extracellular_properties_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -11833,17 +15828,33 @@ def factory(*args_, **kwargs_): else: return BiophysicalProperties2CaPools(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_membraneProperties2CaPools(self): + return self.membrane_properties2_ca_pools + def set_membraneProperties2CaPools(self, membrane_properties2_ca_pools): + self.membrane_properties2_ca_pools = membrane_properties2_ca_pools + def get_intracellularProperties2CaPools(self): + return self.intracellular_properties2_ca_pools + def set_intracellularProperties2CaPools(self, intracellular_properties2_ca_pools): + self.intracellular_properties2_ca_pools = intracellular_properties2_ca_pools + def get_extracellularProperties(self): + return self.extracellular_properties + def set_extracellularProperties(self, extracellular_properties): + 
self.extracellular_properties = extracellular_properties + def _hasContent(self): if ( self.membrane_properties2_ca_pools is not None or self.intracellular_properties2_ca_pools is not None or self.extracellular_properties is not None or - super(BiophysicalProperties2CaPools, self).hasContent_() + super(BiophysicalProperties2CaPools, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BiophysicalProperties2CaPools', pretty_print=True): + def export(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='BiophysicalProperties2CaPools', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('BiophysicalProperties2CaPools') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ @@ -11851,79 +15862,97 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='B eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'BiophysicalProperties2CaPools': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BiophysicalProperties2CaPools') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BiophysicalProperties2CaPools') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BiophysicalProperties2CaPools', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BiophysicalProperties2CaPools', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BiophysicalProperties2CaPools'): - super(BiophysicalProperties2CaPools, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BiophysicalProperties2CaPools') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BiophysicalProperties2CaPools', fromsubclass_=False, pretty_print=True): - super(BiophysicalProperties2CaPools, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BiophysicalProperties2CaPools'): + super(BiophysicalProperties2CaPools, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BiophysicalProperties2CaPools') + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='BiophysicalProperties2CaPools', fromsubclass_=False, pretty_print=True): + super(BiophysicalProperties2CaPools, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) if pretty_print: eol_ = '\n' else: eol_ = '' if self.membrane_properties2_ca_pools is not None: + namespaceprefix_ = self.membrane_properties2_ca_pools_nsprefix_ + ':' if (UseCapturedNS_ and 
self.membrane_properties2_ca_pools_nsprefix_) else '' self.membrane_properties2_ca_pools.export(outfile, level, namespaceprefix_, namespacedef_='', name_='membraneProperties2CaPools', pretty_print=pretty_print) if self.intracellular_properties2_ca_pools is not None: + namespaceprefix_ = self.intracellular_properties2_ca_pools_nsprefix_ + ':' if (UseCapturedNS_ and self.intracellular_properties2_ca_pools_nsprefix_) else '' self.intracellular_properties2_ca_pools.export(outfile, level, namespaceprefix_, namespacedef_='', name_='intracellularProperties2CaPools', pretty_print=pretty_print) if self.extracellular_properties is not None: + namespaceprefix_ = self.extracellular_properties_nsprefix_ + ':' if (UseCapturedNS_ and self.extracellular_properties_nsprefix_) else '' self.extracellular_properties.export(outfile, level, namespaceprefix_, namespacedef_='', name_='extracellularProperties', pretty_print=pretty_print) - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - super(BiophysicalProperties2CaPools, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + def _buildAttributes(self, node, attrs, already_processed): + super(BiophysicalProperties2CaPools, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'membraneProperties2CaPools': obj_ = MembraneProperties2CaPools.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.membrane_properties2_ca_pools = obj_ obj_.original_tagname_ = 'membraneProperties2CaPools' elif nodeName_ == 'intracellularProperties2CaPools': obj_ = IntracellularProperties2CaPools.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.intracellular_properties2_ca_pools = obj_ obj_.original_tagname_ = 'intracellularProperties2CaPools' elif nodeName_ == 'extracellularProperties': obj_ = ExtracellularProperties.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.extracellular_properties = obj_ obj_.original_tagname_ = 'extracellularProperties' - super(BiophysicalProperties2CaPools, self).buildChildren(child_, node, nodeName_, True) + super(BiophysicalProperties2CaPools, self)._buildChildren(child_, node, nodeName_, True) # end class BiophysicalProperties2CaPools class BiophysicalProperties(Standalone): - """Standalone element which is usually inside a single cell, but could - be outside and referenced by id.""" + """BiophysicalProperties -- Standalone element which is usually inside a single cell, but could be outside and + referenced by id. 
+ + """ + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('membrane_properties', 'MembraneProperties', 0, 0, {u'type': u'MembraneProperties', u'name': u'membraneProperties'}, None), - MemberSpec_('intracellular_properties', 'IntracellularProperties', 0, 1, {u'type': u'IntracellularProperties', u'name': u'intracellularProperties', u'minOccurs': u'0'}, None), - MemberSpec_('extracellular_properties', 'ExtracellularProperties', 0, 1, {u'type': u'ExtracellularProperties', u'name': u'extracellularProperties', u'minOccurs': u'0'}, None), + MemberSpec_('membrane_properties', 'MembraneProperties', 0, 0, {'name': 'membraneProperties', 'type': 'MembraneProperties'}, None), + MemberSpec_('intracellular_properties', 'IntracellularProperties', 0, 1, {'minOccurs': '0', 'name': 'intracellularProperties', 'type': 'IntracellularProperties'}, None), + MemberSpec_('extracellular_properties', 'ExtracellularProperties', 0, 1, {'minOccurs': '0', 'name': 'extracellularProperties', 'type': 'ExtracellularProperties'}, None), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, membrane_properties=None, intracellular_properties=None, extracellular_properties=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, membrane_properties=None, intracellular_properties=None, extracellular_properties=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(BiophysicalProperties, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.ns_prefix_ = None + super(globals().get("BiophysicalProperties"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) self.membrane_properties = membrane_properties + self.membrane_properties_nsprefix_ = None self.intracellular_properties = intracellular_properties + self.intracellular_properties_nsprefix_ = None self.extracellular_properties = extracellular_properties + self.extracellular_properties_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -11935,17 +15964,33 @@ def factory(*args_, **kwargs_): else: return BiophysicalProperties(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_membraneProperties(self): + return self.membrane_properties + def set_membraneProperties(self, membrane_properties): + self.membrane_properties = membrane_properties + def get_intracellularProperties(self): + return self.intracellular_properties + def set_intracellularProperties(self, intracellular_properties): + self.intracellular_properties = intracellular_properties + def get_extracellularProperties(self): + return self.extracellular_properties + def set_extracellularProperties(self, extracellular_properties): + self.extracellular_properties = extracellular_properties + def _hasContent(self): if ( self.membrane_properties is not None or self.intracellular_properties is not None or self.extracellular_properties is not None or - super(BiophysicalProperties, self).hasContent_() + super(BiophysicalProperties, self)._hasContent() ): return True else: return False - def export(self, 
outfile, level, namespaceprefix_='', namespacedef_='', name_='BiophysicalProperties', pretty_print=True): + def export(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='BiophysicalProperties', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('BiophysicalProperties') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ @@ -11953,81 +15998,98 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='B eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'BiophysicalProperties': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BiophysicalProperties') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BiophysicalProperties') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BiophysicalProperties', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BiophysicalProperties', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BiophysicalProperties'): - super(BiophysicalProperties, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BiophysicalProperties') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BiophysicalProperties', fromsubclass_=False, pretty_print=True): - super(BiophysicalProperties, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BiophysicalProperties'): + super(BiophysicalProperties, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BiophysicalProperties') + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='BiophysicalProperties', fromsubclass_=False, pretty_print=True): + super(BiophysicalProperties, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) if pretty_print: eol_ = '\n' else: eol_ = '' if self.membrane_properties is not None: + namespaceprefix_ = self.membrane_properties_nsprefix_ + ':' if (UseCapturedNS_ and self.membrane_properties_nsprefix_) else '' self.membrane_properties.export(outfile, level, namespaceprefix_, namespacedef_='', name_='membraneProperties', pretty_print=pretty_print) if self.intracellular_properties is not None: + namespaceprefix_ = self.intracellular_properties_nsprefix_ + ':' if (UseCapturedNS_ and self.intracellular_properties_nsprefix_) else '' self.intracellular_properties.export(outfile, level, namespaceprefix_, namespacedef_='', name_='intracellularProperties', pretty_print=pretty_print) if self.extracellular_properties is not None: + namespaceprefix_ 
= self.extracellular_properties_nsprefix_ + ':' if (UseCapturedNS_ and self.extracellular_properties_nsprefix_) else '' self.extracellular_properties.export(outfile, level, namespaceprefix_, namespacedef_='', name_='extracellularProperties', pretty_print=pretty_print) - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - super(BiophysicalProperties, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + def _buildAttributes(self, node, attrs, already_processed): + super(BiophysicalProperties, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'membraneProperties': class_obj_ = self.get_class_obj_(child_, MembraneProperties) obj_ = class_obj_.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.membrane_properties = obj_ obj_.original_tagname_ = 'membraneProperties' elif nodeName_ == 'intracellularProperties': class_obj_ = self.get_class_obj_(child_, IntracellularProperties) obj_ = class_obj_.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.intracellular_properties = obj_ obj_.original_tagname_ = 'intracellularProperties' elif nodeName_ == 'extracellularProperties': obj_ = ExtracellularProperties.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.extracellular_properties = obj_ obj_.original_tagname_ = 'extracellularProperties' - super(BiophysicalProperties, self).buildChildren(child_, node, nodeName_, True) + super(BiophysicalProperties, self)._buildChildren(child_, node, nodeName_, True) # end class BiophysicalProperties class InhomogeneousParameter(Base): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('variable', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('metric', 'Metric', 0, 0, {'use': u'required'}), - MemberSpec_('proximal', 'ProximalDetails', 0, 1, {u'type': u'ProximalDetails', u'name': u'proximal', u'minOccurs': u'0'}, None), - MemberSpec_('distal', 'DistalDetails', 0, 1, {u'type': u'DistalDetails', u'name': u'distal', u'minOccurs': u'0'}, None), + MemberSpec_('variable', 'xs:string', 0, 0, {'use': 'required', 'name': 'variable'}), + MemberSpec_('metric', 'Metric', 0, 0, {'use': 'required', 'name': 'metric'}), + MemberSpec_('proximal', 'ProximalDetails', 0, 1, {'minOccurs': '0', 'name': 'proximal', 'type': 'ProximalDetails'}, None), + MemberSpec_('distal', 'DistalDetails', 0, 1, {'minOccurs': '0', 'name': 'distal', 'type': 'DistalDetails'}, None), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, variable=None, metric=None, proximal=None, distal=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, variable=None, metric=None, proximal=None, distal=None, gds_collector_=None, **kwargs_): 
+ self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(InhomogeneousParameter, self).__init__(neuro_lex_id, id, **kwargs_) + self.ns_prefix_ = None + super(globals().get("InhomogeneousParameter"), self).__init__(neuro_lex_id, id, **kwargs_) self.variable = _cast(None, variable) + self.variable_nsprefix_ = None self.metric = _cast(None, metric) + self.metric_nsprefix_ = None self.proximal = proximal + self.proximal_nsprefix_ = None self.distal = distal + self.distal_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -12039,28 +16101,49 @@ def factory(*args_, **kwargs_): else: return InhomogeneousParameter(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_proximal(self): + return self.proximal + def set_proximal(self, proximal): + self.proximal = proximal + def get_distal(self): + return self.distal + def set_distal(self, distal): + self.distal = distal + def get_variable(self): + return self.variable + def set_variable(self, variable): + self.variable = variable + def get_metric(self): + return self.metric + def set_metric(self, metric): + self.metric = metric def validate_Metric(self, value): # Validate type Metric, a restriction on xs:string. - if value is not None and Validate_simpletypes_: - value = str(value) + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False + value = value enumerations = ['Path Length from root'] - enumeration_respectee = False - for enum in enumerations: - if value == enum: - enumeration_respectee = True - break - if not enumeration_respectee: - warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on Metric' % {"value" : value.encode("utf-8")} ) - def hasContent_(self): + if value not in enumerations: + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on Metric' % {"value" : encode_str_2_3(value), "lineno": lineno} ) + result = False + def _hasContent(self): if ( self.proximal is not None or self.distal is not None or - super(InhomogeneousParameter, self).hasContent_() + super(InhomogeneousParameter, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='InhomogeneousParameter', pretty_print=True): + def export(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='InhomogeneousParameter', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('InhomogeneousParameter') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ @@ -12068,45 +16151,53 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='I eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'InhomogeneousParameter': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, 
level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='InhomogeneousParameter') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='InhomogeneousParameter') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='InhomogeneousParameter', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='InhomogeneousParameter', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='InhomogeneousParameter'): - super(InhomogeneousParameter, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='InhomogeneousParameter') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='InhomogeneousParameter'): + super(InhomogeneousParameter, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='InhomogeneousParameter') if self.variable is not None and 'variable' not in already_processed: already_processed.add('variable') outfile.write(' variable=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.variable), input_name='variable')), )) if self.metric is not None and 'metric' not in already_processed: already_processed.add('metric') - outfile.write(' metric=%s' % (quote_attrib(self.metric), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='InhomogeneousParameter', fromsubclass_=False, pretty_print=True): - super(InhomogeneousParameter, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write(' metric=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.metric), input_name='metric')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='InhomogeneousParameter', fromsubclass_=False, pretty_print=True): + super(InhomogeneousParameter, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) if pretty_print: eol_ = '\n' else: eol_ = '' if self.proximal is not None: + namespaceprefix_ = self.proximal_nsprefix_ + ':' if (UseCapturedNS_ and self.proximal_nsprefix_) else '' self.proximal.export(outfile, level, namespaceprefix_, namespacedef_='', name_='proximal', pretty_print=pretty_print) if self.distal is not None: + namespaceprefix_ = self.distal_nsprefix_ + ':' if (UseCapturedNS_ and self.distal_nsprefix_) else '' self.distal.export(outfile, level, namespaceprefix_, namespacedef_='', name_='distal', pretty_print=pretty_print) - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + 
self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('variable', node) if value is not None and 'variable' not in already_processed: already_processed.add('variable') @@ -12116,66 +16207,78 @@ def buildAttributes(self, node, attrs, already_processed): already_processed.add('metric') self.metric = value self.validate_Metric(self.metric) # validate type Metric - super(InhomogeneousParameter, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + super(InhomogeneousParameter, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'proximal': obj_ = ProximalDetails.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.proximal = obj_ obj_.original_tagname_ = 'proximal' elif nodeName_ == 'distal': obj_ = DistalDetails.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.distal = obj_ obj_.original_tagname_ = 'distal' - super(InhomogeneousParameter, self).buildChildren(child_, node, nodeName_, True) + super(InhomogeneousParameter, self)._buildChildren(child_, node, nodeName_, True) # end class InhomogeneousParameter class SegmentGroup(Base): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('notes', ['Notes', 'xs:string'], 0, 1, {u'type': u'xs:string', u'name': u'notes', u'minOccurs': u'0'}, None), - MemberSpec_('properties', 'Property', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Property', u'name': u'property', u'minOccurs': u'0'}, None), - MemberSpec_('annotation', 'Annotation', 0, 1, {u'type': u'Annotation', u'name': u'annotation', u'minOccurs': u'0'}, None), - MemberSpec_('members', 'Member', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Member', u'name': u'member', u'minOccurs': u'0'}, None), - MemberSpec_('includes', 'Include', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Include', u'name': u'include', u'minOccurs': u'0'}, None), - MemberSpec_('paths', 'Path', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Path', u'name': u'path', u'minOccurs': u'0'}, None), - MemberSpec_('sub_trees', 'SubTree', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'SubTree', u'name': u'subTree', u'minOccurs': u'0'}, None), - MemberSpec_('inhomogeneous_parameters', 'InhomogeneousParameter', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'InhomogeneousParameter', u'name': u'inhomogeneousParameter', u'minOccurs': u'0'}, None), + MemberSpec_('notes', ['Notes', 'xs:string'], 0, 1, {'minOccurs': '0', 'name': 'notes', 'type': 'xs:string'}, None), + MemberSpec_('properties', 'Property', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'property', 'type': 'Property'}, None), + MemberSpec_('annotation', 'Annotation', 0, 1, {'minOccurs': '0', 'name': 'annotation', 'type': 'Annotation'}, None), + MemberSpec_('members', 'Member', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'member', 'type': 'Member'}, None), + MemberSpec_('includes', 'Include', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'include', 'type': 'Include'}, None), + MemberSpec_('paths', 'Path', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'path', 'type': 'Path'}, None), + MemberSpec_('sub_trees', 'SubTree', 1, 1, 
{'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'subTree', 'type': 'SubTree'}, None), + MemberSpec_('inhomogeneous_parameters', 'InhomogeneousParameter', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'inhomogeneousParameter', 'type': 'InhomogeneousParameter'}, None), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, notes=None, properties=None, annotation=None, members=None, includes=None, paths=None, sub_trees=None, inhomogeneous_parameters=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, notes=None, properties=None, annotation=None, members=None, includes=None, paths=None, sub_trees=None, inhomogeneous_parameters=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(SegmentGroup, self).__init__(neuro_lex_id, id, **kwargs_) + self.ns_prefix_ = None + super(globals().get("SegmentGroup"), self).__init__(neuro_lex_id, id, **kwargs_) self.notes = notes self.validate_Notes(self.notes) + self.notes_nsprefix_ = None if properties is None: self.properties = [] else: self.properties = properties + self.properties_nsprefix_ = None self.annotation = annotation + self.annotation_nsprefix_ = None if members is None: self.members = [] else: self.members = members + self.members_nsprefix_ = None if includes is None: self.includes = [] else: self.includes = includes + self.includes_nsprefix_ = None if paths is None: self.paths = [] else: self.paths = paths + self.paths_nsprefix_ = None if sub_trees is None: self.sub_trees = [] else: self.sub_trees = sub_trees + self.sub_trees_nsprefix_ = None if inhomogeneous_parameters is None: self.inhomogeneous_parameters = [] else: self.inhomogeneous_parameters = inhomogeneous_parameters + self.inhomogeneous_parameters_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -12187,11 +16290,89 @@ def factory(*args_, **kwargs_): else: return SegmentGroup(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_notes(self): + return self.notes + def set_notes(self, notes): + self.notes = notes + def get_property(self): + return self.properties + def set_property(self, properties): + self.properties = properties + def add_property(self, value): + self.properties.append(value) + def insert_property_at(self, index, value): + self.properties.insert(index, value) + def replace_property_at(self, index, value): + self.properties[index] = value + def get_annotation(self): + return self.annotation + def set_annotation(self, annotation): + self.annotation = annotation + def get_member(self): + return self.members + def set_member(self, members): + self.members = members + def add_member(self, value): + self.members.append(value) + def insert_member_at(self, index, value): + self.members.insert(index, value) + def replace_member_at(self, index, value): + self.members[index] = value + def get_include(self): + return self.includes + def set_include(self, includes): + self.includes = includes + def add_include(self, value): + self.includes.append(value) + def insert_include_at(self, index, value): + self.includes.insert(index, value) + def replace_include_at(self, index, value): + self.includes[index] = value + def get_path(self): + return self.paths + def set_path(self, 
paths): + self.paths = paths + def add_path(self, value): + self.paths.append(value) + def insert_path_at(self, index, value): + self.paths.insert(index, value) + def replace_path_at(self, index, value): + self.paths[index] = value + def get_subTree(self): + return self.sub_trees + def set_subTree(self, sub_trees): + self.sub_trees = sub_trees + def add_subTree(self, value): + self.sub_trees.append(value) + def insert_subTree_at(self, index, value): + self.sub_trees.insert(index, value) + def replace_subTree_at(self, index, value): + self.sub_trees[index] = value + def get_inhomogeneousParameter(self): + return self.inhomogeneous_parameters + def set_inhomogeneousParameter(self, inhomogeneous_parameters): + self.inhomogeneous_parameters = inhomogeneous_parameters + def add_inhomogeneousParameter(self, value): + self.inhomogeneous_parameters.append(value) + def insert_inhomogeneousParameter_at(self, index, value): + self.inhomogeneous_parameters.insert(index, value) + def replace_inhomogeneousParameter_at(self, index, value): + self.inhomogeneous_parameters[index] = value def validate_Notes(self, value): + result = True # Validate type Notes, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False pass - def hasContent_(self): + return result + def _hasContent(self): if ( self.notes is not None or self.properties or @@ -12201,12 +16382,12 @@ def hasContent_(self): self.paths or self.sub_trees or self.inhomogeneous_parameters or - super(SegmentGroup, self).hasContent_() + super(SegmentGroup, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SegmentGroup', pretty_print=True): + def export(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='SegmentGroup', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('SegmentGroup') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ @@ -12214,96 +16395,112 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='S eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'SegmentGroup': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SegmentGroup') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SegmentGroup') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SegmentGroup', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SegmentGroup', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, 
outfile, level, already_processed, namespaceprefix_='', name_='SegmentGroup'): - super(SegmentGroup, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SegmentGroup') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SegmentGroup', fromsubclass_=False, pretty_print=True): - super(SegmentGroup, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SegmentGroup'): + super(SegmentGroup, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SegmentGroup') + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='SegmentGroup', fromsubclass_=False, pretty_print=True): + super(SegmentGroup, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) if pretty_print: eol_ = '\n' else: eol_ = '' if self.notes is not None: + namespaceprefix_ = self.notes_nsprefix_ + ':' if (UseCapturedNS_ and self.notes_nsprefix_) else '' showIndent(outfile, level, pretty_print) outfile.write('<%snotes>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.notes), input_name='notes')), namespaceprefix_ , eol_)) for property_ in self.properties: + namespaceprefix_ = self.properties_nsprefix_ + ':' if (UseCapturedNS_ and self.properties_nsprefix_) else '' property_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='property', pretty_print=pretty_print) if self.annotation is not None: + namespaceprefix_ = self.annotation_nsprefix_ + ':' if (UseCapturedNS_ and self.annotation_nsprefix_) else '' self.annotation.export(outfile, level, namespaceprefix_, namespacedef_='', name_='annotation', pretty_print=pretty_print) for member_ in self.members: + namespaceprefix_ = self.members_nsprefix_ + ':' if (UseCapturedNS_ and self.members_nsprefix_) else '' member_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='member', pretty_print=pretty_print) for include_ in self.includes: + namespaceprefix_ = self.includes_nsprefix_ + ':' if (UseCapturedNS_ and self.includes_nsprefix_) else '' include_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='include', pretty_print=pretty_print) for path_ in self.paths: + namespaceprefix_ = self.paths_nsprefix_ + ':' if (UseCapturedNS_ and self.paths_nsprefix_) else '' path_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='path', pretty_print=pretty_print) for subTree_ in self.sub_trees: + namespaceprefix_ = self.sub_trees_nsprefix_ + ':' if (UseCapturedNS_ and self.sub_trees_nsprefix_) else '' subTree_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='subTree', pretty_print=pretty_print) for inhomogeneousParameter_ in self.inhomogeneous_parameters: + namespaceprefix_ = self.inhomogeneous_parameters_nsprefix_ + ':' if (UseCapturedNS_ and self.inhomogeneous_parameters_nsprefix_) else '' inhomogeneousParameter_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='inhomogeneousParameter', pretty_print=pretty_print) - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + 
self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - super(SegmentGroup, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + def _buildAttributes(self, node, attrs, already_processed): + super(SegmentGroup, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'notes': - notes_ = child_.text - notes_ = self.gds_validate_string(notes_, node, 'notes') - self.notes = notes_ + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'notes') + value_ = self.gds_validate_string(value_, node, 'notes') + self.notes = value_ + self.notes_nsprefix_ = child_.prefix # validate type Notes self.validate_Notes(self.notes) elif nodeName_ == 'property': obj_ = Property.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.properties.append(obj_) obj_.original_tagname_ = 'property' elif nodeName_ == 'annotation': obj_ = Annotation.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.annotation = obj_ obj_.original_tagname_ = 'annotation' elif nodeName_ == 'member': obj_ = Member.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.members.append(obj_) obj_.original_tagname_ = 'member' elif nodeName_ == 'include': obj_ = Include.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.includes.append(obj_) obj_.original_tagname_ = 'include' elif nodeName_ == 'path': obj_ = Path.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.paths.append(obj_) obj_.original_tagname_ = 'path' elif nodeName_ == 'subTree': obj_ = SubTree.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.sub_trees.append(obj_) obj_.original_tagname_ = 'subTree' elif nodeName_ == 'inhomogeneousParameter': obj_ = InhomogeneousParameter.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.inhomogeneous_parameters.append(obj_) obj_.original_tagname_ = 'inhomogeneousParameter' - super(SegmentGroup, self).buildChildren(child_, node, nodeName_, True) + super(SegmentGroup, self)._buildChildren(child_, node, nodeName_, True) def __str__(self): @@ -12318,22 +16515,30 @@ def __repr__(self): class Segment(BaseNonNegativeIntegerId): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('name', 'xs:string', 0, 1, {'use': u'optional'}), - MemberSpec_('parent', 'SegmentParent', 0, 1, {u'type': u'SegmentParent', u'name': u'parent', u'minOccurs': u'0'}, None), - MemberSpec_('proximal', 'Point3DWithDiam', 0, 1, {u'type': u'Point3DWithDiam', u'name': u'proximal', u'minOccurs': u'0'}, None), - MemberSpec_('distal', 'Point3DWithDiam', 0, 0, {u'type': u'Point3DWithDiam', u'name': u'distal', u'minOccurs': u'1'}, None), + MemberSpec_('name', 'xs:string', 0, 1, {'use': 'optional', 'name': 'name'}), + MemberSpec_('parent', 'SegmentParent', 0, 1, {'minOccurs': '0', 'name': 'parent', 'type': 'SegmentParent'}, 
None), + MemberSpec_('proximal', 'Point3DWithDiam', 0, 1, {'minOccurs': '0', 'name': 'proximal', 'type': 'Point3DWithDiam'}, None), + MemberSpec_('distal', 'Point3DWithDiam', 0, 0, {'minOccurs': '1', 'name': 'distal', 'type': 'Point3DWithDiam'}, None), ] subclass = None superclass = BaseNonNegativeIntegerId - def __init__(self, neuro_lex_id=None, id=None, name=None, parent=None, proximal=None, distal=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, name=None, parent=None, proximal=None, distal=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(Segment, self).__init__(neuro_lex_id, id, **kwargs_) + self.ns_prefix_ = None + super(globals().get("Segment"), self).__init__(neuro_lex_id, id, **kwargs_) self.name = _cast(None, name) + self.name_nsprefix_ = None self.parent = parent + self.parent_nsprefix_ = None self.proximal = proximal + self.proximal_nsprefix_ = None self.distal = distal + self.distal_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -12345,17 +16550,37 @@ def factory(*args_, **kwargs_): else: return Segment(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_parent(self): + return self.parent + def set_parent(self, parent): + self.parent = parent + def get_proximal(self): + return self.proximal + def set_proximal(self, proximal): + self.proximal = proximal + def get_distal(self): + return self.distal + def set_distal(self, distal): + self.distal = distal + def get_name(self): + return self.name + def set_name(self, name): + self.name = name + def _hasContent(self): if ( self.parent is not None or self.proximal is not None or self.distal is not None or - super(Segment, self).hasContent_() + super(Segment, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Segment', pretty_print=True): + def export(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='Segment', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('Segment') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ @@ -12363,66 +16588,75 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='S eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'Segment': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Segment') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Segment') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Segment', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Segment', pretty_print=pretty_print) showIndent(outfile, 
level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Segment'): - super(Segment, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Segment') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Segment'): + super(Segment, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Segment') if self.name is not None and 'name' not in already_processed: already_processed.add('name') outfile.write(' name=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.name), input_name='name')), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Segment', fromsubclass_=False, pretty_print=True): - super(Segment, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='Segment', fromsubclass_=False, pretty_print=True): + super(Segment, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) if pretty_print: eol_ = '\n' else: eol_ = '' if self.parent is not None: + namespaceprefix_ = self.parent_nsprefix_ + ':' if (UseCapturedNS_ and self.parent_nsprefix_) else '' self.parent.export(outfile, level, namespaceprefix_, namespacedef_='', name_='parent', pretty_print=pretty_print) if self.proximal is not None: + namespaceprefix_ = self.proximal_nsprefix_ + ':' if (UseCapturedNS_ and self.proximal_nsprefix_) else '' self.proximal.export(outfile, level, namespaceprefix_, namespacedef_='', name_='proximal', pretty_print=pretty_print) if self.distal is not None: + namespaceprefix_ = self.distal_nsprefix_ + ':' if (UseCapturedNS_ and self.distal_nsprefix_) else '' self.distal.export(outfile, level, namespaceprefix_, namespacedef_='', name_='distal', pretty_print=pretty_print) - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('name', node) if value is not None and 'name' not in already_processed: already_processed.add('name') self.name = value - super(Segment, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + super(Segment, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'parent': obj_ = SegmentParent.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.parent = obj_ obj_.original_tagname_ = 'parent' elif nodeName_ == 'proximal': obj_ = Point3DWithDiam.factory(parent_object_=self) - 
obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.proximal = obj_ obj_.original_tagname_ = 'proximal' elif nodeName_ == 'distal': obj_ = Point3DWithDiam.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.distal = obj_ obj_.original_tagname_ = 'distal' - super(Segment, self).buildChildren(child_, node, nodeName_, True) + super(Segment, self)._buildChildren(child_, node, nodeName_, True) @property def length(self): """Get the length of the segment. @@ -12514,26 +16748,34 @@ def surface_area(self): class Morphology(Standalone): - """Standalone element which is usually inside a single cell, but could - be outside and referenced by id.""" + """Morphology -- Standalone element which is usually inside a single cell, but could be outside and + referenced by id. + + """ + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('segments', 'Segment', 1, 0, {u'maxOccurs': u'unbounded', u'type': u'Segment', u'name': u'segment'}, None), - MemberSpec_('segment_groups', 'SegmentGroup', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'SegmentGroup', u'name': u'segmentGroup', u'minOccurs': u'0'}, None), + MemberSpec_('segments', 'Segment', 1, 0, {'maxOccurs': 'unbounded', 'name': 'segment', 'type': 'Segment'}, None), + MemberSpec_('segment_groups', 'SegmentGroup', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'segmentGroup', 'type': 'SegmentGroup'}, None), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, segments=None, segment_groups=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, segments=None, segment_groups=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(Morphology, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.ns_prefix_ = None + super(globals().get("Morphology"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) if segments is None: self.segments = [] else: self.segments = segments + self.segments_nsprefix_ = None if segment_groups is None: self.segment_groups = [] else: self.segment_groups = segment_groups + self.segment_groups_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -12545,16 +16787,40 @@ def factory(*args_, **kwargs_): else: return Morphology(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_segment(self): + return self.segments + def set_segment(self, segments): + self.segments = segments + def add_segment(self, value): + self.segments.append(value) + def insert_segment_at(self, index, value): + self.segments.insert(index, value) + def replace_segment_at(self, index, value): + self.segments[index] = value + def get_segmentGroup(self): + return self.segment_groups + def set_segmentGroup(self, segment_groups): + self.segment_groups = segment_groups + def add_segmentGroup(self, value): + self.segment_groups.append(value) + def insert_segmentGroup_at(self, index, value): + self.segment_groups.insert(index, value) + def replace_segmentGroup_at(self, 
index, value): + self.segment_groups[index] = value + def _hasContent(self): if ( self.segments or self.segment_groups or - super(Morphology, self).hasContent_() + super(Morphology, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Morphology', pretty_print=True): + def export(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='Morphology', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('Morphology') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ @@ -12562,52 +16828,60 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='M eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'Morphology': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Morphology') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Morphology') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Morphology', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Morphology', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Morphology'): - super(Morphology, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Morphology') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Morphology', fromsubclass_=False, pretty_print=True): - super(Morphology, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Morphology'): + super(Morphology, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Morphology') + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='Morphology', fromsubclass_=False, pretty_print=True): + super(Morphology, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) if pretty_print: eol_ = '\n' else: eol_ = '' for segment_ in self.segments: + namespaceprefix_ = self.segments_nsprefix_ + ':' if (UseCapturedNS_ and self.segments_nsprefix_) else '' segment_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='segment', pretty_print=pretty_print) for segmentGroup_ in self.segment_groups: + namespaceprefix_ = self.segment_groups_nsprefix_ + ':' if (UseCapturedNS_ and self.segment_groups_nsprefix_) else '' segmentGroup_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='segmentGroup', pretty_print=pretty_print) - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if 
SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - super(Morphology, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + def _buildAttributes(self, node, attrs, already_processed): + super(Morphology, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'segment': obj_ = Segment.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.segments.append(obj_) obj_.original_tagname_ = 'segment' elif nodeName_ == 'segmentGroup': obj_ = SegmentGroup.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.segment_groups.append(obj_) obj_.original_tagname_ = 'segmentGroup' - super(Morphology, self).buildChildren(child_, node, nodeName_, True) + super(Morphology, self)._buildChildren(child_, node, nodeName_, True) @property def num_segments(self): """Get the number of segments included in this cell morphology. @@ -12620,14 +16894,18 @@ def num_segments(self): class BaseCell(Standalone): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, extensiontype_=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, extensiontype_=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(BaseCell, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, extensiontype_, **kwargs_) + self.ns_prefix_ = None + super(globals().get("BaseCell"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, extensiontype_, **kwargs_) self.extensiontype_ = extensiontype_ def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: @@ -12640,9 +16918,15 @@ def factory(*args_, **kwargs_): else: return BaseCell(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_extensiontype_(self): return self.extensiontype_ + def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ + def _hasContent(self): if ( - super(BaseCell, self).hasContent_() + super(BaseCell, self)._hasContent() ): return True else: @@ -12655,55 +16939,69 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='B eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'BaseCell': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) 
outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseCell') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseCell') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BaseCell', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BaseCell', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BaseCell'): - super(BaseCell, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseCell') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BaseCell'): + super(BaseCell, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseCell') if self.extensiontype_ is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseCell', fromsubclass_=False, pretty_print=True): - super(BaseCell, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get(self.extensiontype_, '') + outfile.write(' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_)) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseCell', fromsubclass_=False, pretty_print=True): + super(BaseCell, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('xsi:type', node) if value is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') self.extensiontype_ = value - super(BaseCell, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(BaseCell, self).buildChildren(child_, node, nodeName_, True) + super(BaseCell, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(BaseCell, 
self)._buildChildren(child_, node, nodeName_, True) pass # end class BaseCell class BaseSynapse(Standalone): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, extensiontype_=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, extensiontype_=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(BaseSynapse, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, extensiontype_, **kwargs_) + self.ns_prefix_ = None + super(globals().get("BaseSynapse"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, extensiontype_, **kwargs_) self.extensiontype_ = extensiontype_ def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: @@ -12716,9 +17014,15 @@ def factory(*args_, **kwargs_): else: return BaseSynapse(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_extensiontype_(self): return self.extensiontype_ + def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ + def _hasContent(self): if ( - super(BaseSynapse, self).hasContent_() + super(BaseSynapse, self)._hasContent() ): return True else: @@ -12731,64 +17035,84 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='B eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'BaseSynapse': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseSynapse') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseSynapse') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BaseSynapse', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BaseSynapse', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BaseSynapse'): - super(BaseSynapse, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseSynapse') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BaseSynapse'): + super(BaseSynapse, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseSynapse') if self.extensiontype_ is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, 
namespaceprefix_='', namespacedef_='', name_='BaseSynapse', fromsubclass_=False, pretty_print=True): - super(BaseSynapse, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get(self.extensiontype_, '') + outfile.write(' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_)) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseSynapse', fromsubclass_=False, pretty_print=True): + super(BaseSynapse, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('xsi:type', node) if value is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') self.extensiontype_ = value - super(BaseSynapse, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(BaseSynapse, self).buildChildren(child_, node, nodeName_, True) + super(BaseSynapse, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(BaseSynapse, self)._buildChildren(child_, node, nodeName_, True) pass # end class BaseSynapse class FixedFactorConcentrationModel(Standalone): - """Should not be required, as it's present on the species element!""" + """ion -- Should not be required, as it's present on the species element! 
+ + """ + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('ion', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('resting_conc', 'Nml2Quantity_concentration', 0, 0, {'use': u'required'}), - MemberSpec_('decay_constant', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('rho', 'Nml2Quantity_rhoFactor', 0, 0, {'use': u'required'}), + MemberSpec_('ion', 'NmlId', 0, 0, {'use': 'required', 'name': 'ion'}), + MemberSpec_('resting_conc', 'Nml2Quantity_concentration', 0, 0, {'use': 'required', 'name': 'resting_conc'}), + MemberSpec_('decay_constant', 'Nml2Quantity_time', 0, 0, {'use': 'required', 'name': 'decay_constant'}), + MemberSpec_('rho', 'Nml2Quantity_rhoFactor', 0, 0, {'use': 'required', 'name': 'rho'}), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, ion=None, resting_conc=None, decay_constant=None, rho=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, ion=None, resting_conc=None, decay_constant=None, rho=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(FixedFactorConcentrationModel, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.ns_prefix_ = None + super(globals().get("FixedFactorConcentrationModel"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) self.ion = _cast(None, ion) + self.ion_nsprefix_ = None self.resting_conc = _cast(None, resting_conc) + self.resting_conc_nsprefix_ = None self.decay_constant = _cast(None, decay_constant) + self.decay_constant_nsprefix_ = None self.rho = _cast(None, rho) + self.rho_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -12800,37 +17124,73 @@ def factory(*args_, **kwargs_): else: return FixedFactorConcentrationModel(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ion(self): + return self.ion + def set_ion(self, ion): + self.ion = ion + def get_restingConc(self): + return self.resting_conc + def set_restingConc(self, resting_conc): + self.resting_conc = resting_conc + def get_decayConstant(self): + return self.decay_constant + def set_decayConstant(self, decay_constant): + self.decay_constant = decay_constant + def get_rho(self): + return self.rho + def set_rho(self, rho): + self.rho = rho def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_NmlId_patterns_, )) + validate_NmlId_patterns_ = [['^([a-zA-Z_][a-zA-Z0-9_]*)$']] def validate_Nml2Quantity_concentration(self, value): # Validate type Nml2Quantity_concentration, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_concentration_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_concentration_patterns_, )) - validate_Nml2Quantity_concentration_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(mol_per_m3|mol_per_cm3|M|mM)$']] + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_concentration_patterns_, )) + validate_Nml2Quantity_concentration_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(mol_per_m3|mol_per_cm3|M|mM))$']] def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_time_patterns_, )) + validate_Nml2Quantity_time_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms))$']] def validate_Nml2Quantity_rhoFactor(self, value): # Validate type Nml2Quantity_rhoFactor, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_rhoFactor_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_rhoFactor_patterns_, )) - validate_Nml2Quantity_rhoFactor_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(mol_per_m_per_A_per_s|mol_per_cm_per_uA_per_ms)$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_rhoFactor_patterns_, )) + validate_Nml2Quantity_rhoFactor_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(mol_per_m_per_A_per_s|mol_per_cm_per_uA_per_ms))$']] + def _hasContent(self): if ( - super(FixedFactorConcentrationModel, self).hasContent_() + super(FixedFactorConcentrationModel, self)._hasContent() ): return True else: @@ -12843,43 +17203,49 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='F eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'FixedFactorConcentrationModel': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='FixedFactorConcentrationModel') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='FixedFactorConcentrationModel') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='FixedFactorConcentrationModel', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='FixedFactorConcentrationModel', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='FixedFactorConcentrationModel'): - super(FixedFactorConcentrationModel, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='FixedFactorConcentrationModel') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='FixedFactorConcentrationModel'): + super(FixedFactorConcentrationModel, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='FixedFactorConcentrationModel') if self.ion is not None and 'ion' not in already_processed: already_processed.add('ion') - outfile.write(' ion=%s' % (quote_attrib(self.ion), )) + outfile.write(' ion=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.ion), input_name='ion')), )) if self.resting_conc is not None and 'resting_conc' not in already_processed: already_processed.add('resting_conc') - outfile.write(' restingConc=%s' % 
(quote_attrib(self.resting_conc), )) + outfile.write(' restingConc=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.resting_conc), input_name='restingConc')), )) if self.decay_constant is not None and 'decay_constant' not in already_processed: already_processed.add('decay_constant') - outfile.write(' decayConstant=%s' % (quote_attrib(self.decay_constant), )) + outfile.write(' decayConstant=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.decay_constant), input_name='decayConstant')), )) if self.rho is not None and 'rho' not in already_processed: already_processed.add('rho') - outfile.write(' rho=%s' % (quote_attrib(self.rho), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='FixedFactorConcentrationModel', fromsubclass_=False, pretty_print=True): - super(FixedFactorConcentrationModel, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write(' rho=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.rho), input_name='rho')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='FixedFactorConcentrationModel', fromsubclass_=False, pretty_print=True): + super(FixedFactorConcentrationModel, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('ion', node) if value is not None and 'ion' not in already_processed: already_processed.add('ion') @@ -12900,31 +17266,41 @@ def buildAttributes(self, node, attrs, already_processed): already_processed.add('rho') self.rho = value self.validate_Nml2Quantity_rhoFactor(self.rho) # validate type Nml2Quantity_rhoFactor - super(FixedFactorConcentrationModel, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(FixedFactorConcentrationModel, self).buildChildren(child_, node, nodeName_, True) + super(FixedFactorConcentrationModel, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(FixedFactorConcentrationModel, self)._buildChildren(child_, node, nodeName_, True) pass # end class FixedFactorConcentrationModel class DecayingPoolConcentrationModel(Standalone): - """Should not be required, as it's present on the species element!""" + """ion -- Should not be required, as it's present on the species element! 
+ + """ + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('ion', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('resting_conc', 'Nml2Quantity_concentration', 0, 0, {'use': u'required'}), - MemberSpec_('decay_constant', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('shell_thickness', 'Nml2Quantity_length', 0, 0, {'use': u'required'}), + MemberSpec_('ion', 'NmlId', 0, 0, {'use': 'required', 'name': 'ion'}), + MemberSpec_('resting_conc', 'Nml2Quantity_concentration', 0, 0, {'use': 'required', 'name': 'resting_conc'}), + MemberSpec_('decay_constant', 'Nml2Quantity_time', 0, 0, {'use': 'required', 'name': 'decay_constant'}), + MemberSpec_('shell_thickness', 'Nml2Quantity_length', 0, 0, {'use': 'required', 'name': 'shell_thickness'}), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, ion=None, resting_conc=None, decay_constant=None, shell_thickness=None, extensiontype_=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, ion=None, resting_conc=None, decay_constant=None, shell_thickness=None, extensiontype_=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(DecayingPoolConcentrationModel, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, extensiontype_, **kwargs_) + self.ns_prefix_ = None + super(globals().get("DecayingPoolConcentrationModel"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, extensiontype_, **kwargs_) self.ion = _cast(None, ion) + self.ion_nsprefix_ = None self.resting_conc = _cast(None, resting_conc) + self.resting_conc_nsprefix_ = None self.decay_constant = _cast(None, decay_constant) + self.decay_constant_nsprefix_ = None self.shell_thickness = _cast(None, shell_thickness) + self.shell_thickness_nsprefix_ = None self.extensiontype_ = extensiontype_ def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: @@ -12937,37 +17313,75 @@ def factory(*args_, **kwargs_): else: return DecayingPoolConcentrationModel(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_ion(self): + return self.ion + def set_ion(self, ion): + self.ion = ion + def get_restingConc(self): + return self.resting_conc + def set_restingConc(self, resting_conc): + self.resting_conc = resting_conc + def get_decayConstant(self): + return self.decay_constant + def set_decayConstant(self, decay_constant): + self.decay_constant = decay_constant + def get_shellThickness(self): + return self.shell_thickness + def set_shellThickness(self, shell_thickness): + self.shell_thickness = shell_thickness + def get_extensiontype_(self): return self.extensiontype_ + def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_NmlId_patterns_, )) + validate_NmlId_patterns_ = [['^([a-zA-Z_][a-zA-Z0-9_]*)$']] def validate_Nml2Quantity_concentration(self, value): # Validate type Nml2Quantity_concentration, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_concentration_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_concentration_patterns_, )) - validate_Nml2Quantity_concentration_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(mol_per_m3|mol_per_cm3|M|mM)$']] + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_concentration_patterns_, )) + validate_Nml2Quantity_concentration_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(mol_per_m3|mol_per_cm3|M|mM))$']] def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_time_patterns_, )) + validate_Nml2Quantity_time_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms))$']] def validate_Nml2Quantity_length(self, value): # Validate type Nml2Quantity_length, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_length_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_length_patterns_, )) - validate_Nml2Quantity_length_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(m|cm|um)$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_length_patterns_, )) + validate_Nml2Quantity_length_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(m|cm|um))$']] + def _hasContent(self): if ( - super(DecayingPoolConcentrationModel, self).hasContent_() + super(DecayingPoolConcentrationModel, self)._hasContent() ): return True else: @@ -12980,47 +17394,57 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='D eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'DecayingPoolConcentrationModel': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='DecayingPoolConcentrationModel') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='DecayingPoolConcentrationModel') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='DecayingPoolConcentrationModel', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='DecayingPoolConcentrationModel', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='DecayingPoolConcentrationModel'): - super(DecayingPoolConcentrationModel, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='DecayingPoolConcentrationModel') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='DecayingPoolConcentrationModel'): + super(DecayingPoolConcentrationModel, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='DecayingPoolConcentrationModel') if self.ion is not None and 'ion' not in already_processed: already_processed.add('ion') - outfile.write(' ion=%s' % (quote_attrib(self.ion), )) + outfile.write(' ion=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.ion), input_name='ion')), )) if self.resting_conc is not None and 'resting_conc' not in already_processed: already_processed.add('resting_conc') - outfile.write(' restingConc=%s' % (quote_attrib(self.resting_conc), )) + outfile.write(' restingConc=%s' % 
(self.gds_encode(self.gds_format_string(quote_attrib(self.resting_conc), input_name='restingConc')), )) if self.decay_constant is not None and 'decay_constant' not in already_processed: already_processed.add('decay_constant') - outfile.write(' decayConstant=%s' % (quote_attrib(self.decay_constant), )) + outfile.write(' decayConstant=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.decay_constant), input_name='decayConstant')), )) if self.shell_thickness is not None and 'shell_thickness' not in already_processed: already_processed.add('shell_thickness') - outfile.write(' shellThickness=%s' % (quote_attrib(self.shell_thickness), )) + outfile.write(' shellThickness=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.shell_thickness), input_name='shellThickness')), )) if self.extensiontype_ is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='DecayingPoolConcentrationModel', fromsubclass_=False, pretty_print=True): - super(DecayingPoolConcentrationModel, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get(self.extensiontype_, '') + outfile.write(' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_)) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='DecayingPoolConcentrationModel', fromsubclass_=False, pretty_print=True): + super(DecayingPoolConcentrationModel, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('ion', node) if value is not None and 'ion' not in already_processed: already_processed.add('ion') @@ -13045,33 +17469,42 @@ def buildAttributes(self, node, attrs, already_processed): if value is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') self.extensiontype_ = value - super(DecayingPoolConcentrationModel, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(DecayingPoolConcentrationModel, self).buildChildren(child_, node, nodeName_, True) + super(DecayingPoolConcentrationModel, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(DecayingPoolConcentrationModel, self)._buildChildren(child_, node, nodeName_, True) pass # end class 
DecayingPoolConcentrationModel class GateFractionalSubgate(Base): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('fractional_conductance', 'Nml2Quantity_none', 0, 0, {'use': u'required'}), - MemberSpec_('notes', ['Notes', 'xs:string'], 0, 1, {u'type': u'xs:string', u'name': u'notes', u'minOccurs': u'0'}, None), - MemberSpec_('q10_settings', 'Q10Settings', 0, 1, {u'type': u'Q10Settings', u'name': u'q10Settings', u'minOccurs': u'0'}, None), - MemberSpec_('steady_state', 'HHVariable', 0, 0, {u'type': u'HHVariable', u'name': u'steadyState', u'minOccurs': u'1'}, None), - MemberSpec_('time_course', 'HHTime', 0, 0, {u'type': u'HHTime', u'name': u'timeCourse', u'minOccurs': u'1'}, None), + MemberSpec_('fractional_conductance', 'Nml2Quantity_none', 0, 0, {'use': 'required', 'name': 'fractional_conductance'}), + MemberSpec_('notes', ['Notes', 'xs:string'], 0, 1, {'minOccurs': '0', 'name': 'notes', 'type': 'xs:string'}, None), + MemberSpec_('q10_settings', 'Q10Settings', 0, 1, {'minOccurs': '0', 'name': 'q10Settings', 'type': 'Q10Settings'}, None), + MemberSpec_('steady_state', 'HHVariable', 0, 0, {'minOccurs': '1', 'name': 'steadyState', 'type': 'HHVariable'}, None), + MemberSpec_('time_course', 'HHTime', 0, 0, {'minOccurs': '1', 'name': 'timeCourse', 'type': 'HHTime'}, None), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, fractional_conductance=None, notes=None, q10_settings=None, steady_state=None, time_course=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, fractional_conductance=None, notes=None, q10_settings=None, steady_state=None, time_course=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(GateFractionalSubgate, self).__init__(neuro_lex_id, id, **kwargs_) + self.ns_prefix_ = None + super(globals().get("GateFractionalSubgate"), self).__init__(neuro_lex_id, id, **kwargs_) self.fractional_conductance = _cast(None, fractional_conductance) + self.fractional_conductance_nsprefix_ = None self.notes = notes self.validate_Notes(self.notes) + self.notes_nsprefix_ = None self.q10_settings = q10_settings + self.q10_settings_nsprefix_ = None self.steady_state = steady_state + self.steady_state_nsprefix_ = None self.time_course = time_course + self.time_course_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -13083,29 +17516,63 @@ def factory(*args_, **kwargs_): else: return GateFractionalSubgate(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_notes(self): + return self.notes + def set_notes(self, notes): + self.notes = notes + def get_q10Settings(self): + return self.q10_settings + def set_q10Settings(self, q10_settings): + self.q10_settings = q10_settings + def get_steadyState(self): + return self.steady_state + def set_steadyState(self, steady_state): + self.steady_state = steady_state + def get_timeCourse(self): + return self.time_course + def set_timeCourse(self, time_course): + self.time_course = time_course + def get_fractionalConductance(self): + return self.fractional_conductance + def set_fractionalConductance(self, fractional_conductance): + self.fractional_conductance = fractional_conductance def validate_Notes(self, value): + result = True # Validate 
type Notes, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False pass + return result def validate_Nml2Quantity_none(self, value): # Validate type Nml2Quantity_none, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_none_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_none_patterns_, )) - validate_Nml2Quantity_none_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_none_patterns_, )) + validate_Nml2Quantity_none_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?)$']] + def _hasContent(self): if ( self.notes is not None or self.q10_settings is not None or self.steady_state is not None or self.time_course is not None or - super(GateFractionalSubgate, self).hasContent_() + super(GateFractionalSubgate, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GateFractionalSubgate', pretty_print=True): + def export(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='GateFractionalSubgate', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('GateFractionalSubgate') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ @@ -13113,100 +17580,120 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='G eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'GateFractionalSubgate': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateFractionalSubgate') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateFractionalSubgate') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='GateFractionalSubgate', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='GateFractionalSubgate', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, 
namespaceprefix_='', name_='GateFractionalSubgate'): - super(GateFractionalSubgate, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateFractionalSubgate') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GateFractionalSubgate'): + super(GateFractionalSubgate, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateFractionalSubgate') if self.fractional_conductance is not None and 'fractional_conductance' not in already_processed: already_processed.add('fractional_conductance') - outfile.write(' fractionalConductance=%s' % (quote_attrib(self.fractional_conductance), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GateFractionalSubgate', fromsubclass_=False, pretty_print=True): - super(GateFractionalSubgate, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write(' fractionalConductance=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.fractional_conductance), input_name='fractionalConductance')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='GateFractionalSubgate', fromsubclass_=False, pretty_print=True): + super(GateFractionalSubgate, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) if pretty_print: eol_ = '\n' else: eol_ = '' if self.notes is not None: + namespaceprefix_ = self.notes_nsprefix_ + ':' if (UseCapturedNS_ and self.notes_nsprefix_) else '' showIndent(outfile, level, pretty_print) outfile.write('<%snotes>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.notes), input_name='notes')), namespaceprefix_ , eol_)) if self.q10_settings is not None: + namespaceprefix_ = self.q10_settings_nsprefix_ + ':' if (UseCapturedNS_ and self.q10_settings_nsprefix_) else '' self.q10_settings.export(outfile, level, namespaceprefix_, namespacedef_='', name_='q10Settings', pretty_print=pretty_print) if self.steady_state is not None: + namespaceprefix_ = self.steady_state_nsprefix_ + ':' if (UseCapturedNS_ and self.steady_state_nsprefix_) else '' self.steady_state.export(outfile, level, namespaceprefix_, namespacedef_='', name_='steadyState', pretty_print=pretty_print) if self.time_course is not None: + namespaceprefix_ = self.time_course_nsprefix_ + ':' if (UseCapturedNS_ and self.time_course_nsprefix_) else '' self.time_course.export(outfile, level, namespaceprefix_, namespacedef_='', name_='timeCourse', pretty_print=pretty_print) - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('fractionalConductance', node) if value is not None and 'fractionalConductance' not in already_processed: already_processed.add('fractionalConductance') 
self.fractional_conductance = value self.validate_Nml2Quantity_none(self.fractional_conductance) # validate type Nml2Quantity_none - super(GateFractionalSubgate, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + super(GateFractionalSubgate, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'notes': - notes_ = child_.text - notes_ = self.gds_validate_string(notes_, node, 'notes') - self.notes = notes_ + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'notes') + value_ = self.gds_validate_string(value_, node, 'notes') + self.notes = value_ + self.notes_nsprefix_ = child_.prefix # validate type Notes self.validate_Notes(self.notes) elif nodeName_ == 'q10Settings': obj_ = Q10Settings.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.q10_settings = obj_ obj_.original_tagname_ = 'q10Settings' elif nodeName_ == 'steadyState': obj_ = HHVariable.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.steady_state = obj_ obj_.original_tagname_ = 'steadyState' elif nodeName_ == 'timeCourse': obj_ = HHTime.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.time_course = obj_ obj_.original_tagname_ = 'timeCourse' - super(GateFractionalSubgate, self).buildChildren(child_, node, nodeName_, True) + super(GateFractionalSubgate, self)._buildChildren(child_, node, nodeName_, True) # end class GateFractionalSubgate class GateFractional(Base): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('instances', 'PositiveInteger', 0, 0, {'use': u'required'}), - MemberSpec_('notes', ['Notes', 'xs:string'], 0, 1, {u'type': u'xs:string', u'name': u'notes', u'minOccurs': u'0'}, None), - MemberSpec_('q10_settings', 'Q10Settings', 0, 1, {u'type': u'Q10Settings', u'name': u'q10Settings', u'minOccurs': u'0'}, None), - MemberSpec_('sub_gates', 'GateFractionalSubgate', 1, 0, {u'maxOccurs': u'unbounded', u'type': u'GateFractionalSubgate', u'name': u'subGate', u'minOccurs': u'1'}, None), + MemberSpec_('instances', 'PositiveInteger', 0, 0, {'use': 'required', 'name': 'instances'}), + MemberSpec_('notes', ['Notes', 'xs:string'], 0, 1, {'minOccurs': '0', 'name': 'notes', 'type': 'xs:string'}, None), + MemberSpec_('q10_settings', 'Q10Settings', 0, 1, {'minOccurs': '0', 'name': 'q10Settings', 'type': 'Q10Settings'}, None), + MemberSpec_('sub_gates', 'GateFractionalSubgate', 1, 0, {'maxOccurs': 'unbounded', 'minOccurs': '1', 'name': 'subGate', 'type': 'GateFractionalSubgate'}, None), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, instances=None, notes=None, q10_settings=None, sub_gates=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, instances=None, notes=None, q10_settings=None, sub_gates=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(GateFractional, self).__init__(neuro_lex_id, id, **kwargs_) + self.ns_prefix_ = None + super(globals().get("GateFractional"), self).__init__(neuro_lex_id, id, **kwargs_) self.instances = _cast(int, instances) + self.instances_nsprefix_ = None self.notes = notes self.validate_Notes(self.notes) + 
self.notes_nsprefix_ = None self.q10_settings = q10_settings + self.q10_settings_nsprefix_ = None if sub_gates is None: self.sub_gates = [] else: self.sub_gates = sub_gates + self.sub_gates_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -13218,25 +17705,61 @@ def factory(*args_, **kwargs_): else: return GateFractional(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_notes(self): + return self.notes + def set_notes(self, notes): + self.notes = notes + def get_q10Settings(self): + return self.q10_settings + def set_q10Settings(self, q10_settings): + self.q10_settings = q10_settings + def get_subGate(self): + return self.sub_gates + def set_subGate(self, sub_gates): + self.sub_gates = sub_gates + def add_subGate(self, value): + self.sub_gates.append(value) + def insert_subGate_at(self, index, value): + self.sub_gates.insert(index, value) + def replace_subGate_at(self, index, value): + self.sub_gates[index] = value + def get_instances(self): + return self.instances + def set_instances(self, instances): + self.instances = instances def validate_Notes(self, value): + result = True # Validate type Notes, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False pass + return result def validate_PositiveInteger(self, value): # Validate type PositiveInteger, a restriction on xs:positiveInteger. 
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, int): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (int)' % {"value": value, "lineno": lineno, }) + return False pass - def hasContent_(self): + def _hasContent(self): if ( self.notes is not None or self.q10_settings is not None or self.sub_gates or - super(GateFractional, self).hasContent_() + super(GateFractional, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GateFractional', pretty_print=True): + def export(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='GateFractional', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('GateFractional') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ @@ -13244,93 +17767,108 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='G eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'GateFractional': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateFractional') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateFractional') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='GateFractional', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='GateFractional', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GateFractional'): - super(GateFractional, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateFractional') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GateFractional'): + super(GateFractional, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateFractional') if self.instances is not None and 'instances' not in already_processed: already_processed.add('instances') - outfile.write(' instances=%s' % (quote_attrib(self.instances), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GateFractional', fromsubclass_=False, pretty_print=True): - super(GateFractional, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write(' instances="%s"' % self.gds_format_integer(self.instances, input_name='instances')) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='GateFractional', fromsubclass_=False, pretty_print=True): + super(GateFractional, self)._exportChildren(outfile, level, namespaceprefix_, 
namespacedef_, name_, True, pretty_print=pretty_print) if pretty_print: eol_ = '\n' else: eol_ = '' if self.notes is not None: + namespaceprefix_ = self.notes_nsprefix_ + ':' if (UseCapturedNS_ and self.notes_nsprefix_) else '' showIndent(outfile, level, pretty_print) outfile.write('<%snotes>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.notes), input_name='notes')), namespaceprefix_ , eol_)) if self.q10_settings is not None: + namespaceprefix_ = self.q10_settings_nsprefix_ + ':' if (UseCapturedNS_ and self.q10_settings_nsprefix_) else '' self.q10_settings.export(outfile, level, namespaceprefix_, namespacedef_='', name_='q10Settings', pretty_print=pretty_print) for subGate_ in self.sub_gates: + namespaceprefix_ = self.sub_gates_nsprefix_ + ':' if (UseCapturedNS_ and self.sub_gates_nsprefix_) else '' subGate_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='subGate', pretty_print=pretty_print) - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('instances', node) if value is not None and 'instances' not in already_processed: already_processed.add('instances') - try: - self.instances = int(value) - except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + self.instances = self.gds_parse_integer(value, node, 'instances') if self.instances <= 0: raise_parse_error(node, 'Invalid PositiveInteger') self.validate_PositiveInteger(self.instances) # validate type PositiveInteger - super(GateFractional, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + super(GateFractional, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'notes': - notes_ = child_.text - notes_ = self.gds_validate_string(notes_, node, 'notes') - self.notes = notes_ + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'notes') + value_ = self.gds_validate_string(value_, node, 'notes') + self.notes = value_ + self.notes_nsprefix_ = child_.prefix # validate type Notes self.validate_Notes(self.notes) elif nodeName_ == 'q10Settings': obj_ = Q10Settings.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.q10_settings = obj_ obj_.original_tagname_ = 'q10Settings' elif nodeName_ == 'subGate': obj_ = GateFractionalSubgate.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.sub_gates.append(obj_) obj_.original_tagname_ = 'subGate' - super(GateFractional, self).buildChildren(child_, node, nodeName_, True) + super(GateFractional, self)._buildChildren(child_, node, nodeName_, True) # end class GateFractional class GateHHInstantaneous(Base): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - 
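[Editor's note, not part of the patch] The regenerated classes rename the internal serialisation hooks (exportAttributes becomes _exportAttributes, exportChildren becomes _exportChildren, hasContent_ becomes _hasContent, and likewise for the build* helpers) while the public export() and build() entry points keep their names. A minimal usage sketch of that unchanged public API, assuming the generated classes are re-exported from the neuroml package and take the usual generateDS keyword constructor (the fractional_conductance keyword for GateFractionalSubgate is inferred from its member list, not shown verbatim in this hunk):

    import sys
    from neuroml import GateFractional, GateFractionalSubgate

    # Build a fractional gate with one sub-gate; add_subGate() is one of the
    # new list helpers added by this regeneration.
    gate = GateFractional(id="h", instances=1, notes="fractional gate")
    gate.add_subGate(GateFractionalSubgate(id="sub0", fractional_conductance="0.7"))

    # The public entry point keeps its old shape: export(outfile, level, ...)
    gate.export(sys.stdout, 0)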
MemberSpec_('instances', 'PositiveInteger', 0, 0, {'use': u'required'}), - MemberSpec_('notes', ['Notes', 'xs:string'], 0, 1, {u'type': u'xs:string', u'name': u'notes', u'minOccurs': u'0'}, None), - MemberSpec_('steady_state', 'HHVariable', 0, 0, {u'type': u'HHVariable', u'name': u'steadyState', u'minOccurs': u'1'}, None), + MemberSpec_('instances', 'PositiveInteger', 0, 0, {'use': 'required', 'name': 'instances'}), + MemberSpec_('notes', ['Notes', 'xs:string'], 0, 1, {'minOccurs': '0', 'name': 'notes', 'type': 'xs:string'}, None), + MemberSpec_('steady_state', 'HHVariable', 0, 0, {'minOccurs': '1', 'name': 'steadyState', 'type': 'HHVariable'}, None), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, instances=None, notes=None, steady_state=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, instances=None, notes=None, steady_state=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(GateHHInstantaneous, self).__init__(neuro_lex_id, id, **kwargs_) + self.ns_prefix_ = None + super(globals().get("GateHHInstantaneous"), self).__init__(neuro_lex_id, id, **kwargs_) self.instances = _cast(int, instances) + self.instances_nsprefix_ = None self.notes = notes self.validate_Notes(self.notes) + self.notes_nsprefix_ = None self.steady_state = steady_state + self.steady_state_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -13342,24 +17880,50 @@ def factory(*args_, **kwargs_): else: return GateHHInstantaneous(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_notes(self): + return self.notes + def set_notes(self, notes): + self.notes = notes + def get_steadyState(self): + return self.steady_state + def set_steadyState(self, steady_state): + self.steady_state = steady_state + def get_instances(self): + return self.instances + def set_instances(self, instances): + self.instances = instances def validate_Notes(self, value): + result = True # Validate type Notes, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False pass + return result def validate_PositiveInteger(self, value): # Validate type PositiveInteger, a restriction on xs:positiveInteger. 
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, int): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (int)' % {"value": value, "lineno": lineno, }) + return False pass - def hasContent_(self): + def _hasContent(self): if ( self.notes is not None or self.steady_state is not None or - super(GateHHInstantaneous, self).hasContent_() + super(GateHHInstantaneous, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GateHHInstantaneous', pretty_print=True): + def export(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='GateHHInstantaneous', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('GateHHInstantaneous') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ @@ -13367,92 +17931,109 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='G eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'GateHHInstantaneous': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateHHInstantaneous') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateHHInstantaneous') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='GateHHInstantaneous', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='GateHHInstantaneous', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GateHHInstantaneous'): - super(GateHHInstantaneous, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateHHInstantaneous') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GateHHInstantaneous'): + super(GateHHInstantaneous, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateHHInstantaneous') if self.instances is not None and 'instances' not in already_processed: already_processed.add('instances') - outfile.write(' instances=%s' % (quote_attrib(self.instances), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GateHHInstantaneous', fromsubclass_=False, pretty_print=True): - super(GateHHInstantaneous, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write(' instances="%s"' % self.gds_format_integer(self.instances, input_name='instances')) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='GateHHInstantaneous', fromsubclass_=False, pretty_print=True): + 
super(GateHHInstantaneous, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) if pretty_print: eol_ = '\n' else: eol_ = '' if self.notes is not None: + namespaceprefix_ = self.notes_nsprefix_ + ':' if (UseCapturedNS_ and self.notes_nsprefix_) else '' showIndent(outfile, level, pretty_print) outfile.write('<%snotes>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.notes), input_name='notes')), namespaceprefix_ , eol_)) if self.steady_state is not None: + namespaceprefix_ = self.steady_state_nsprefix_ + ':' if (UseCapturedNS_ and self.steady_state_nsprefix_) else '' self.steady_state.export(outfile, level, namespaceprefix_, namespacedef_='', name_='steadyState', pretty_print=pretty_print) - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('instances', node) if value is not None and 'instances' not in already_processed: already_processed.add('instances') - try: - self.instances = int(value) - except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + self.instances = self.gds_parse_integer(value, node, 'instances') if self.instances <= 0: raise_parse_error(node, 'Invalid PositiveInteger') self.validate_PositiveInteger(self.instances) # validate type PositiveInteger - super(GateHHInstantaneous, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + super(GateHHInstantaneous, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'notes': - notes_ = child_.text - notes_ = self.gds_validate_string(notes_, node, 'notes') - self.notes = notes_ + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'notes') + value_ = self.gds_validate_string(value_, node, 'notes') + self.notes = value_ + self.notes_nsprefix_ = child_.prefix # validate type Notes self.validate_Notes(self.notes) elif nodeName_ == 'steadyState': obj_ = HHVariable.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.steady_state = obj_ obj_.original_tagname_ = 'steadyState' - super(GateHHInstantaneous, self).buildChildren(child_, node, nodeName_, True) + super(GateHHInstantaneous, self)._buildChildren(child_, node, nodeName_, True) # end class GateHHInstantaneous class GateHHRatesInf(Base): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('instances', 'PositiveInteger', 0, 0, {'use': u'required'}), - MemberSpec_('notes', ['Notes', 'xs:string'], 0, 1, {u'type': u'xs:string', u'name': u'notes', u'minOccurs': u'0'}, None), - MemberSpec_('q10_settings', 'Q10Settings', 0, 1, {u'type': u'Q10Settings', u'name': u'q10Settings', u'minOccurs': u'0'}, None), - MemberSpec_('forward_rate', 'HHRate', 0, 0, 
{u'type': u'HHRate', u'name': u'forwardRate', u'minOccurs': u'1'}, None), - MemberSpec_('reverse_rate', 'HHRate', 0, 0, {u'type': u'HHRate', u'name': u'reverseRate', u'minOccurs': u'1'}, None), - MemberSpec_('steady_state', 'HHVariable', 0, 0, {u'type': u'HHVariable', u'name': u'steadyState', u'minOccurs': u'1'}, None), + MemberSpec_('instances', 'PositiveInteger', 0, 0, {'use': 'required', 'name': 'instances'}), + MemberSpec_('notes', ['Notes', 'xs:string'], 0, 1, {'minOccurs': '0', 'name': 'notes', 'type': 'xs:string'}, None), + MemberSpec_('q10_settings', 'Q10Settings', 0, 1, {'minOccurs': '0', 'name': 'q10Settings', 'type': 'Q10Settings'}, None), + MemberSpec_('forward_rate', 'HHRate', 0, 0, {'minOccurs': '1', 'name': 'forwardRate', 'type': 'HHRate'}, None), + MemberSpec_('reverse_rate', 'HHRate', 0, 0, {'minOccurs': '1', 'name': 'reverseRate', 'type': 'HHRate'}, None), + MemberSpec_('steady_state', 'HHVariable', 0, 0, {'minOccurs': '1', 'name': 'steadyState', 'type': 'HHVariable'}, None), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, instances=None, notes=None, q10_settings=None, forward_rate=None, reverse_rate=None, steady_state=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, instances=None, notes=None, q10_settings=None, forward_rate=None, reverse_rate=None, steady_state=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(GateHHRatesInf, self).__init__(neuro_lex_id, id, **kwargs_) + self.ns_prefix_ = None + super(globals().get("GateHHRatesInf"), self).__init__(neuro_lex_id, id, **kwargs_) self.instances = _cast(int, instances) + self.instances_nsprefix_ = None self.notes = notes self.validate_Notes(self.notes) + self.notes_nsprefix_ = None self.q10_settings = q10_settings + self.q10_settings_nsprefix_ = None self.forward_rate = forward_rate + self.forward_rate_nsprefix_ = None self.reverse_rate = reverse_rate + self.reverse_rate_nsprefix_ = None self.steady_state = steady_state + self.steady_state_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -13464,27 +18045,65 @@ def factory(*args_, **kwargs_): else: return GateHHRatesInf(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_notes(self): + return self.notes + def set_notes(self, notes): + self.notes = notes + def get_q10Settings(self): + return self.q10_settings + def set_q10Settings(self, q10_settings): + self.q10_settings = q10_settings + def get_forwardRate(self): + return self.forward_rate + def set_forwardRate(self, forward_rate): + self.forward_rate = forward_rate + def get_reverseRate(self): + return self.reverse_rate + def set_reverseRate(self, reverse_rate): + self.reverse_rate = reverse_rate + def get_steadyState(self): + return self.steady_state + def set_steadyState(self, steady_state): + self.steady_state = steady_state + def get_instances(self): + return self.instances + def set_instances(self, instances): + self.instances = instances def validate_Notes(self, value): + result = True # Validate type Notes, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False pass + return result def validate_PositiveInteger(self, value): # Validate type PositiveInteger, a restriction on xs:positiveInteger. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, int): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (int)' % {"value": value, "lineno": lineno, }) + return False pass - def hasContent_(self): + def _hasContent(self): if ( self.notes is not None or self.q10_settings is not None or self.forward_rate is not None or self.reverse_rate is not None or self.steady_state is not None or - super(GateHHRatesInf, self).hasContent_() + super(GateHHRatesInf, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GateHHRatesInf', pretty_print=True): + def export(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='GateHHRatesInf', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('GateHHRatesInf') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ @@ -13492,113 +18111,133 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='G eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'GateHHRatesInf': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateHHRatesInf') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateHHRatesInf') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='GateHHRatesInf', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='GateHHRatesInf', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GateHHRatesInf'): - super(GateHHRatesInf, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateHHRatesInf') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GateHHRatesInf'): + super(GateHHRatesInf, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateHHRatesInf') if self.instances is not None and 'instances' not in already_processed: already_processed.add('instances') - outfile.write(' instances=%s' % (quote_attrib(self.instances), )) - def exportChildren(self, outfile, level, namespaceprefix_='', 
namespacedef_='', name_='GateHHRatesInf', fromsubclass_=False, pretty_print=True): - super(GateHHRatesInf, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write(' instances="%s"' % self.gds_format_integer(self.instances, input_name='instances')) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='GateHHRatesInf', fromsubclass_=False, pretty_print=True): + super(GateHHRatesInf, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) if pretty_print: eol_ = '\n' else: eol_ = '' if self.notes is not None: + namespaceprefix_ = self.notes_nsprefix_ + ':' if (UseCapturedNS_ and self.notes_nsprefix_) else '' showIndent(outfile, level, pretty_print) outfile.write('<%snotes>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.notes), input_name='notes')), namespaceprefix_ , eol_)) if self.q10_settings is not None: + namespaceprefix_ = self.q10_settings_nsprefix_ + ':' if (UseCapturedNS_ and self.q10_settings_nsprefix_) else '' self.q10_settings.export(outfile, level, namespaceprefix_, namespacedef_='', name_='q10Settings', pretty_print=pretty_print) if self.forward_rate is not None: + namespaceprefix_ = self.forward_rate_nsprefix_ + ':' if (UseCapturedNS_ and self.forward_rate_nsprefix_) else '' self.forward_rate.export(outfile, level, namespaceprefix_, namespacedef_='', name_='forwardRate', pretty_print=pretty_print) if self.reverse_rate is not None: + namespaceprefix_ = self.reverse_rate_nsprefix_ + ':' if (UseCapturedNS_ and self.reverse_rate_nsprefix_) else '' self.reverse_rate.export(outfile, level, namespaceprefix_, namespacedef_='', name_='reverseRate', pretty_print=pretty_print) if self.steady_state is not None: + namespaceprefix_ = self.steady_state_nsprefix_ + ':' if (UseCapturedNS_ and self.steady_state_nsprefix_) else '' self.steady_state.export(outfile, level, namespaceprefix_, namespacedef_='', name_='steadyState', pretty_print=pretty_print) - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('instances', node) if value is not None and 'instances' not in already_processed: already_processed.add('instances') - try: - self.instances = int(value) - except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + self.instances = self.gds_parse_integer(value, node, 'instances') if self.instances <= 0: raise_parse_error(node, 'Invalid PositiveInteger') self.validate_PositiveInteger(self.instances) # validate type PositiveInteger - super(GateHHRatesInf, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + super(GateHHRatesInf, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, 
nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'notes': - notes_ = child_.text - notes_ = self.gds_validate_string(notes_, node, 'notes') - self.notes = notes_ + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'notes') + value_ = self.gds_validate_string(value_, node, 'notes') + self.notes = value_ + self.notes_nsprefix_ = child_.prefix # validate type Notes self.validate_Notes(self.notes) elif nodeName_ == 'q10Settings': obj_ = Q10Settings.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.q10_settings = obj_ obj_.original_tagname_ = 'q10Settings' elif nodeName_ == 'forwardRate': obj_ = HHRate.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.forward_rate = obj_ obj_.original_tagname_ = 'forwardRate' elif nodeName_ == 'reverseRate': obj_ = HHRate.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.reverse_rate = obj_ obj_.original_tagname_ = 'reverseRate' elif nodeName_ == 'steadyState': obj_ = HHVariable.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.steady_state = obj_ obj_.original_tagname_ = 'steadyState' - super(GateHHRatesInf, self).buildChildren(child_, node, nodeName_, True) + super(GateHHRatesInf, self)._buildChildren(child_, node, nodeName_, True) # end class GateHHRatesInf class GateHHRatesTau(Base): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('instances', 'PositiveInteger', 0, 0, {'use': u'required'}), - MemberSpec_('notes', ['Notes', 'xs:string'], 0, 1, {u'type': u'xs:string', u'name': u'notes', u'minOccurs': u'0'}, None), - MemberSpec_('q10_settings', 'Q10Settings', 0, 1, {u'type': u'Q10Settings', u'name': u'q10Settings', u'minOccurs': u'0'}, None), - MemberSpec_('forward_rate', 'HHRate', 0, 0, {u'type': u'HHRate', u'name': u'forwardRate', u'minOccurs': u'1'}, None), - MemberSpec_('reverse_rate', 'HHRate', 0, 0, {u'type': u'HHRate', u'name': u'reverseRate', u'minOccurs': u'1'}, None), - MemberSpec_('time_course', 'HHTime', 0, 0, {u'type': u'HHTime', u'name': u'timeCourse', u'minOccurs': u'1'}, None), + MemberSpec_('instances', 'PositiveInteger', 0, 0, {'use': 'required', 'name': 'instances'}), + MemberSpec_('notes', ['Notes', 'xs:string'], 0, 1, {'minOccurs': '0', 'name': 'notes', 'type': 'xs:string'}, None), + MemberSpec_('q10_settings', 'Q10Settings', 0, 1, {'minOccurs': '0', 'name': 'q10Settings', 'type': 'Q10Settings'}, None), + MemberSpec_('forward_rate', 'HHRate', 0, 0, {'minOccurs': '1', 'name': 'forwardRate', 'type': 'HHRate'}, None), + MemberSpec_('reverse_rate', 'HHRate', 0, 0, {'minOccurs': '1', 'name': 'reverseRate', 'type': 'HHRate'}, None), + MemberSpec_('time_course', 'HHTime', 0, 0, {'minOccurs': '1', 'name': 'timeCourse', 'type': 'HHTime'}, None), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, instances=None, notes=None, q10_settings=None, forward_rate=None, reverse_rate=None, time_course=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, instances=None, notes=None, q10_settings=None, forward_rate=None, reverse_rate=None, time_course=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(GateHHRatesTau, self).__init__(neuro_lex_id, id, **kwargs_) + 
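[Editor's note, not part of the patch] The central change in these hunks is that build() and _buildChildren() now accept an optional gds_collector_ and forward it to every child's build() call; the generated code only ever invokes add_message() on that object, so any collector exposing that method can accumulate validation complaints instead of raising. A sketch under that assumption (the XML snippet, the MessageCollector class, and the lxml usage are illustrative, not taken from the patch):

    from lxml import etree
    from neuroml import GateHHRatesInf

    class MessageCollector:
        # Minimal stand-in: the generated parser only calls add_message().
        def __init__(self):
            self.messages = []
        def add_message(self, msg):
            self.messages.append(msg)

    xml = b'<gateHHratesInf id="n" instances="4"><notes>example gate</notes></gateHHratesInf>'
    collector = MessageCollector()
    gate = GateHHRatesInf().build(etree.fromstring(xml), gds_collector_=collector)
    print(gate.get_instances(), collector.messages)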
self.ns_prefix_ = None + super(globals().get("GateHHRatesTau"), self).__init__(neuro_lex_id, id, **kwargs_) self.instances = _cast(int, instances) + self.instances_nsprefix_ = None self.notes = notes self.validate_Notes(self.notes) + self.notes_nsprefix_ = None self.q10_settings = q10_settings + self.q10_settings_nsprefix_ = None self.forward_rate = forward_rate + self.forward_rate_nsprefix_ = None self.reverse_rate = reverse_rate + self.reverse_rate_nsprefix_ = None self.time_course = time_course + self.time_course_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -13610,27 +18249,65 @@ def factory(*args_, **kwargs_): else: return GateHHRatesTau(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_notes(self): + return self.notes + def set_notes(self, notes): + self.notes = notes + def get_q10Settings(self): + return self.q10_settings + def set_q10Settings(self, q10_settings): + self.q10_settings = q10_settings + def get_forwardRate(self): + return self.forward_rate + def set_forwardRate(self, forward_rate): + self.forward_rate = forward_rate + def get_reverseRate(self): + return self.reverse_rate + def set_reverseRate(self, reverse_rate): + self.reverse_rate = reverse_rate + def get_timeCourse(self): + return self.time_course + def set_timeCourse(self, time_course): + self.time_course = time_course + def get_instances(self): + return self.instances + def set_instances(self, instances): + self.instances = instances def validate_Notes(self, value): + result = True # Validate type Notes, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False pass + return result def validate_PositiveInteger(self, value): # Validate type PositiveInteger, a restriction on xs:positiveInteger. 
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, int): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (int)' % {"value": value, "lineno": lineno, }) + return False pass - def hasContent_(self): + def _hasContent(self): if ( self.notes is not None or self.q10_settings is not None or self.forward_rate is not None or self.reverse_rate is not None or self.time_course is not None or - super(GateHHRatesTau, self).hasContent_() + super(GateHHRatesTau, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GateHHRatesTau', pretty_print=True): + def export(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='GateHHRatesTau', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('GateHHRatesTau') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ @@ -13638,115 +18315,136 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='G eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'GateHHRatesTau': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateHHRatesTau') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateHHRatesTau') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='GateHHRatesTau', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='GateHHRatesTau', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GateHHRatesTau'): - super(GateHHRatesTau, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateHHRatesTau') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GateHHRatesTau'): + super(GateHHRatesTau, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateHHRatesTau') if self.instances is not None and 'instances' not in already_processed: already_processed.add('instances') - outfile.write(' instances=%s' % (quote_attrib(self.instances), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GateHHRatesTau', fromsubclass_=False, pretty_print=True): - super(GateHHRatesTau, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write(' instances="%s"' % self.gds_format_integer(self.instances, input_name='instances')) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='GateHHRatesTau', fromsubclass_=False, pretty_print=True): + 
super(GateHHRatesTau, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) if pretty_print: eol_ = '\n' else: eol_ = '' if self.notes is not None: + namespaceprefix_ = self.notes_nsprefix_ + ':' if (UseCapturedNS_ and self.notes_nsprefix_) else '' showIndent(outfile, level, pretty_print) outfile.write('<%snotes>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.notes), input_name='notes')), namespaceprefix_ , eol_)) if self.q10_settings is not None: + namespaceprefix_ = self.q10_settings_nsprefix_ + ':' if (UseCapturedNS_ and self.q10_settings_nsprefix_) else '' self.q10_settings.export(outfile, level, namespaceprefix_, namespacedef_='', name_='q10Settings', pretty_print=pretty_print) if self.forward_rate is not None: + namespaceprefix_ = self.forward_rate_nsprefix_ + ':' if (UseCapturedNS_ and self.forward_rate_nsprefix_) else '' self.forward_rate.export(outfile, level, namespaceprefix_, namespacedef_='', name_='forwardRate', pretty_print=pretty_print) if self.reverse_rate is not None: + namespaceprefix_ = self.reverse_rate_nsprefix_ + ':' if (UseCapturedNS_ and self.reverse_rate_nsprefix_) else '' self.reverse_rate.export(outfile, level, namespaceprefix_, namespacedef_='', name_='reverseRate', pretty_print=pretty_print) if self.time_course is not None: + namespaceprefix_ = self.time_course_nsprefix_ + ':' if (UseCapturedNS_ and self.time_course_nsprefix_) else '' self.time_course.export(outfile, level, namespaceprefix_, namespacedef_='', name_='timeCourse', pretty_print=pretty_print) - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('instances', node) if value is not None and 'instances' not in already_processed: already_processed.add('instances') - try: - self.instances = int(value) - except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + self.instances = self.gds_parse_integer(value, node, 'instances') if self.instances <= 0: raise_parse_error(node, 'Invalid PositiveInteger') self.validate_PositiveInteger(self.instances) # validate type PositiveInteger - super(GateHHRatesTau, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + super(GateHHRatesTau, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'notes': - notes_ = child_.text - notes_ = self.gds_validate_string(notes_, node, 'notes') - self.notes = notes_ + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'notes') + value_ = self.gds_validate_string(value_, node, 'notes') + self.notes = value_ + self.notes_nsprefix_ = child_.prefix # validate type Notes self.validate_Notes(self.notes) elif nodeName_ == 'q10Settings': obj_ = 
Q10Settings.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.q10_settings = obj_ obj_.original_tagname_ = 'q10Settings' elif nodeName_ == 'forwardRate': obj_ = HHRate.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.forward_rate = obj_ obj_.original_tagname_ = 'forwardRate' elif nodeName_ == 'reverseRate': obj_ = HHRate.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.reverse_rate = obj_ obj_.original_tagname_ = 'reverseRate' elif nodeName_ == 'timeCourse': obj_ = HHTime.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.time_course = obj_ obj_.original_tagname_ = 'timeCourse' - super(GateHHRatesTau, self).buildChildren(child_, node, nodeName_, True) + super(GateHHRatesTau, self)._buildChildren(child_, node, nodeName_, True) # end class GateHHRatesTau class GateHHRatesTauInf(Base): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('instances', 'PositiveInteger', 0, 0, {'use': u'required'}), - MemberSpec_('notes', ['Notes', 'xs:string'], 0, 1, {u'type': u'xs:string', u'name': u'notes', u'minOccurs': u'0'}, None), - MemberSpec_('q10_settings', 'Q10Settings', 0, 1, {u'type': u'Q10Settings', u'name': u'q10Settings', u'minOccurs': u'0'}, None), - MemberSpec_('forward_rate', 'HHRate', 0, 0, {u'type': u'HHRate', u'name': u'forwardRate', u'minOccurs': u'1'}, None), - MemberSpec_('reverse_rate', 'HHRate', 0, 0, {u'type': u'HHRate', u'name': u'reverseRate', u'minOccurs': u'1'}, None), - MemberSpec_('time_course', 'HHTime', 0, 0, {u'type': u'HHTime', u'name': u'timeCourse', u'minOccurs': u'1'}, None), - MemberSpec_('steady_state', 'HHVariable', 0, 0, {u'type': u'HHVariable', u'name': u'steadyState', u'minOccurs': u'1'}, None), + MemberSpec_('instances', 'PositiveInteger', 0, 0, {'use': 'required', 'name': 'instances'}), + MemberSpec_('notes', ['Notes', 'xs:string'], 0, 1, {'minOccurs': '0', 'name': 'notes', 'type': 'xs:string'}, None), + MemberSpec_('q10_settings', 'Q10Settings', 0, 1, {'minOccurs': '0', 'name': 'q10Settings', 'type': 'Q10Settings'}, None), + MemberSpec_('forward_rate', 'HHRate', 0, 0, {'minOccurs': '1', 'name': 'forwardRate', 'type': 'HHRate'}, None), + MemberSpec_('reverse_rate', 'HHRate', 0, 0, {'minOccurs': '1', 'name': 'reverseRate', 'type': 'HHRate'}, None), + MemberSpec_('time_course', 'HHTime', 0, 0, {'minOccurs': '1', 'name': 'timeCourse', 'type': 'HHTime'}, None), + MemberSpec_('steady_state', 'HHVariable', 0, 0, {'minOccurs': '1', 'name': 'steadyState', 'type': 'HHVariable'}, None), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, instances=None, notes=None, q10_settings=None, forward_rate=None, reverse_rate=None, time_course=None, steady_state=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, instances=None, notes=None, q10_settings=None, forward_rate=None, reverse_rate=None, time_course=None, steady_state=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(GateHHRatesTauInf, self).__init__(neuro_lex_id, id, **kwargs_) + self.ns_prefix_ = None + super(globals().get("GateHHRatesTauInf"), self).__init__(neuro_lex_id, id, **kwargs_) self.instances = _cast(int, instances) + self.instances_nsprefix_ = None self.notes = notes 
self.validate_Notes(self.notes) + self.notes_nsprefix_ = None self.q10_settings = q10_settings + self.q10_settings_nsprefix_ = None self.forward_rate = forward_rate + self.forward_rate_nsprefix_ = None self.reverse_rate = reverse_rate + self.reverse_rate_nsprefix_ = None self.time_course = time_course + self.time_course_nsprefix_ = None self.steady_state = steady_state + self.steady_state_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -13758,15 +18456,57 @@ def factory(*args_, **kwargs_): else: return GateHHRatesTauInf(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_notes(self): + return self.notes + def set_notes(self, notes): + self.notes = notes + def get_q10Settings(self): + return self.q10_settings + def set_q10Settings(self, q10_settings): + self.q10_settings = q10_settings + def get_forwardRate(self): + return self.forward_rate + def set_forwardRate(self, forward_rate): + self.forward_rate = forward_rate + def get_reverseRate(self): + return self.reverse_rate + def set_reverseRate(self, reverse_rate): + self.reverse_rate = reverse_rate + def get_timeCourse(self): + return self.time_course + def set_timeCourse(self, time_course): + self.time_course = time_course + def get_steadyState(self): + return self.steady_state + def set_steadyState(self, steady_state): + self.steady_state = steady_state + def get_instances(self): + return self.instances + def set_instances(self, instances): + self.instances = instances def validate_Notes(self, value): + result = True # Validate type Notes, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False pass + return result def validate_PositiveInteger(self, value): # Validate type PositiveInteger, a restriction on xs:positiveInteger. 
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, int): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (int)' % {"value": value, "lineno": lineno, }) + return False pass - def hasContent_(self): + def _hasContent(self): if ( self.notes is not None or self.q10_settings is not None or @@ -13774,12 +18514,12 @@ def hasContent_(self): self.reverse_rate is not None or self.time_course is not None or self.steady_state is not None or - super(GateHHRatesTauInf, self).hasContent_() + super(GateHHRatesTauInf, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GateHHRatesTauInf', pretty_print=True): + def export(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='GateHHRatesTauInf', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('GateHHRatesTauInf') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ @@ -13787,118 +18527,138 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='G eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'GateHHRatesTauInf': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateHHRatesTauInf') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateHHRatesTauInf') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='GateHHRatesTauInf', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='GateHHRatesTauInf', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GateHHRatesTauInf'): - super(GateHHRatesTauInf, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateHHRatesTauInf') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GateHHRatesTauInf'): + super(GateHHRatesTauInf, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateHHRatesTauInf') if self.instances is not None and 'instances' not in already_processed: already_processed.add('instances') - outfile.write(' instances=%s' % (quote_attrib(self.instances), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GateHHRatesTauInf', fromsubclass_=False, pretty_print=True): - super(GateHHRatesTauInf, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write(' instances="%s"' % self.gds_format_integer(self.instances, input_name='instances')) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_=' 
xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='GateHHRatesTauInf', fromsubclass_=False, pretty_print=True): + super(GateHHRatesTauInf, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) if pretty_print: eol_ = '\n' else: eol_ = '' if self.notes is not None: + namespaceprefix_ = self.notes_nsprefix_ + ':' if (UseCapturedNS_ and self.notes_nsprefix_) else '' showIndent(outfile, level, pretty_print) outfile.write('<%snotes>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.notes), input_name='notes')), namespaceprefix_ , eol_)) if self.q10_settings is not None: + namespaceprefix_ = self.q10_settings_nsprefix_ + ':' if (UseCapturedNS_ and self.q10_settings_nsprefix_) else '' self.q10_settings.export(outfile, level, namespaceprefix_, namespacedef_='', name_='q10Settings', pretty_print=pretty_print) if self.forward_rate is not None: + namespaceprefix_ = self.forward_rate_nsprefix_ + ':' if (UseCapturedNS_ and self.forward_rate_nsprefix_) else '' self.forward_rate.export(outfile, level, namespaceprefix_, namespacedef_='', name_='forwardRate', pretty_print=pretty_print) if self.reverse_rate is not None: + namespaceprefix_ = self.reverse_rate_nsprefix_ + ':' if (UseCapturedNS_ and self.reverse_rate_nsprefix_) else '' self.reverse_rate.export(outfile, level, namespaceprefix_, namespacedef_='', name_='reverseRate', pretty_print=pretty_print) if self.time_course is not None: + namespaceprefix_ = self.time_course_nsprefix_ + ':' if (UseCapturedNS_ and self.time_course_nsprefix_) else '' self.time_course.export(outfile, level, namespaceprefix_, namespacedef_='', name_='timeCourse', pretty_print=pretty_print) if self.steady_state is not None: + namespaceprefix_ = self.steady_state_nsprefix_ + ':' if (UseCapturedNS_ and self.steady_state_nsprefix_) else '' self.steady_state.export(outfile, level, namespaceprefix_, namespacedef_='', name_='steadyState', pretty_print=pretty_print) - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('instances', node) if value is not None and 'instances' not in already_processed: already_processed.add('instances') - try: - self.instances = int(value) - except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + self.instances = self.gds_parse_integer(value, node, 'instances') if self.instances <= 0: raise_parse_error(node, 'Invalid PositiveInteger') self.validate_PositiveInteger(self.instances) # validate type PositiveInteger - super(GateHHRatesTauInf, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + super(GateHHRatesTauInf, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'notes': - notes_ = child_.text 
- notes_ = self.gds_validate_string(notes_, node, 'notes') - self.notes = notes_ + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'notes') + value_ = self.gds_validate_string(value_, node, 'notes') + self.notes = value_ + self.notes_nsprefix_ = child_.prefix # validate type Notes self.validate_Notes(self.notes) elif nodeName_ == 'q10Settings': obj_ = Q10Settings.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.q10_settings = obj_ obj_.original_tagname_ = 'q10Settings' elif nodeName_ == 'forwardRate': obj_ = HHRate.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.forward_rate = obj_ obj_.original_tagname_ = 'forwardRate' elif nodeName_ == 'reverseRate': obj_ = HHRate.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.reverse_rate = obj_ obj_.original_tagname_ = 'reverseRate' elif nodeName_ == 'timeCourse': obj_ = HHTime.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.time_course = obj_ obj_.original_tagname_ = 'timeCourse' elif nodeName_ == 'steadyState': obj_ = HHVariable.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.steady_state = obj_ obj_.original_tagname_ = 'steadyState' - super(GateHHRatesTauInf, self).buildChildren(child_, node, nodeName_, True) + super(GateHHRatesTauInf, self)._buildChildren(child_, node, nodeName_, True) # end class GateHHRatesTauInf class GateHHTauInf(Base): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('instances', 'PositiveInteger', 0, 0, {'use': u'required'}), - MemberSpec_('notes', ['Notes', 'xs:string'], 0, 1, {u'type': u'xs:string', u'name': u'notes', u'minOccurs': u'0'}, None), - MemberSpec_('q10_settings', 'Q10Settings', 0, 1, {u'type': u'Q10Settings', u'name': u'q10Settings', u'minOccurs': u'0'}, None), - MemberSpec_('time_course', 'HHTime', 0, 0, {u'type': u'HHTime', u'name': u'timeCourse', u'minOccurs': u'1'}, None), - MemberSpec_('steady_state', 'HHVariable', 0, 0, {u'type': u'HHVariable', u'name': u'steadyState', u'minOccurs': u'1'}, None), + MemberSpec_('instances', 'PositiveInteger', 0, 0, {'use': 'required', 'name': 'instances'}), + MemberSpec_('notes', ['Notes', 'xs:string'], 0, 1, {'minOccurs': '0', 'name': 'notes', 'type': 'xs:string'}, None), + MemberSpec_('q10_settings', 'Q10Settings', 0, 1, {'minOccurs': '0', 'name': 'q10Settings', 'type': 'Q10Settings'}, None), + MemberSpec_('time_course', 'HHTime', 0, 0, {'minOccurs': '1', 'name': 'timeCourse', 'type': 'HHTime'}, None), + MemberSpec_('steady_state', 'HHVariable', 0, 0, {'minOccurs': '1', 'name': 'steadyState', 'type': 'HHVariable'}, None), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, instances=None, notes=None, q10_settings=None, time_course=None, steady_state=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, instances=None, notes=None, q10_settings=None, time_course=None, steady_state=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(GateHHTauInf, self).__init__(neuro_lex_id, id, **kwargs_) + self.ns_prefix_ = None + super(globals().get("GateHHTauInf"), self).__init__(neuro_lex_id, id, **kwargs_) self.instances = _cast(int, instances) + 
self.instances_nsprefix_ = None self.notes = notes self.validate_Notes(self.notes) + self.notes_nsprefix_ = None self.q10_settings = q10_settings + self.q10_settings_nsprefix_ = None self.time_course = time_course + self.time_course_nsprefix_ = None self.steady_state = steady_state + self.steady_state_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -13910,26 +18670,60 @@ def factory(*args_, **kwargs_): else: return GateHHTauInf(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_notes(self): + return self.notes + def set_notes(self, notes): + self.notes = notes + def get_q10Settings(self): + return self.q10_settings + def set_q10Settings(self, q10_settings): + self.q10_settings = q10_settings + def get_timeCourse(self): + return self.time_course + def set_timeCourse(self, time_course): + self.time_course = time_course + def get_steadyState(self): + return self.steady_state + def set_steadyState(self, steady_state): + self.steady_state = steady_state + def get_instances(self): + return self.instances + def set_instances(self, instances): + self.instances = instances def validate_Notes(self, value): + result = True # Validate type Notes, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False pass + return result def validate_PositiveInteger(self, value): # Validate type PositiveInteger, a restriction on xs:positiveInteger. 
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, int): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (int)' % {"value": value, "lineno": lineno, }) + return False pass - def hasContent_(self): + def _hasContent(self): if ( self.notes is not None or self.q10_settings is not None or self.time_course is not None or self.steady_state is not None or - super(GateHHTauInf, self).hasContent_() + super(GateHHTauInf, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GateHHTauInf', pretty_print=True): + def export(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='GateHHTauInf', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('GateHHTauInf') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ @@ -13937,104 +18731,122 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='G eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'GateHHTauInf': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateHHTauInf') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateHHTauInf') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='GateHHTauInf', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='GateHHTauInf', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GateHHTauInf'): - super(GateHHTauInf, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateHHTauInf') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GateHHTauInf'): + super(GateHHTauInf, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateHHTauInf') if self.instances is not None and 'instances' not in already_processed: already_processed.add('instances') - outfile.write(' instances=%s' % (quote_attrib(self.instances), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GateHHTauInf', fromsubclass_=False, pretty_print=True): - super(GateHHTauInf, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write(' instances="%s"' % self.gds_format_integer(self.instances, input_name='instances')) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='GateHHTauInf', fromsubclass_=False, pretty_print=True): + super(GateHHTauInf, self)._exportChildren(outfile, level, namespaceprefix_, 
namespacedef_, name_, True, pretty_print=pretty_print) if pretty_print: eol_ = '\n' else: eol_ = '' if self.notes is not None: + namespaceprefix_ = self.notes_nsprefix_ + ':' if (UseCapturedNS_ and self.notes_nsprefix_) else '' showIndent(outfile, level, pretty_print) outfile.write('<%snotes>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.notes), input_name='notes')), namespaceprefix_ , eol_)) if self.q10_settings is not None: + namespaceprefix_ = self.q10_settings_nsprefix_ + ':' if (UseCapturedNS_ and self.q10_settings_nsprefix_) else '' self.q10_settings.export(outfile, level, namespaceprefix_, namespacedef_='', name_='q10Settings', pretty_print=pretty_print) if self.time_course is not None: + namespaceprefix_ = self.time_course_nsprefix_ + ':' if (UseCapturedNS_ and self.time_course_nsprefix_) else '' self.time_course.export(outfile, level, namespaceprefix_, namespacedef_='', name_='timeCourse', pretty_print=pretty_print) if self.steady_state is not None: + namespaceprefix_ = self.steady_state_nsprefix_ + ':' if (UseCapturedNS_ and self.steady_state_nsprefix_) else '' self.steady_state.export(outfile, level, namespaceprefix_, namespacedef_='', name_='steadyState', pretty_print=pretty_print) - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('instances', node) if value is not None and 'instances' not in already_processed: already_processed.add('instances') - try: - self.instances = int(value) - except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + self.instances = self.gds_parse_integer(value, node, 'instances') if self.instances <= 0: raise_parse_error(node, 'Invalid PositiveInteger') self.validate_PositiveInteger(self.instances) # validate type PositiveInteger - super(GateHHTauInf, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + super(GateHHTauInf, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'notes': - notes_ = child_.text - notes_ = self.gds_validate_string(notes_, node, 'notes') - self.notes = notes_ + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'notes') + value_ = self.gds_validate_string(value_, node, 'notes') + self.notes = value_ + self.notes_nsprefix_ = child_.prefix # validate type Notes self.validate_Notes(self.notes) elif nodeName_ == 'q10Settings': obj_ = Q10Settings.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.q10_settings = obj_ obj_.original_tagname_ = 'q10Settings' elif nodeName_ == 'timeCourse': obj_ = HHTime.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.time_course = obj_ obj_.original_tagname_ = 'timeCourse' 
elif nodeName_ == 'steadyState': obj_ = HHVariable.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.steady_state = obj_ obj_.original_tagname_ = 'steadyState' - super(GateHHTauInf, self).buildChildren(child_, node, nodeName_, True) + super(GateHHTauInf, self)._buildChildren(child_, node, nodeName_, True) # end class GateHHTauInf class GateHHRates(Base): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('instances', 'PositiveInteger', 0, 0, {'use': u'required'}), - MemberSpec_('notes', ['Notes', 'xs:string'], 0, 1, {u'type': u'xs:string', u'name': u'notes', u'minOccurs': u'0'}, None), - MemberSpec_('q10_settings', 'Q10Settings', 0, 1, {u'type': u'Q10Settings', u'name': u'q10Settings', u'minOccurs': u'0'}, None), - MemberSpec_('forward_rate', 'HHRate', 0, 0, {u'type': u'HHRate', u'name': u'forwardRate', u'minOccurs': u'1'}, None), - MemberSpec_('reverse_rate', 'HHRate', 0, 0, {u'type': u'HHRate', u'name': u'reverseRate', u'minOccurs': u'1'}, None), + MemberSpec_('instances', 'PositiveInteger', 0, 0, {'use': 'required', 'name': 'instances'}), + MemberSpec_('notes', ['Notes', 'xs:string'], 0, 1, {'minOccurs': '0', 'name': 'notes', 'type': 'xs:string'}, None), + MemberSpec_('q10_settings', 'Q10Settings', 0, 1, {'minOccurs': '0', 'name': 'q10Settings', 'type': 'Q10Settings'}, None), + MemberSpec_('forward_rate', 'HHRate', 0, 0, {'minOccurs': '1', 'name': 'forwardRate', 'type': 'HHRate'}, None), + MemberSpec_('reverse_rate', 'HHRate', 0, 0, {'minOccurs': '1', 'name': 'reverseRate', 'type': 'HHRate'}, None), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, instances=None, notes=None, q10_settings=None, forward_rate=None, reverse_rate=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, instances=None, notes=None, q10_settings=None, forward_rate=None, reverse_rate=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(GateHHRates, self).__init__(neuro_lex_id, id, **kwargs_) + self.ns_prefix_ = None + super(globals().get("GateHHRates"), self).__init__(neuro_lex_id, id, **kwargs_) self.instances = _cast(int, instances) + self.instances_nsprefix_ = None self.notes = notes self.validate_Notes(self.notes) + self.notes_nsprefix_ = None self.q10_settings = q10_settings + self.q10_settings_nsprefix_ = None self.forward_rate = forward_rate + self.forward_rate_nsprefix_ = None self.reverse_rate = reverse_rate + self.reverse_rate_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -14046,26 +18858,60 @@ def factory(*args_, **kwargs_): else: return GateHHRates(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_notes(self): + return self.notes + def set_notes(self, notes): + self.notes = notes + def get_q10Settings(self): + return self.q10_settings + def set_q10Settings(self, q10_settings): + self.q10_settings = q10_settings + def get_forwardRate(self): + return self.forward_rate + def set_forwardRate(self, forward_rate): + self.forward_rate = forward_rate + def get_reverseRate(self): + return self.reverse_rate + def set_reverseRate(self, reverse_rate): + self.reverse_rate = reverse_rate + def get_instances(self): + return 
self.instances + def set_instances(self, instances): + self.instances = instances def validate_Notes(self, value): + result = True # Validate type Notes, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False pass + return result def validate_PositiveInteger(self, value): # Validate type PositiveInteger, a restriction on xs:positiveInteger. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, int): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (int)' % {"value": value, "lineno": lineno, }) + return False pass - def hasContent_(self): + def _hasContent(self): if ( self.notes is not None or self.q10_settings is not None or self.forward_rate is not None or self.reverse_rate is not None or - super(GateHHRates, self).hasContent_() + super(GateHHRates, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GateHHRates', pretty_print=True): + def export(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='GateHHRates', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('GateHHRates') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ @@ -14073,118 +18919,140 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='G eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'GateHHRates': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateHHRates') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateHHRates') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='GateHHRates', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='GateHHRates', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GateHHRates'): - super(GateHHRates, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateHHRates') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GateHHRates'): + super(GateHHRates, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateHHRates') if self.instances is not None and 'instances' not in already_processed: already_processed.add('instances') - outfile.write(' instances=%s' % 
(quote_attrib(self.instances), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GateHHRates', fromsubclass_=False, pretty_print=True): - super(GateHHRates, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write(' instances="%s"' % self.gds_format_integer(self.instances, input_name='instances')) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='GateHHRates', fromsubclass_=False, pretty_print=True): + super(GateHHRates, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) if pretty_print: eol_ = '\n' else: eol_ = '' if self.notes is not None: + namespaceprefix_ = self.notes_nsprefix_ + ':' if (UseCapturedNS_ and self.notes_nsprefix_) else '' showIndent(outfile, level, pretty_print) outfile.write('<%snotes>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.notes), input_name='notes')), namespaceprefix_ , eol_)) if self.q10_settings is not None: + namespaceprefix_ = self.q10_settings_nsprefix_ + ':' if (UseCapturedNS_ and self.q10_settings_nsprefix_) else '' self.q10_settings.export(outfile, level, namespaceprefix_, namespacedef_='', name_='q10Settings', pretty_print=pretty_print) if self.forward_rate is not None: + namespaceprefix_ = self.forward_rate_nsprefix_ + ':' if (UseCapturedNS_ and self.forward_rate_nsprefix_) else '' self.forward_rate.export(outfile, level, namespaceprefix_, namespacedef_='', name_='forwardRate', pretty_print=pretty_print) if self.reverse_rate is not None: + namespaceprefix_ = self.reverse_rate_nsprefix_ + ':' if (UseCapturedNS_ and self.reverse_rate_nsprefix_) else '' self.reverse_rate.export(outfile, level, namespaceprefix_, namespacedef_='', name_='reverseRate', pretty_print=pretty_print) - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('instances', node) if value is not None and 'instances' not in already_processed: already_processed.add('instances') - try: - self.instances = int(value) - except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + self.instances = self.gds_parse_integer(value, node, 'instances') if self.instances <= 0: raise_parse_error(node, 'Invalid PositiveInteger') self.validate_PositiveInteger(self.instances) # validate type PositiveInteger - super(GateHHRates, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + super(GateHHRates, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'notes': - notes_ = child_.text - notes_ = self.gds_validate_string(notes_, node, 'notes') - self.notes = notes_ + value_ = 
child_.text + value_ = self.gds_parse_string(value_, node, 'notes') + value_ = self.gds_validate_string(value_, node, 'notes') + self.notes = value_ + self.notes_nsprefix_ = child_.prefix # validate type Notes self.validate_Notes(self.notes) elif nodeName_ == 'q10Settings': obj_ = Q10Settings.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.q10_settings = obj_ obj_.original_tagname_ = 'q10Settings' elif nodeName_ == 'forwardRate': obj_ = HHRate.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.forward_rate = obj_ obj_.original_tagname_ = 'forwardRate' elif nodeName_ == 'reverseRate': obj_ = HHRate.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.reverse_rate = obj_ obj_.original_tagname_ = 'reverseRate' - super(GateHHRates, self).buildChildren(child_, node, nodeName_, True) + super(GateHHRates, self)._buildChildren(child_, node, nodeName_, True) # end class GateHHRates class GateHHUndetermined(Base): - """Note all sub elements for gateHHrates, gateHHratesTau, - gateFractional etc. allowed here. Which are valid should be - constrained by what type is set""" + """GateHHUndetermined -- Note all sub elements for gateHHrates, gateHHratesTau, gateFractional etc. allowed here. Which are valid should be constrained by what type is set + + """ + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('instances', 'PositiveInteger', 0, 0, {'use': u'required'}), - MemberSpec_('type', 'gateTypes', 0, 0, {'use': u'required'}), - MemberSpec_('notes', ['Notes', 'xs:string'], 0, 1, {u'type': u'xs:string', u'name': u'notes', u'minOccurs': u'0'}, None), - MemberSpec_('q10_settings', 'Q10Settings', 0, 1, {u'type': u'Q10Settings', u'name': u'q10Settings', u'minOccurs': u'0'}, None), - MemberSpec_('forward_rate', 'HHRate', 0, 1, {u'type': u'HHRate', u'name': u'forwardRate', u'minOccurs': u'0'}, None), - MemberSpec_('reverse_rate', 'HHRate', 0, 1, {u'type': u'HHRate', u'name': u'reverseRate', u'minOccurs': u'0'}, None), - MemberSpec_('time_course', 'HHTime', 0, 1, {u'type': u'HHTime', u'name': u'timeCourse', u'minOccurs': u'0'}, None), - MemberSpec_('steady_state', 'HHVariable', 0, 1, {u'type': u'HHVariable', u'name': u'steadyState', u'minOccurs': u'0'}, None), - MemberSpec_('sub_gates', 'GateFractionalSubgate', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'GateFractionalSubgate', u'name': u'subGate', u'minOccurs': u'0'}, None), + MemberSpec_('instances', 'PositiveInteger', 0, 0, {'use': 'required', 'name': 'instances'}), + MemberSpec_('type', 'gateTypes', 0, 0, {'use': 'required', 'name': 'type'}), + MemberSpec_('notes', ['Notes', 'xs:string'], 0, 1, {'minOccurs': '0', 'name': 'notes', 'type': 'xs:string'}, None), + MemberSpec_('q10_settings', 'Q10Settings', 0, 1, {'minOccurs': '0', 'name': 'q10Settings', 'type': 'Q10Settings'}, None), + MemberSpec_('forward_rate', 'HHRate', 0, 1, {'minOccurs': '0', 'name': 'forwardRate', 'type': 'HHRate'}, None), + MemberSpec_('reverse_rate', 'HHRate', 0, 1, {'minOccurs': '0', 'name': 'reverseRate', 'type': 'HHRate'}, None), + MemberSpec_('time_course', 'HHTime', 0, 1, {'minOccurs': '0', 'name': 'timeCourse', 'type': 'HHTime'}, None), + MemberSpec_('steady_state', 'HHVariable', 0, 1, {'minOccurs': '0', 'name': 'steadyState', 'type': 'HHVariable'}, None), + MemberSpec_('sub_gates', 'GateFractionalSubgate', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'subGate', 'type': 
'GateFractionalSubgate'}, None), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, instances=None, type=None, notes=None, q10_settings=None, forward_rate=None, reverse_rate=None, time_course=None, steady_state=None, sub_gates=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, instances=None, type=None, notes=None, q10_settings=None, forward_rate=None, reverse_rate=None, time_course=None, steady_state=None, sub_gates=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(GateHHUndetermined, self).__init__(neuro_lex_id, id, **kwargs_) + self.ns_prefix_ = None + super(globals().get("GateHHUndetermined"), self).__init__(neuro_lex_id, id, **kwargs_) self.instances = _cast(int, instances) + self.instances_nsprefix_ = None self.type = _cast(None, type) + self.type_nsprefix_ = None self.notes = notes self.validate_Notes(self.notes) + self.notes_nsprefix_ = None self.q10_settings = q10_settings + self.q10_settings_nsprefix_ = None self.forward_rate = forward_rate + self.forward_rate_nsprefix_ = None self.reverse_rate = reverse_rate + self.reverse_rate_nsprefix_ = None self.time_course = time_course + self.time_course_nsprefix_ = None self.steady_state = steady_state + self.steady_state_nsprefix_ = None if sub_gates is None: self.sub_gates = [] else: self.sub_gates = sub_gates + self.sub_gates_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -14196,27 +19064,84 @@ def factory(*args_, **kwargs_): else: return GateHHUndetermined(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_notes(self): + return self.notes + def set_notes(self, notes): + self.notes = notes + def get_q10Settings(self): + return self.q10_settings + def set_q10Settings(self, q10_settings): + self.q10_settings = q10_settings + def get_forwardRate(self): + return self.forward_rate + def set_forwardRate(self, forward_rate): + self.forward_rate = forward_rate + def get_reverseRate(self): + return self.reverse_rate + def set_reverseRate(self, reverse_rate): + self.reverse_rate = reverse_rate + def get_timeCourse(self): + return self.time_course + def set_timeCourse(self, time_course): + self.time_course = time_course + def get_steadyState(self): + return self.steady_state + def set_steadyState(self, steady_state): + self.steady_state = steady_state + def get_subGate(self): + return self.sub_gates + def set_subGate(self, sub_gates): + self.sub_gates = sub_gates + def add_subGate(self, value): + self.sub_gates.append(value) + def insert_subGate_at(self, index, value): + self.sub_gates.insert(index, value) + def replace_subGate_at(self, index, value): + self.sub_gates[index] = value + def get_instances(self): + return self.instances + def set_instances(self, instances): + self.instances = instances + def get_type(self): + return self.type + def set_type(self, type): + self.type = type def validate_Notes(self, value): + result = True # Validate type Notes, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False pass + return result def validate_PositiveInteger(self, value): # Validate type PositiveInteger, a restriction on xs:positiveInteger. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, int): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (int)' % {"value": value, "lineno": lineno, }) + return False pass def validate_gateTypes(self, value): # Validate type gateTypes, a restriction on xs:string. - if value is not None and Validate_simpletypes_: - value = str(value) + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False + value = value enumerations = ['gateHHrates', 'gateHHratesTau', 'gateHHtauInf', 'gateHHratesInf', 'gateHHratesTauInf', 'gateHHInstantaneous', 'gateKS', 'gateFractional'] - enumeration_respectee = False - for enum in enumerations: - if value == enum: - enumeration_respectee = True - break - if not enumeration_respectee: - warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on gateTypes' % {"value" : value.encode("utf-8")} ) - def hasContent_(self): + if value not in enumerations: + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on gateTypes' % {"value" : encode_str_2_3(value), "lineno": lineno} ) + result = False + def _hasContent(self): if ( self.notes is not None or self.q10_settings is not None or @@ -14225,12 +19150,12 @@ def hasContent_(self): self.time_course is not None or self.steady_state is not None or self.sub_gates or - super(GateHHUndetermined, self).hasContent_() + super(GateHHUndetermined, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GateHHUndetermined', pretty_print=True): + def export(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='GateHHUndetermined', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('GateHHUndetermined') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ @@ -14238,63 +19163,73 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='G eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'GateHHUndetermined': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateHHUndetermined') - if 
self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateHHUndetermined') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='GateHHUndetermined', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='GateHHUndetermined', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GateHHUndetermined'): - super(GateHHUndetermined, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateHHUndetermined') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GateHHUndetermined'): + super(GateHHUndetermined, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateHHUndetermined') if self.instances is not None and 'instances' not in already_processed: already_processed.add('instances') - outfile.write(' instances=%s' % (quote_attrib(self.instances), )) + outfile.write(' instances="%s"' % self.gds_format_integer(self.instances, input_name='instances')) if self.type is not None and 'type' not in already_processed: already_processed.add('type') - outfile.write(' type=%s' % (quote_attrib(self.type), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GateHHUndetermined', fromsubclass_=False, pretty_print=True): - super(GateHHUndetermined, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write(' type=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.type), input_name='type')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='GateHHUndetermined', fromsubclass_=False, pretty_print=True): + super(GateHHUndetermined, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) if pretty_print: eol_ = '\n' else: eol_ = '' if self.notes is not None: + namespaceprefix_ = self.notes_nsprefix_ + ':' if (UseCapturedNS_ and self.notes_nsprefix_) else '' showIndent(outfile, level, pretty_print) outfile.write('<%snotes>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.notes), input_name='notes')), namespaceprefix_ , eol_)) if self.q10_settings is not None: + namespaceprefix_ = self.q10_settings_nsprefix_ + ':' if (UseCapturedNS_ and self.q10_settings_nsprefix_) else '' self.q10_settings.export(outfile, level, namespaceprefix_, namespacedef_='', name_='q10Settings', pretty_print=pretty_print) if self.forward_rate is not None: + namespaceprefix_ = self.forward_rate_nsprefix_ + ':' if (UseCapturedNS_ and self.forward_rate_nsprefix_) else '' self.forward_rate.export(outfile, level, namespaceprefix_, namespacedef_='', name_='forwardRate', pretty_print=pretty_print) if self.reverse_rate is not None: + namespaceprefix_ = self.reverse_rate_nsprefix_ + ':' if (UseCapturedNS_ and self.reverse_rate_nsprefix_) else '' self.reverse_rate.export(outfile, level, namespaceprefix_, namespacedef_='', name_='reverseRate', pretty_print=pretty_print) if self.time_course is not None: + namespaceprefix_ = self.time_course_nsprefix_ + ':' if (UseCapturedNS_ and 
self.time_course_nsprefix_) else '' self.time_course.export(outfile, level, namespaceprefix_, namespacedef_='', name_='timeCourse', pretty_print=pretty_print) if self.steady_state is not None: + namespaceprefix_ = self.steady_state_nsprefix_ + ':' if (UseCapturedNS_ and self.steady_state_nsprefix_) else '' self.steady_state.export(outfile, level, namespaceprefix_, namespacedef_='', name_='steadyState', pretty_print=pretty_print) for subGate_ in self.sub_gates: + namespaceprefix_ = self.sub_gates_nsprefix_ + ':' if (UseCapturedNS_ and self.sub_gates_nsprefix_) else '' subGate_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='subGate', pretty_print=pretty_print) - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('instances', node) if value is not None and 'instances' not in already_processed: already_processed.add('instances') - try: - self.instances = int(value) - except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + self.instances = self.gds_parse_integer(value, node, 'instances') if self.instances <= 0: raise_parse_error(node, 'Invalid PositiveInteger') self.validate_PositiveInteger(self.instances) # validate type PositiveInteger @@ -14303,89 +19238,103 @@ def buildAttributes(self, node, attrs, already_processed): already_processed.add('type') self.type = value self.validate_gateTypes(self.type) # validate type gateTypes - super(GateHHUndetermined, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + super(GateHHUndetermined, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'notes': - notes_ = child_.text - notes_ = self.gds_validate_string(notes_, node, 'notes') - self.notes = notes_ + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'notes') + value_ = self.gds_validate_string(value_, node, 'notes') + self.notes = value_ + self.notes_nsprefix_ = child_.prefix # validate type Notes self.validate_Notes(self.notes) elif nodeName_ == 'q10Settings': obj_ = Q10Settings.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.q10_settings = obj_ obj_.original_tagname_ = 'q10Settings' elif nodeName_ == 'forwardRate': obj_ = HHRate.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.forward_rate = obj_ obj_.original_tagname_ = 'forwardRate' elif nodeName_ == 'reverseRate': obj_ = HHRate.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.reverse_rate = obj_ obj_.original_tagname_ = 'reverseRate' elif nodeName_ == 'timeCourse': obj_ = HHTime.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, 
gds_collector_=gds_collector_) self.time_course = obj_ obj_.original_tagname_ = 'timeCourse' elif nodeName_ == 'steadyState': obj_ = HHVariable.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.steady_state = obj_ obj_.original_tagname_ = 'steadyState' elif nodeName_ == 'subGate': obj_ = GateFractionalSubgate.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.sub_gates.append(obj_) obj_.original_tagname_ = 'subGate' - super(GateHHUndetermined, self).buildChildren(child_, node, nodeName_, True) + super(GateHHUndetermined, self)._buildChildren(child_, node, nodeName_, True) # end class GateHHUndetermined class GateKS(Base): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('instances', 'PositiveInteger', 0, 0, {'use': u'required'}), - MemberSpec_('notes', ['Notes', 'xs:string'], 0, 1, {u'type': u'xs:string', u'name': u'notes', u'minOccurs': u'0'}, None), - MemberSpec_('q10_settings', 'Q10Settings', 0, 1, {u'type': u'Q10Settings', u'name': u'q10Settings', u'minOccurs': u'0'}, None), - MemberSpec_('closed_states', 'ClosedState', 1, 0, {u'maxOccurs': u'unbounded', u'type': u'ClosedState', u'name': u'closedState', u'minOccurs': u'1'}, None), - MemberSpec_('open_states', 'OpenState', 1, 0, {u'maxOccurs': u'unbounded', u'type': u'OpenState', u'name': u'openState', u'minOccurs': u'1'}, None), - MemberSpec_('forward_transition', 'ForwardTransition', 1, 0, {u'type': u'ForwardTransition', u'name': u'forwardTransition'}, 2), - MemberSpec_('reverse_transition', 'ReverseTransition', 1, 0, {u'type': u'ReverseTransition', u'name': u'reverseTransition'}, 2), - MemberSpec_('tau_inf_transition', 'TauInfTransition', 1, 0, {u'type': u'TauInfTransition', u'name': u'tauInfTransition'}, 2), + MemberSpec_('instances', 'PositiveInteger', 0, 0, {'use': 'required', 'name': 'instances'}), + MemberSpec_('notes', ['Notes', 'xs:string'], 0, 1, {'minOccurs': '0', 'name': 'notes', 'type': 'xs:string'}, None), + MemberSpec_('q10_settings', 'Q10Settings', 0, 1, {'minOccurs': '0', 'name': 'q10Settings', 'type': 'Q10Settings'}, None), + MemberSpec_('closed_states', 'ClosedState', 1, 0, {'maxOccurs': 'unbounded', 'minOccurs': '1', 'name': 'closedState', 'type': 'ClosedState'}, None), + MemberSpec_('open_states', 'OpenState', 1, 0, {'maxOccurs': 'unbounded', 'minOccurs': '1', 'name': 'openState', 'type': 'OpenState'}, None), + MemberSpec_('forward_transition', 'ForwardTransition', 1, 0, {'name': 'forwardTransition', 'type': 'ForwardTransition'}, 2), + MemberSpec_('reverse_transition', 'ReverseTransition', 1, 0, {'name': 'reverseTransition', 'type': 'ReverseTransition'}, 2), + MemberSpec_('tau_inf_transition', 'TauInfTransition', 1, 0, {'name': 'tauInfTransition', 'type': 'TauInfTransition'}, 2), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, instances=None, notes=None, q10_settings=None, closed_states=None, open_states=None, forward_transition=None, reverse_transition=None, tau_inf_transition=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, instances=None, notes=None, q10_settings=None, closed_states=None, open_states=None, forward_transition=None, reverse_transition=None, tau_inf_transition=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(GateKS, self).__init__(neuro_lex_id, id, 
**kwargs_) + self.ns_prefix_ = None + super(globals().get("GateKS"), self).__init__(neuro_lex_id, id, **kwargs_) self.instances = _cast(int, instances) + self.instances_nsprefix_ = None self.notes = notes self.validate_Notes(self.notes) + self.notes_nsprefix_ = None self.q10_settings = q10_settings + self.q10_settings_nsprefix_ = None if closed_states is None: self.closed_states = [] else: self.closed_states = closed_states + self.closed_states_nsprefix_ = None if open_states is None: self.open_states = [] else: self.open_states = open_states + self.open_states_nsprefix_ = None if forward_transition is None: self.forward_transition = [] else: self.forward_transition = forward_transition + self.forward_transition_nsprefix_ = None if reverse_transition is None: self.reverse_transition = [] else: self.reverse_transition = reverse_transition + self.reverse_transition_nsprefix_ = None if tau_inf_transition is None: self.tau_inf_transition = [] else: self.tau_inf_transition = tau_inf_transition + self.tau_inf_transition_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -14397,15 +19346,91 @@ def factory(*args_, **kwargs_): else: return GateKS(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_notes(self): + return self.notes + def set_notes(self, notes): + self.notes = notes + def get_q10Settings(self): + return self.q10_settings + def set_q10Settings(self, q10_settings): + self.q10_settings = q10_settings + def get_closedState(self): + return self.closed_states + def set_closedState(self, closed_states): + self.closed_states = closed_states + def add_closedState(self, value): + self.closed_states.append(value) + def insert_closedState_at(self, index, value): + self.closed_states.insert(index, value) + def replace_closedState_at(self, index, value): + self.closed_states[index] = value + def get_openState(self): + return self.open_states + def set_openState(self, open_states): + self.open_states = open_states + def add_openState(self, value): + self.open_states.append(value) + def insert_openState_at(self, index, value): + self.open_states.insert(index, value) + def replace_openState_at(self, index, value): + self.open_states[index] = value + def get_forwardTransition(self): + return self.forward_transition + def set_forwardTransition(self, forward_transition): + self.forward_transition = forward_transition + def add_forwardTransition(self, value): + self.forward_transition.append(value) + def insert_forwardTransition_at(self, index, value): + self.forward_transition.insert(index, value) + def replace_forwardTransition_at(self, index, value): + self.forward_transition[index] = value + def get_reverseTransition(self): + return self.reverse_transition + def set_reverseTransition(self, reverse_transition): + self.reverse_transition = reverse_transition + def add_reverseTransition(self, value): + self.reverse_transition.append(value) + def insert_reverseTransition_at(self, index, value): + self.reverse_transition.insert(index, value) + def replace_reverseTransition_at(self, index, value): + self.reverse_transition[index] = value + def get_tauInfTransition(self): + return self.tau_inf_transition + def set_tauInfTransition(self, tau_inf_transition): + self.tau_inf_transition = tau_inf_transition + def add_tauInfTransition(self, value): + self.tau_inf_transition.append(value) + def 
insert_tauInfTransition_at(self, index, value): + self.tau_inf_transition.insert(index, value) + def replace_tauInfTransition_at(self, index, value): + self.tau_inf_transition[index] = value + def get_instances(self): + return self.instances + def set_instances(self, instances): + self.instances = instances def validate_Notes(self, value): + result = True # Validate type Notes, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False pass + return result def validate_PositiveInteger(self, value): # Validate type PositiveInteger, a restriction on xs:positiveInteger. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, int): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (int)' % {"value": value, "lineno": lineno, }) + return False pass - def hasContent_(self): + def _hasContent(self): if ( self.notes is not None or self.q10_settings is not None or @@ -14414,12 +19439,12 @@ def hasContent_(self): self.forward_transition or self.reverse_transition or self.tau_inf_transition or - super(GateKS, self).hasContent_() + super(GateKS, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GateKS', pretty_print=True): + def export(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='GateKS', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('GateKS') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ @@ -14427,122 +19452,142 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='G eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'GateKS': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateKS') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateKS') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='GateKS', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='GateKS', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GateKS'): - super(GateKS, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateKS') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GateKS'): + super(GateKS, 
self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateKS') if self.instances is not None and 'instances' not in already_processed: already_processed.add('instances') - outfile.write(' instances=%s' % (quote_attrib(self.instances), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GateKS', fromsubclass_=False, pretty_print=True): - super(GateKS, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write(' instances="%s"' % self.gds_format_integer(self.instances, input_name='instances')) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='GateKS', fromsubclass_=False, pretty_print=True): + super(GateKS, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) if pretty_print: eol_ = '\n' else: eol_ = '' if self.notes is not None: + namespaceprefix_ = self.notes_nsprefix_ + ':' if (UseCapturedNS_ and self.notes_nsprefix_) else '' showIndent(outfile, level, pretty_print) outfile.write('<%snotes>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.notes), input_name='notes')), namespaceprefix_ , eol_)) if self.q10_settings is not None: + namespaceprefix_ = self.q10_settings_nsprefix_ + ':' if (UseCapturedNS_ and self.q10_settings_nsprefix_) else '' self.q10_settings.export(outfile, level, namespaceprefix_, namespacedef_='', name_='q10Settings', pretty_print=pretty_print) for closedState_ in self.closed_states: + namespaceprefix_ = self.closed_states_nsprefix_ + ':' if (UseCapturedNS_ and self.closed_states_nsprefix_) else '' closedState_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='closedState', pretty_print=pretty_print) for openState_ in self.open_states: + namespaceprefix_ = self.open_states_nsprefix_ + ':' if (UseCapturedNS_ and self.open_states_nsprefix_) else '' openState_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='openState', pretty_print=pretty_print) for forwardTransition_ in self.forward_transition: + namespaceprefix_ = self.forward_transition_nsprefix_ + ':' if (UseCapturedNS_ and self.forward_transition_nsprefix_) else '' forwardTransition_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='forwardTransition', pretty_print=pretty_print) for reverseTransition_ in self.reverse_transition: + namespaceprefix_ = self.reverse_transition_nsprefix_ + ':' if (UseCapturedNS_ and self.reverse_transition_nsprefix_) else '' reverseTransition_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='reverseTransition', pretty_print=pretty_print) for tauInfTransition_ in self.tau_inf_transition: + namespaceprefix_ = self.tau_inf_transition_nsprefix_ + ':' if (UseCapturedNS_ and self.tau_inf_transition_nsprefix_) else '' tauInfTransition_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='tauInfTransition', pretty_print=pretty_print) - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + 
self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('instances', node) if value is not None and 'instances' not in already_processed: already_processed.add('instances') - try: - self.instances = int(value) - except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + self.instances = self.gds_parse_integer(value, node, 'instances') if self.instances <= 0: raise_parse_error(node, 'Invalid PositiveInteger') self.validate_PositiveInteger(self.instances) # validate type PositiveInteger - super(GateKS, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + super(GateKS, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'notes': - notes_ = child_.text - notes_ = self.gds_validate_string(notes_, node, 'notes') - self.notes = notes_ + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 'notes') + value_ = self.gds_validate_string(value_, node, 'notes') + self.notes = value_ + self.notes_nsprefix_ = child_.prefix # validate type Notes self.validate_Notes(self.notes) elif nodeName_ == 'q10Settings': obj_ = Q10Settings.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.q10_settings = obj_ obj_.original_tagname_ = 'q10Settings' elif nodeName_ == 'closedState': obj_ = ClosedState.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.closed_states.append(obj_) obj_.original_tagname_ = 'closedState' elif nodeName_ == 'openState': obj_ = OpenState.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.open_states.append(obj_) obj_.original_tagname_ = 'openState' elif nodeName_ == 'forwardTransition': obj_ = ForwardTransition.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.forward_transition.append(obj_) obj_.original_tagname_ = 'forwardTransition' elif nodeName_ == 'reverseTransition': obj_ = ReverseTransition.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.reverse_transition.append(obj_) obj_.original_tagname_ = 'reverseTransition' elif nodeName_ == 'tauInfTransition': obj_ = TauInfTransition.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.tau_inf_transition.append(obj_) obj_.original_tagname_ = 'tauInfTransition' - super(GateKS, self).buildChildren(child_, node, nodeName_, True) + super(GateKS, self)._buildChildren(child_, node, nodeName_, True) # end class GateKS class TauInfTransition(Base): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('from_', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('to', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('steady_state', 'HHVariable', 0, 0, {u'type': u'HHVariable', u'name': u'steadyState'}, None), - MemberSpec_('time_course', 'HHTime', 0, 0, {u'type': u'HHTime', u'name': u'timeCourse'}, None), + MemberSpec_('from_', 'NmlId', 0, 0, {'use': 'required', 'name': 'from_'}), + MemberSpec_('to', 'NmlId', 0, 0, {'use': 'required', 'name': 'to'}), + MemberSpec_('steady_state', 'HHVariable', 
0, 0, {'name': 'steadyState', 'type': 'HHVariable'}, None), + MemberSpec_('time_course', 'HHTime', 0, 0, {'name': 'timeCourse', 'type': 'HHTime'}, None), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, from_=None, to=None, steady_state=None, time_course=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, from_=None, to=None, steady_state=None, time_course=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(TauInfTransition, self).__init__(neuro_lex_id, id, **kwargs_) + self.ns_prefix_ = None + super(globals().get("TauInfTransition"), self).__init__(neuro_lex_id, id, **kwargs_) self.from_ = _cast(None, from_) + self.from__nsprefix_ = None self.to = _cast(None, to) + self.to_nsprefix_ = None self.steady_state = steady_state + self.steady_state_nsprefix_ = None self.time_course = time_course + self.time_course_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -14554,23 +19599,47 @@ def factory(*args_, **kwargs_): else: return TauInfTransition(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_steadyState(self): + return self.steady_state + def set_steadyState(self, steady_state): + self.steady_state = steady_state + def get_timeCourse(self): + return self.time_course + def set_timeCourse(self, time_course): + self.time_course = time_course + def get_from(self): + return self.from_ + def set_from(self, from_): + self.from_ = from_ + def get_to(self): + return self.to + def set_to(self, to): + self.to = to def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_NmlId_patterns_, )) + validate_NmlId_patterns_ = [['^([a-zA-Z_][a-zA-Z0-9_]*)$']] + def _hasContent(self): if ( self.steady_state is not None or self.time_course is not None or - super(TauInfTransition, self).hasContent_() + super(TauInfTransition, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='TauInfTransition', pretty_print=True): + def export(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='TauInfTransition', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('TauInfTransition') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ @@ -14578,45 +19647,53 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='T eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'TauInfTransition': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='TauInfTransition') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='TauInfTransition') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='TauInfTransition', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='TauInfTransition', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='TauInfTransition'): - super(TauInfTransition, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='TauInfTransition') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='TauInfTransition'): + super(TauInfTransition, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='TauInfTransition') if self.from_ is not None and 'from_' not in already_processed: already_processed.add('from_') - outfile.write(' from=%s' % (quote_attrib(self.from_), )) + outfile.write(' from=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.from_), input_name='from')), )) if self.to is not None and 'to' not in 
already_processed: already_processed.add('to') - outfile.write(' to=%s' % (quote_attrib(self.to), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='TauInfTransition', fromsubclass_=False, pretty_print=True): - super(TauInfTransition, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write(' to=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.to), input_name='to')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='TauInfTransition', fromsubclass_=False, pretty_print=True): + super(TauInfTransition, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) if pretty_print: eol_ = '\n' else: eol_ = '' if self.steady_state is not None: + namespaceprefix_ = self.steady_state_nsprefix_ + ':' if (UseCapturedNS_ and self.steady_state_nsprefix_) else '' self.steady_state.export(outfile, level, namespaceprefix_, namespacedef_='', name_='steadyState', pretty_print=pretty_print) if self.time_course is not None: + namespaceprefix_ = self.time_course_nsprefix_ + ':' if (UseCapturedNS_ and self.time_course_nsprefix_) else '' self.time_course.export(outfile, level, namespaceprefix_, namespacedef_='', name_='timeCourse', pretty_print=pretty_print) - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('from', node) if value is not None and 'from' not in already_processed: already_processed.add('from') @@ -14627,36 +19704,42 @@ def buildAttributes(self, node, attrs, already_processed): already_processed.add('to') self.to = value self.validate_NmlId(self.to) # validate type NmlId - super(TauInfTransition, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + super(TauInfTransition, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'steadyState': obj_ = HHVariable.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.steady_state = obj_ obj_.original_tagname_ = 'steadyState' elif nodeName_ == 'timeCourse': obj_ = HHTime.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.time_course = obj_ obj_.original_tagname_ = 'timeCourse' - super(TauInfTransition, self).buildChildren(child_, node, nodeName_, True) + super(TauInfTransition, self)._buildChildren(child_, node, nodeName_, True) # end class TauInfTransition class ReverseTransition(Base): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('from_', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('to', 'NmlId', 0, 0, {'use': u'required'}), - 
MemberSpec_('__ANY__', '__ANY__', 1, 1, {u'maxOccurs': u'unbounded', u'processContents': u'skip', u'minOccurs': u'0'}, None), + MemberSpec_('from_', 'NmlId', 0, 0, {'use': 'required', 'name': 'from_'}), + MemberSpec_('to', 'NmlId', 0, 0, {'use': 'required', 'name': 'to'}), + MemberSpec_('__ANY__', '__ANY__', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'processContents': 'skip'}, None), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, from_=None, to=None, anytypeobjs_=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, from_=None, to=None, anytypeobjs_=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(ReverseTransition, self).__init__(neuro_lex_id, id, **kwargs_) + self.ns_prefix_ = None + super(globals().get("ReverseTransition"), self).__init__(neuro_lex_id, id, **kwargs_) self.from_ = _cast(None, from_) + self.from__nsprefix_ = None self.to = _cast(None, to) + self.to_nsprefix_ = None if anytypeobjs_ is None: self.anytypeobjs_ = [] else: @@ -14672,22 +19755,42 @@ def factory(*args_, **kwargs_): else: return ReverseTransition(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_anytypeobjs_(self): return self.anytypeobjs_ + def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_ + def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value) + def insert_anytypeobjs_(self, index, value): self._anytypeobjs_[index] = value + def get_from(self): + return self.from_ + def set_from(self, from_): + self.from_ = from_ + def get_to(self): + return self.to + def set_to(self, to): + self.to = to def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_NmlId_patterns_, )) + validate_NmlId_patterns_ = [['^([a-zA-Z_][a-zA-Z0-9_]*)$']] + def _hasContent(self): if ( self.anytypeobjs_ or - super(ReverseTransition, self).hasContent_() + super(ReverseTransition, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ReverseTransition', pretty_print=True): + def export(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='ReverseTransition', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('ReverseTransition') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ @@ -14695,43 +19798,52 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='R eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'ReverseTransition': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ReverseTransition') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ReverseTransition') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ReverseTransition', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ReverseTransition', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ReverseTransition'): - super(ReverseTransition, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ReverseTransition') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ReverseTransition'): + super(ReverseTransition, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ReverseTransition') if self.from_ is not None and 'from_' not in already_processed: already_processed.add('from_') - outfile.write(' from=%s' % (quote_attrib(self.from_), )) + outfile.write(' from=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.from_), input_name='from')), )) if self.to is not None and 'to' not in already_processed: 
already_processed.add('to') - outfile.write(' to=%s' % (quote_attrib(self.to), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ReverseTransition', fromsubclass_=False, pretty_print=True): - super(ReverseTransition, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write(' to=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.to), input_name='to')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='ReverseTransition', fromsubclass_=False, pretty_print=True): + super(ReverseTransition, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) if pretty_print: eol_ = '\n' else: eol_ = '' - for obj_ in self.anytypeobjs_: - obj_.export(outfile, level, namespaceprefix_, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + if not fromsubclass_: + for obj_ in self.anytypeobjs_: + showIndent(outfile, level, pretty_print) + outfile.write(obj_) + outfile.write('\n') + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('from', node) if value is not None and 'from' not in already_processed: already_processed.add('from') @@ -14742,29 +19854,34 @@ def buildAttributes(self, node, attrs, already_processed): already_processed.add('to') self.to = value self.validate_NmlId(self.to) # validate type NmlId - super(ReverseTransition, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - obj_ = self.gds_build_any(child_, 'ReverseTransition') - if obj_ is not None: - self.add_anytypeobjs_(obj_) - super(ReverseTransition, self).buildChildren(child_, node, nodeName_, True) + super(ReverseTransition, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + content_ = self.gds_build_any(child_, 'ReverseTransition') + self.add_anytypeobjs_(content_) + super(ReverseTransition, self)._buildChildren(child_, node, nodeName_, True) # end class ReverseTransition class ForwardTransition(Base): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('from_', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('to', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('__ANY__', '__ANY__', 1, 1, {u'maxOccurs': u'unbounded', u'processContents': u'skip', u'minOccurs': u'0'}, None), + MemberSpec_('from_', 'NmlId', 0, 0, {'use': 'required', 'name': 'from_'}), + MemberSpec_('to', 'NmlId', 0, 0, {'use': 'required', 'name': 'to'}), + MemberSpec_('__ANY__', '__ANY__', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'processContents': 'skip'}, None), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, from_=None, 
to=None, anytypeobjs_=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, from_=None, to=None, anytypeobjs_=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(ForwardTransition, self).__init__(neuro_lex_id, id, **kwargs_) + self.ns_prefix_ = None + super(globals().get("ForwardTransition"), self).__init__(neuro_lex_id, id, **kwargs_) self.from_ = _cast(None, from_) + self.from__nsprefix_ = None self.to = _cast(None, to) + self.to_nsprefix_ = None if anytypeobjs_ is None: self.anytypeobjs_ = [] else: @@ -14780,22 +19897,42 @@ def factory(*args_, **kwargs_): else: return ForwardTransition(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_anytypeobjs_(self): return self.anytypeobjs_ + def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_ + def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value) + def insert_anytypeobjs_(self, index, value): self._anytypeobjs_[index] = value + def get_from(self): + return self.from_ + def set_from(self, from_): + self.from_ = from_ + def get_to(self): + return self.to + def set_to(self, to): + self.to = to def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_NmlId_patterns_, )) + validate_NmlId_patterns_ = [['^([a-zA-Z_][a-zA-Z0-9_]*)$']] + def _hasContent(self): if ( self.anytypeobjs_ or - super(ForwardTransition, self).hasContent_() + super(ForwardTransition, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ForwardTransition', pretty_print=True): + def export(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='ForwardTransition', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('ForwardTransition') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ @@ -14803,43 +19940,52 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='F eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'ForwardTransition': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, 
namespaceprefix_, name_='ForwardTransition') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ForwardTransition') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ForwardTransition', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ForwardTransition', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ForwardTransition'): - super(ForwardTransition, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ForwardTransition') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ForwardTransition'): + super(ForwardTransition, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ForwardTransition') if self.from_ is not None and 'from_' not in already_processed: already_processed.add('from_') - outfile.write(' from=%s' % (quote_attrib(self.from_), )) + outfile.write(' from=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.from_), input_name='from')), )) if self.to is not None and 'to' not in already_processed: already_processed.add('to') - outfile.write(' to=%s' % (quote_attrib(self.to), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ForwardTransition', fromsubclass_=False, pretty_print=True): - super(ForwardTransition, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write(' to=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.to), input_name='to')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='ForwardTransition', fromsubclass_=False, pretty_print=True): + super(ForwardTransition, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) if pretty_print: eol_ = '\n' else: eol_ = '' - for obj_ in self.anytypeobjs_: - obj_.export(outfile, level, namespaceprefix_, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + if not fromsubclass_: + for obj_ in self.anytypeobjs_: + showIndent(outfile, level, pretty_print) + outfile.write(obj_) + outfile.write('\n') + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('from', node) if value is not None and 'from' not in already_processed: already_processed.add('from') @@ -14850,24 +19996,27 @@ def buildAttributes(self, node, attrs, already_processed): already_processed.add('to') self.to = value self.validate_NmlId(self.to) # 
validate type NmlId - super(ForwardTransition, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - obj_ = self.gds_build_any(child_, 'ForwardTransition') - if obj_ is not None: - self.add_anytypeobjs_(obj_) - super(ForwardTransition, self).buildChildren(child_, node, nodeName_, True) + super(ForwardTransition, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + content_ = self.gds_build_any(child_, 'ForwardTransition') + self.add_anytypeobjs_(content_) + super(ForwardTransition, self)._buildChildren(child_, node, nodeName_, True) # end class ForwardTransition class OpenState(Base): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(OpenState, self).__init__(neuro_lex_id, id, **kwargs_) + self.ns_prefix_ = None + super(globals().get("OpenState"), self).__init__(neuro_lex_id, id, **kwargs_) def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -14879,9 +20028,13 @@ def factory(*args_, **kwargs_): else: return OpenState(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def _hasContent(self): if ( - super(OpenState, self).hasContent_() + super(OpenState, self)._hasContent() ): return True else: @@ -14894,47 +20047,57 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='O eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'OpenState': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='OpenState') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='OpenState') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='OpenState', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='OpenState', pretty_print=pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='OpenState'): - super(OpenState, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='OpenState') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='OpenState', fromsubclass_=False, pretty_print=True): - super(OpenState, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', 
name_='OpenState'): + super(OpenState, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='OpenState') + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='OpenState', fromsubclass_=False, pretty_print=True): + super(OpenState, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) pass - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - super(OpenState, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(OpenState, self).buildChildren(child_, node, nodeName_, True) + def _buildAttributes(self, node, attrs, already_processed): + super(OpenState, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(OpenState, self)._buildChildren(child_, node, nodeName_, True) pass # end class OpenState class ClosedState(Base): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(ClosedState, self).__init__(neuro_lex_id, id, **kwargs_) + self.ns_prefix_ = None + super(globals().get("ClosedState"), self).__init__(neuro_lex_id, id, **kwargs_) def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -14946,9 +20109,13 @@ def factory(*args_, **kwargs_): else: return ClosedState(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def _hasContent(self): if ( - super(ClosedState, self).hasContent_() + super(ClosedState, self)._hasContent() ): return True else: @@ -14961,57 +20128,73 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='C eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'ClosedState': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ClosedState') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ClosedState') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, 
namespaceprefix_, namespacedef_, name_='ClosedState', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ClosedState', pretty_print=pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ClosedState'): - super(ClosedState, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ClosedState') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ClosedState', fromsubclass_=False, pretty_print=True): - super(ClosedState, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ClosedState'): + super(ClosedState, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ClosedState') + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ClosedState', fromsubclass_=False, pretty_print=True): + super(ClosedState, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) pass - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - super(ClosedState, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(ClosedState, self).buildChildren(child_, node, nodeName_, True) + def _buildAttributes(self, node, attrs, already_processed): + super(ClosedState, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(ClosedState, self)._buildChildren(child_, node, nodeName_, True) pass # end class ClosedState class IonChannelKS(Standalone): - """Kinetic scheme based ion channel.""" + """Kinetic scheme based ion channel. + IonChannelKS -- Kinetic scheme based ion channel. 
+ + """ + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('species', 'NmlId', 0, 1, {'use': u'optional'}), - MemberSpec_('conductance', 'Nml2Quantity_conductance', 0, 1, {'use': u'optional'}), - MemberSpec_('gate_kses', 'GateKS', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'GateKS', u'name': u'gateKS', u'minOccurs': u'0'}, None), + MemberSpec_('species', 'NmlId', 0, 1, {'use': 'optional', 'name': 'species'}), + MemberSpec_('conductance', 'Nml2Quantity_conductance', 0, 1, {'use': 'optional', 'name': 'conductance'}), + MemberSpec_('gate_kses', 'GateKS', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'gateKS', 'type': 'GateKS'}, None), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, species=None, conductance=None, gate_kses=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, species=None, conductance=None, gate_kses=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(IonChannelKS, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.ns_prefix_ = None + super(globals().get("IonChannelKS"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) self.species = _cast(None, species) + self.species_nsprefix_ = None self.conductance = _cast(None, conductance) + self.conductance_nsprefix_ = None if gate_kses is None: self.gate_kses = [] else: self.gate_kses = gate_kses + self.gate_kses_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -15023,29 +20206,59 @@ def factory(*args_, **kwargs_): else: return IonChannelKS(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_gateKS(self): + return self.gate_kses + def set_gateKS(self, gate_kses): + self.gate_kses = gate_kses + def add_gateKS(self, value): + self.gate_kses.append(value) + def insert_gateKS_at(self, index, value): + self.gate_kses.insert(index, value) + def replace_gateKS_at(self, index, value): + self.gate_kses[index] = value + def get_species(self): + return self.species + def set_species(self, species): + self.species = species + def get_conductance(self): + return self.conductance + def set_conductance(self, conductance): + self.conductance = conductance def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_NmlId_patterns_, )) + validate_NmlId_patterns_ = [['^([a-zA-Z_][a-zA-Z0-9_]*)$']] def validate_Nml2Quantity_conductance(self, value): # Validate type Nml2Quantity_conductance, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_conductance_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_conductance_patterns_, )) - validate_Nml2Quantity_conductance_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S|mS|uS|nS|pS)$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_conductance_patterns_, )) + validate_Nml2Quantity_conductance_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S|mS|uS|nS|pS))$']] + def _hasContent(self): if ( self.gate_kses or - super(IonChannelKS, self).hasContent_() + super(IonChannelKS, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IonChannelKS', pretty_print=True): + def export(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='IonChannelKS', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('IonChannelKS') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ @@ -15053,43 +20266,50 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='I eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'IonChannelKS': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IonChannelKS') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IonChannelKS') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='IonChannelKS', pretty_print=pretty_print) + 
self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='IonChannelKS', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='IonChannelKS'): - super(IonChannelKS, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IonChannelKS') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='IonChannelKS'): + super(IonChannelKS, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IonChannelKS') if self.species is not None and 'species' not in already_processed: already_processed.add('species') - outfile.write(' species=%s' % (quote_attrib(self.species), )) + outfile.write(' species=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.species), input_name='species')), )) if self.conductance is not None and 'conductance' not in already_processed: already_processed.add('conductance') - outfile.write(' conductance=%s' % (quote_attrib(self.conductance), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IonChannelKS', fromsubclass_=False, pretty_print=True): - super(IonChannelKS, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write(' conductance=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.conductance), input_name='conductance')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='IonChannelKS', fromsubclass_=False, pretty_print=True): + super(IonChannelKS, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) if pretty_print: eol_ = '\n' else: eol_ = '' for gateKS_ in self.gate_kses: + namespaceprefix_ = self.gate_kses_nsprefix_ + ':' if (UseCapturedNS_ and self.gate_kses_nsprefix_) else '' gateKS_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='gateKS', pretty_print=pretty_print) - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('species', node) if value is not None and 'species' not in already_processed: already_processed.add('species') @@ -15100,31 +20320,36 @@ def buildAttributes(self, node, attrs, already_processed): already_processed.add('conductance') self.conductance = value self.validate_Nml2Quantity_conductance(self.conductance) # validate type Nml2Quantity_conductance - super(IonChannelKS, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + super(IonChannelKS, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, 
nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'gateKS': obj_ = GateKS.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.gate_kses.append(obj_) obj_.original_tagname_ = 'gateKS' - super(IonChannelKS, self).buildChildren(child_, node, nodeName_, True) + super(IonChannelKS, self)._buildChildren(child_, node, nodeName_, True) # end class IonChannelKS class IonChannelScalable(Standalone): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('q10_conductance_scalings', 'Q10ConductanceScaling', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Q10ConductanceScaling', u'name': u'q10ConductanceScaling', u'minOccurs': u'0'}, None), + MemberSpec_('q10_conductance_scalings', 'Q10ConductanceScaling', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'q10ConductanceScaling', 'type': 'Q10ConductanceScaling'}, None), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, q10_conductance_scalings=None, extensiontype_=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, q10_conductance_scalings=None, extensiontype_=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(IonChannelScalable, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, extensiontype_, **kwargs_) + self.ns_prefix_ = None + super(globals().get("IonChannelScalable"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, extensiontype_, **kwargs_) if q10_conductance_scalings is None: self.q10_conductance_scalings = [] else: self.q10_conductance_scalings = q10_conductance_scalings + self.q10_conductance_scalings_nsprefix_ = None self.extensiontype_ = extensiontype_ def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: @@ -15137,15 +20362,31 @@ def factory(*args_, **kwargs_): else: return IonChannelScalable(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_q10ConductanceScaling(self): + return self.q10_conductance_scalings + def set_q10ConductanceScaling(self, q10_conductance_scalings): + self.q10_conductance_scalings = q10_conductance_scalings + def add_q10ConductanceScaling(self, value): + self.q10_conductance_scalings.append(value) + def insert_q10ConductanceScaling_at(self, index, value): + self.q10_conductance_scalings.insert(index, value) + def replace_q10ConductanceScaling_at(self, index, value): + self.q10_conductance_scalings[index] = value + def get_extensiontype_(self): return self.extensiontype_ + def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ + def _hasContent(self): if ( self.q10_conductance_scalings or - super(IonChannelScalable, self).hasContent_() + super(IonChannelScalable, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IonChannelScalable', pretty_print=True): + def export(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='IonChannelScalable', pretty_print=True): imported_ns_def_ = 
GenerateDSNamespaceDefs_.get('IonChannelScalable') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ @@ -15153,400 +20394,482 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='I eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'IonChannelScalable': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IonChannelScalable') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IonChannelScalable') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='IonChannelScalable', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='IonChannelScalable', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='IonChannelScalable'): - super(IonChannelScalable, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IonChannelScalable') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='IonChannelScalable'): + super(IonChannelScalable, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IonChannelScalable') if self.extensiontype_ is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IonChannelScalable', fromsubclass_=False, pretty_print=True): - super(IonChannelScalable, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get(self.extensiontype_, '') + outfile.write(' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_)) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='IonChannelScalable', fromsubclass_=False, pretty_print=True): + super(IonChannelScalable, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) if pretty_print: eol_ = '\n' else: eol_ = '' for q10ConductanceScaling_ in self.q10_conductance_scalings: + namespaceprefix_ = self.q10_conductance_scalings_nsprefix_ + ':' if (UseCapturedNS_ and self.q10_conductance_scalings_nsprefix_) else '' q10ConductanceScaling_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='q10ConductanceScaling', pretty_print=pretty_print) - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - 
self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('xsi:type', node) if value is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') self.extensiontype_ = value - super(IonChannelScalable, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + super(IonChannelScalable, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'q10ConductanceScaling': obj_ = Q10ConductanceScaling.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.q10_conductance_scalings.append(obj_) obj_.original_tagname_ = 'q10ConductanceScaling' - super(IonChannelScalable, self).buildChildren(child_, node, nodeName_, True) + super(IonChannelScalable, self)._buildChildren(child_, node, nodeName_, True) # end class IonChannelScalable class NeuroMLDocument(Standalone): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('includes', 'IncludeType', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'IncludeType', u'name': u'include', u'minOccurs': u'0'}, None), - MemberSpec_('extracellular_properties', 'ExtracellularProperties', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ExtracellularProperties', u'name': u'extracellularProperties', u'minOccurs': u'0'}, None), - MemberSpec_('intracellular_properties', 'IntracellularProperties', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'IntracellularProperties', u'name': u'intracellularProperties', u'minOccurs': u'0'}, None), - MemberSpec_('morphology', 'Morphology', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Morphology', u'name': u'morphology', u'minOccurs': u'0'}, None), - MemberSpec_('ion_channel', 'IonChannel', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'IonChannel', u'name': u'ionChannel', u'minOccurs': u'0'}, None), - MemberSpec_('ion_channel_hhs', 'IonChannelHH', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'IonChannelHH', u'name': u'ionChannelHH', u'minOccurs': u'0'}, None), - MemberSpec_('ion_channel_v_shifts', 'IonChannelVShift', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'IonChannelVShift', u'name': u'ionChannelVShift', u'minOccurs': u'0'}, None), - MemberSpec_('ion_channel_kses', 'IonChannelKS', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'IonChannelKS', u'name': u'ionChannelKS', u'minOccurs': u'0'}, None), - MemberSpec_('decaying_pool_concentration_models', 'DecayingPoolConcentrationModel', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'DecayingPoolConcentrationModel', u'name': u'decayingPoolConcentrationModel', u'minOccurs': u'0'}, None), - MemberSpec_('fixed_factor_concentration_models', 'FixedFactorConcentrationModel', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'FixedFactorConcentrationModel', u'name': u'fixedFactorConcentrationModel', u'minOccurs': u'0'}, None), - MemberSpec_('alpha_current_synapses', 'AlphaCurrentSynapse', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'AlphaCurrentSynapse', u'name': u'alphaCurrentSynapse', 
u'minOccurs': u'0'}, None), - MemberSpec_('alpha_synapses', 'AlphaSynapse', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'AlphaSynapse', u'name': u'alphaSynapse', u'minOccurs': u'0'}, None), - MemberSpec_('exp_one_synapses', 'ExpOneSynapse', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ExpOneSynapse', u'name': u'expOneSynapse', u'minOccurs': u'0'}, None), - MemberSpec_('exp_two_synapses', 'ExpTwoSynapse', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ExpTwoSynapse', u'name': u'expTwoSynapse', u'minOccurs': u'0'}, None), - MemberSpec_('exp_three_synapses', 'ExpThreeSynapse', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ExpThreeSynapse', u'name': u'expThreeSynapse', u'minOccurs': u'0'}, None), - MemberSpec_('blocking_plastic_synapses', 'BlockingPlasticSynapse', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'BlockingPlasticSynapse', u'name': u'blockingPlasticSynapse', u'minOccurs': u'0'}, None), - MemberSpec_('double_synapses', 'DoubleSynapse', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'DoubleSynapse', u'name': u'doubleSynapse', u'minOccurs': u'0'}, None), - MemberSpec_('gap_junctions', 'GapJunction', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'GapJunction', u'name': u'gapJunction', u'minOccurs': u'0'}, None), - MemberSpec_('silent_synapses', 'SilentSynapse', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'SilentSynapse', u'name': u'silentSynapse', u'minOccurs': u'0'}, None), - MemberSpec_('linear_graded_synapses', 'LinearGradedSynapse', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'LinearGradedSynapse', u'name': u'linearGradedSynapse', u'minOccurs': u'0'}, None), - MemberSpec_('graded_synapses', 'GradedSynapse', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'GradedSynapse', u'name': u'gradedSynapse', u'minOccurs': u'0'}, None), - MemberSpec_('biophysical_properties', 'BiophysicalProperties', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'BiophysicalProperties', u'name': u'biophysicalProperties', u'minOccurs': u'0'}, None), - MemberSpec_('cells', 'Cell', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Cell', u'name': u'cell', u'minOccurs': u'0'}, None), - MemberSpec_('cell2_ca_poolses', 'Cell2CaPools', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Cell2CaPools', u'name': u'cell2CaPools', u'minOccurs': u'0'}, None), - MemberSpec_('base_cells', 'BaseCell', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'BaseCell', u'name': u'baseCell', u'minOccurs': u'0'}, None), - MemberSpec_('iaf_tau_cells', 'IafTauCell', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'IafTauCell', u'name': u'iafTauCell', u'minOccurs': u'0'}, None), - MemberSpec_('iaf_tau_ref_cells', 'IafTauRefCell', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'IafTauRefCell', u'name': u'iafTauRefCell', u'minOccurs': u'0'}, None), - MemberSpec_('iaf_cells', 'IafCell', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'IafCell', u'name': u'iafCell', u'minOccurs': u'0'}, None), - MemberSpec_('iaf_ref_cells', 'IafRefCell', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'IafRefCell', u'name': u'iafRefCell', u'minOccurs': u'0'}, None), - MemberSpec_('izhikevich_cells', 'IzhikevichCell', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'IzhikevichCell', u'name': u'izhikevichCell', u'minOccurs': u'0'}, None), - MemberSpec_('izhikevich2007_cells', 'Izhikevich2007Cell', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Izhikevich2007Cell', u'name': u'izhikevich2007Cell', u'minOccurs': u'0'}, None), - MemberSpec_('ad_ex_ia_f_cells', 'AdExIaFCell', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'AdExIaFCell', u'name': u'adExIaFCell', u'minOccurs': u'0'}, None), - 
MemberSpec_('fitz_hugh_nagumo_cells', 'FitzHughNagumoCell', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'FitzHughNagumoCell', u'name': u'fitzHughNagumoCell', u'minOccurs': u'0'}, None), - MemberSpec_('fitz_hugh_nagumo1969_cells', 'FitzHughNagumo1969Cell', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'FitzHughNagumo1969Cell', u'name': u'fitzHughNagumo1969Cell', u'minOccurs': u'0'}, None), - MemberSpec_('pinsky_rinzel_ca3_cells', 'PinskyRinzelCA3Cell', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'PinskyRinzelCA3Cell', u'name': u'pinskyRinzelCA3Cell', u'minOccurs': u'0'}, None), - MemberSpec_('pulse_generators', 'PulseGenerator', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'PulseGenerator', u'name': u'pulseGenerator', u'minOccurs': u'0'}, None), - MemberSpec_('pulse_generator_dls', 'PulseGeneratorDL', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'PulseGeneratorDL', u'name': u'pulseGeneratorDL', u'minOccurs': u'0'}, None), - MemberSpec_('sine_generators', 'SineGenerator', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'SineGenerator', u'name': u'sineGenerator', u'minOccurs': u'0'}, None), - MemberSpec_('sine_generator_dls', 'SineGeneratorDL', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'SineGeneratorDL', u'name': u'sineGeneratorDL', u'minOccurs': u'0'}, None), - MemberSpec_('ramp_generators', 'RampGenerator', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'RampGenerator', u'name': u'rampGenerator', u'minOccurs': u'0'}, None), - MemberSpec_('ramp_generator_dls', 'RampGeneratorDL', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'RampGeneratorDL', u'name': u'rampGeneratorDL', u'minOccurs': u'0'}, None), - MemberSpec_('compound_inputs', 'CompoundInput', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'CompoundInput', u'name': u'compoundInput', u'minOccurs': u'0'}, None), - MemberSpec_('compound_input_dls', 'CompoundInputDL', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'CompoundInputDL', u'name': u'compoundInputDL', u'minOccurs': u'0'}, None), - MemberSpec_('voltage_clamps', 'VoltageClamp', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'VoltageClamp', u'name': u'voltageClamp', u'minOccurs': u'0'}, None), - MemberSpec_('voltage_clamp_triples', 'VoltageClampTriple', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'VoltageClampTriple', u'name': u'voltageClampTriple', u'minOccurs': u'0'}, None), - MemberSpec_('spike_arrays', 'SpikeArray', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'SpikeArray', u'name': u'spikeArray', u'minOccurs': u'0'}, None), - MemberSpec_('timed_synaptic_inputs', 'TimedSynapticInput', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'TimedSynapticInput', u'name': u'timedSynapticInput', u'minOccurs': u'0'}, None), - MemberSpec_('spike_generators', 'SpikeGenerator', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'SpikeGenerator', u'name': u'spikeGenerator', u'minOccurs': u'0'}, None), - MemberSpec_('spike_generator_randoms', 'SpikeGeneratorRandom', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'SpikeGeneratorRandom', u'name': u'spikeGeneratorRandom', u'minOccurs': u'0'}, None), - MemberSpec_('spike_generator_poissons', 'SpikeGeneratorPoisson', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'SpikeGeneratorPoisson', u'name': u'spikeGeneratorPoisson', u'minOccurs': u'0'}, None), - MemberSpec_('spike_generator_ref_poissons', 'SpikeGeneratorRefPoisson', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'SpikeGeneratorRefPoisson', u'name': u'spikeGeneratorRefPoisson', u'minOccurs': u'0'}, None), - MemberSpec_('poisson_firing_synapses', 'PoissonFiringSynapse', 1, 1, {u'maxOccurs': u'unbounded', u'type': 
u'PoissonFiringSynapse', u'name': u'poissonFiringSynapse', u'minOccurs': u'0'}, None), - MemberSpec_('transient_poisson_firing_synapses', 'TransientPoissonFiringSynapse', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'TransientPoissonFiringSynapse', u'name': u'transientPoissonFiringSynapse', u'minOccurs': u'0'}, None), - MemberSpec_('IF_curr_alpha', 'IF_curr_alpha', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'IF_curr_alpha', u'name': u'IF_curr_alpha', u'minOccurs': u'0'}, None), - MemberSpec_('IF_curr_exp', 'IF_curr_exp', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'IF_curr_exp', u'name': u'IF_curr_exp', u'minOccurs': u'0'}, None), - MemberSpec_('IF_cond_alpha', 'IF_cond_alpha', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'IF_cond_alpha', u'name': u'IF_cond_alpha', u'minOccurs': u'0'}, None), - MemberSpec_('IF_cond_exp', 'IF_cond_exp', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'IF_cond_exp', u'name': u'IF_cond_exp', u'minOccurs': u'0'}, None), - MemberSpec_('EIF_cond_exp_isfa_ista', 'EIF_cond_exp_isfa_ista', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'EIF_cond_exp_isfa_ista', u'name': u'EIF_cond_exp_isfa_ista', u'minOccurs': u'0'}, None), - MemberSpec_('EIF_cond_alpha_isfa_ista', 'EIF_cond_alpha_isfa_ista', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'EIF_cond_alpha_isfa_ista', u'name': u'EIF_cond_alpha_isfa_ista', u'minOccurs': u'0'}, None), - MemberSpec_('HH_cond_exp', 'HH_cond_exp', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'HH_cond_exp', u'name': u'HH_cond_exp', u'minOccurs': u'0'}, None), - MemberSpec_('exp_cond_synapses', 'ExpCondSynapse', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ExpCondSynapse', u'name': u'expCondSynapse', u'minOccurs': u'0'}, None), - MemberSpec_('alpha_cond_synapses', 'AlphaCondSynapse', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'AlphaCondSynapse', u'name': u'alphaCondSynapse', u'minOccurs': u'0'}, None), - MemberSpec_('exp_curr_synapses', 'ExpCurrSynapse', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ExpCurrSynapse', u'name': u'expCurrSynapse', u'minOccurs': u'0'}, None), - MemberSpec_('alpha_curr_synapses', 'AlphaCurrSynapse', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'AlphaCurrSynapse', u'name': u'alphaCurrSynapse', u'minOccurs': u'0'}, None), - MemberSpec_('SpikeSourcePoisson', 'SpikeSourcePoisson', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'SpikeSourcePoisson', u'name': u'SpikeSourcePoisson', u'minOccurs': u'0'}, None), - MemberSpec_('networks', 'Network', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Network', u'name': u'network', u'minOccurs': u'0'}, None), - MemberSpec_('ComponentType', 'ComponentType', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ComponentType', u'name': u'ComponentType', u'minOccurs': u'0'}, None), + MemberSpec_('includes', 'IncludeType', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'include', 'type': 'IncludeType'}, None), + MemberSpec_('extracellular_properties', 'ExtracellularProperties', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'extracellularProperties', 'type': 'ExtracellularProperties'}, None), + MemberSpec_('intracellular_properties', 'IntracellularProperties', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'intracellularProperties', 'type': 'IntracellularProperties'}, None), + MemberSpec_('morphology', 'Morphology', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'morphology', 'type': 'Morphology'}, None), + MemberSpec_('ion_channel', 'IonChannel', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'ionChannel', 'type': 'IonChannel'}, 
None), + MemberSpec_('ion_channel_hhs', 'IonChannelHH', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'ionChannelHH', 'type': 'IonChannelHH'}, None), + MemberSpec_('ion_channel_v_shifts', 'IonChannelVShift', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'ionChannelVShift', 'type': 'IonChannelVShift'}, None), + MemberSpec_('ion_channel_kses', 'IonChannelKS', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'ionChannelKS', 'type': 'IonChannelKS'}, None), + MemberSpec_('decaying_pool_concentration_models', 'DecayingPoolConcentrationModel', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'decayingPoolConcentrationModel', 'type': 'DecayingPoolConcentrationModel'}, None), + MemberSpec_('fixed_factor_concentration_models', 'FixedFactorConcentrationModel', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'fixedFactorConcentrationModel', 'type': 'FixedFactorConcentrationModel'}, None), + MemberSpec_('alpha_current_synapses', 'AlphaCurrentSynapse', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'alphaCurrentSynapse', 'type': 'AlphaCurrentSynapse'}, None), + MemberSpec_('alpha_synapses', 'AlphaSynapse', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'alphaSynapse', 'type': 'AlphaSynapse'}, None), + MemberSpec_('exp_one_synapses', 'ExpOneSynapse', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'expOneSynapse', 'type': 'ExpOneSynapse'}, None), + MemberSpec_('exp_two_synapses', 'ExpTwoSynapse', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'expTwoSynapse', 'type': 'ExpTwoSynapse'}, None), + MemberSpec_('exp_three_synapses', 'ExpThreeSynapse', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'expThreeSynapse', 'type': 'ExpThreeSynapse'}, None), + MemberSpec_('blocking_plastic_synapses', 'BlockingPlasticSynapse', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'blockingPlasticSynapse', 'type': 'BlockingPlasticSynapse'}, None), + MemberSpec_('double_synapses', 'DoubleSynapse', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'doubleSynapse', 'type': 'DoubleSynapse'}, None), + MemberSpec_('gap_junctions', 'GapJunction', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'gapJunction', 'type': 'GapJunction'}, None), + MemberSpec_('silent_synapses', 'SilentSynapse', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'silentSynapse', 'type': 'SilentSynapse'}, None), + MemberSpec_('linear_graded_synapses', 'LinearGradedSynapse', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'linearGradedSynapse', 'type': 'LinearGradedSynapse'}, None), + MemberSpec_('graded_synapses', 'GradedSynapse', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'gradedSynapse', 'type': 'GradedSynapse'}, None), + MemberSpec_('biophysical_properties', 'BiophysicalProperties', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'biophysicalProperties', 'type': 'BiophysicalProperties'}, None), + MemberSpec_('cells', 'Cell', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'cell', 'type': 'Cell'}, None), + MemberSpec_('cell2_ca_poolses', 'Cell2CaPools', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'cell2CaPools', 'type': 'Cell2CaPools'}, None), + MemberSpec_('base_cells', 'BaseCell', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'baseCell', 'type': 'BaseCell'}, None), + MemberSpec_('iaf_tau_cells', 'IafTauCell', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'iafTauCell', 'type': 'IafTauCell'}, None), + 
MemberSpec_('iaf_tau_ref_cells', 'IafTauRefCell', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'iafTauRefCell', 'type': 'IafTauRefCell'}, None), + MemberSpec_('iaf_cells', 'IafCell', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'iafCell', 'type': 'IafCell'}, None), + MemberSpec_('iaf_ref_cells', 'IafRefCell', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'iafRefCell', 'type': 'IafRefCell'}, None), + MemberSpec_('izhikevich_cells', 'IzhikevichCell', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'izhikevichCell', 'type': 'IzhikevichCell'}, None), + MemberSpec_('izhikevich2007_cells', 'Izhikevich2007Cell', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'izhikevich2007Cell', 'type': 'Izhikevich2007Cell'}, None), + MemberSpec_('ad_ex_ia_f_cells', 'AdExIaFCell', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'adExIaFCell', 'type': 'AdExIaFCell'}, None), + MemberSpec_('fitz_hugh_nagumo_cells', 'FitzHughNagumoCell', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'fitzHughNagumoCell', 'type': 'FitzHughNagumoCell'}, None), + MemberSpec_('fitz_hugh_nagumo1969_cells', 'FitzHughNagumo1969Cell', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'fitzHughNagumo1969Cell', 'type': 'FitzHughNagumo1969Cell'}, None), + MemberSpec_('pinsky_rinzel_ca3_cells', 'PinskyRinzelCA3Cell', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'pinskyRinzelCA3Cell', 'type': 'PinskyRinzelCA3Cell'}, None), + MemberSpec_('pulse_generators', 'PulseGenerator', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'pulseGenerator', 'type': 'PulseGenerator'}, None), + MemberSpec_('pulse_generator_dls', 'PulseGeneratorDL', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'pulseGeneratorDL', 'type': 'PulseGeneratorDL'}, None), + MemberSpec_('sine_generators', 'SineGenerator', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'sineGenerator', 'type': 'SineGenerator'}, None), + MemberSpec_('sine_generator_dls', 'SineGeneratorDL', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'sineGeneratorDL', 'type': 'SineGeneratorDL'}, None), + MemberSpec_('ramp_generators', 'RampGenerator', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'rampGenerator', 'type': 'RampGenerator'}, None), + MemberSpec_('ramp_generator_dls', 'RampGeneratorDL', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'rampGeneratorDL', 'type': 'RampGeneratorDL'}, None), + MemberSpec_('compound_inputs', 'CompoundInput', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'compoundInput', 'type': 'CompoundInput'}, None), + MemberSpec_('compound_input_dls', 'CompoundInputDL', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'compoundInputDL', 'type': 'CompoundInputDL'}, None), + MemberSpec_('voltage_clamps', 'VoltageClamp', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'voltageClamp', 'type': 'VoltageClamp'}, None), + MemberSpec_('voltage_clamp_triples', 'VoltageClampTriple', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'voltageClampTriple', 'type': 'VoltageClampTriple'}, None), + MemberSpec_('spike_arrays', 'SpikeArray', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'spikeArray', 'type': 'SpikeArray'}, None), + MemberSpec_('timed_synaptic_inputs', 'TimedSynapticInput', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'timedSynapticInput', 'type': 'TimedSynapticInput'}, None), + MemberSpec_('spike_generators', 'SpikeGenerator', 1, 1, {'maxOccurs': 
'unbounded', 'minOccurs': '0', 'name': 'spikeGenerator', 'type': 'SpikeGenerator'}, None), + MemberSpec_('spike_generator_randoms', 'SpikeGeneratorRandom', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'spikeGeneratorRandom', 'type': 'SpikeGeneratorRandom'}, None), + MemberSpec_('spike_generator_poissons', 'SpikeGeneratorPoisson', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'spikeGeneratorPoisson', 'type': 'SpikeGeneratorPoisson'}, None), + MemberSpec_('spike_generator_ref_poissons', 'SpikeGeneratorRefPoisson', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'spikeGeneratorRefPoisson', 'type': 'SpikeGeneratorRefPoisson'}, None), + MemberSpec_('poisson_firing_synapses', 'PoissonFiringSynapse', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'poissonFiringSynapse', 'type': 'PoissonFiringSynapse'}, None), + MemberSpec_('transient_poisson_firing_synapses', 'TransientPoissonFiringSynapse', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'transientPoissonFiringSynapse', 'type': 'TransientPoissonFiringSynapse'}, None), + MemberSpec_('IF_curr_alpha', 'IF_curr_alpha', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'IF_curr_alpha', 'type': 'IF_curr_alpha'}, None), + MemberSpec_('IF_curr_exp', 'IF_curr_exp', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'IF_curr_exp', 'type': 'IF_curr_exp'}, None), + MemberSpec_('IF_cond_alpha', 'IF_cond_alpha', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'IF_cond_alpha', 'type': 'IF_cond_alpha'}, None), + MemberSpec_('IF_cond_exp', 'IF_cond_exp', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'IF_cond_exp', 'type': 'IF_cond_exp'}, None), + MemberSpec_('EIF_cond_exp_isfa_ista', 'EIF_cond_exp_isfa_ista', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'EIF_cond_exp_isfa_ista', 'type': 'EIF_cond_exp_isfa_ista'}, None), + MemberSpec_('EIF_cond_alpha_isfa_ista', 'EIF_cond_alpha_isfa_ista', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'EIF_cond_alpha_isfa_ista', 'type': 'EIF_cond_alpha_isfa_ista'}, None), + MemberSpec_('HH_cond_exp', 'HH_cond_exp', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'HH_cond_exp', 'type': 'HH_cond_exp'}, None), + MemberSpec_('exp_cond_synapses', 'ExpCondSynapse', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'expCondSynapse', 'type': 'ExpCondSynapse'}, None), + MemberSpec_('alpha_cond_synapses', 'AlphaCondSynapse', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'alphaCondSynapse', 'type': 'AlphaCondSynapse'}, None), + MemberSpec_('exp_curr_synapses', 'ExpCurrSynapse', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'expCurrSynapse', 'type': 'ExpCurrSynapse'}, None), + MemberSpec_('alpha_curr_synapses', 'AlphaCurrSynapse', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'alphaCurrSynapse', 'type': 'AlphaCurrSynapse'}, None), + MemberSpec_('SpikeSourcePoisson', 'SpikeSourcePoisson', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'SpikeSourcePoisson', 'type': 'SpikeSourcePoisson'}, None), + MemberSpec_('networks', 'Network', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'network', 'type': 'Network'}, None), + MemberSpec_('ComponentType', 'ComponentType', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'ComponentType', 'type': 'ComponentType'}, None), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, includes=None, 
extracellular_properties=None, intracellular_properties=None, morphology=None, ion_channel=None, ion_channel_hhs=None, ion_channel_v_shifts=None, ion_channel_kses=None, decaying_pool_concentration_models=None, fixed_factor_concentration_models=None, alpha_current_synapses=None, alpha_synapses=None, exp_one_synapses=None, exp_two_synapses=None, exp_three_synapses=None, blocking_plastic_synapses=None, double_synapses=None, gap_junctions=None, silent_synapses=None, linear_graded_synapses=None, graded_synapses=None, biophysical_properties=None, cells=None, cell2_ca_poolses=None, base_cells=None, iaf_tau_cells=None, iaf_tau_ref_cells=None, iaf_cells=None, iaf_ref_cells=None, izhikevich_cells=None, izhikevich2007_cells=None, ad_ex_ia_f_cells=None, fitz_hugh_nagumo_cells=None, fitz_hugh_nagumo1969_cells=None, pinsky_rinzel_ca3_cells=None, pulse_generators=None, pulse_generator_dls=None, sine_generators=None, sine_generator_dls=None, ramp_generators=None, ramp_generator_dls=None, compound_inputs=None, compound_input_dls=None, voltage_clamps=None, voltage_clamp_triples=None, spike_arrays=None, timed_synaptic_inputs=None, spike_generators=None, spike_generator_randoms=None, spike_generator_poissons=None, spike_generator_ref_poissons=None, poisson_firing_synapses=None, transient_poisson_firing_synapses=None, IF_curr_alpha=None, IF_curr_exp=None, IF_cond_alpha=None, IF_cond_exp=None, EIF_cond_exp_isfa_ista=None, EIF_cond_alpha_isfa_ista=None, HH_cond_exp=None, exp_cond_synapses=None, alpha_cond_synapses=None, exp_curr_synapses=None, alpha_curr_synapses=None, SpikeSourcePoisson=None, networks=None, ComponentType=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, includes=None, extracellular_properties=None, intracellular_properties=None, morphology=None, ion_channel=None, ion_channel_hhs=None, ion_channel_v_shifts=None, ion_channel_kses=None, decaying_pool_concentration_models=None, fixed_factor_concentration_models=None, alpha_current_synapses=None, alpha_synapses=None, exp_one_synapses=None, exp_two_synapses=None, exp_three_synapses=None, blocking_plastic_synapses=None, double_synapses=None, gap_junctions=None, silent_synapses=None, linear_graded_synapses=None, graded_synapses=None, biophysical_properties=None, cells=None, cell2_ca_poolses=None, base_cells=None, iaf_tau_cells=None, iaf_tau_ref_cells=None, iaf_cells=None, iaf_ref_cells=None, izhikevich_cells=None, izhikevich2007_cells=None, ad_ex_ia_f_cells=None, fitz_hugh_nagumo_cells=None, fitz_hugh_nagumo1969_cells=None, pinsky_rinzel_ca3_cells=None, pulse_generators=None, pulse_generator_dls=None, sine_generators=None, sine_generator_dls=None, ramp_generators=None, ramp_generator_dls=None, compound_inputs=None, compound_input_dls=None, voltage_clamps=None, voltage_clamp_triples=None, spike_arrays=None, timed_synaptic_inputs=None, spike_generators=None, spike_generator_randoms=None, spike_generator_poissons=None, spike_generator_ref_poissons=None, poisson_firing_synapses=None, transient_poisson_firing_synapses=None, IF_curr_alpha=None, IF_curr_exp=None, IF_cond_alpha=None, IF_cond_exp=None, EIF_cond_exp_isfa_ista=None, EIF_cond_alpha_isfa_ista=None, HH_cond_exp=None, exp_cond_synapses=None, alpha_cond_synapses=None, exp_curr_synapses=None, alpha_curr_synapses=None, SpikeSourcePoisson=None, networks=None, ComponentType=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None 
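The regenerated member table above differs from the old output mainly in form: the Python 2 u'' string prefixes are dropped and the per-member dicts are reordered, with no change to the element names or types. The one real signature change is the trailing gds_collector_ keyword on __init__, which defaults to None, so existing construction code keeps working. A minimal sketch of this, assuming the regenerated nml.py from this patch is importable through the neuroml package:

    from neuroml import NeuroMLDocument, Network

    doc = NeuroMLDocument(id="sample_doc")     # gds_collector_ defaults to None
    doc.networks.append(Network(id="net0"))    # list-valued members still default to []

    assert doc.gds_collector_ is None
    assert doc.networks[0].id == "net0"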
self.parent_object_ = kwargs_.get('parent_object_') - super(NeuroMLDocument, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.ns_prefix_ = None + super(globals().get("NeuroMLDocument"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) if includes is None: self.includes = [] else: self.includes = includes + self.includes_nsprefix_ = None if extracellular_properties is None: self.extracellular_properties = [] else: self.extracellular_properties = extracellular_properties + self.extracellular_properties_nsprefix_ = None if intracellular_properties is None: self.intracellular_properties = [] else: self.intracellular_properties = intracellular_properties + self.intracellular_properties_nsprefix_ = None if morphology is None: self.morphology = [] else: self.morphology = morphology + self.morphology_nsprefix_ = None if ion_channel is None: self.ion_channel = [] else: self.ion_channel = ion_channel + self.ion_channel_nsprefix_ = None if ion_channel_hhs is None: self.ion_channel_hhs = [] else: self.ion_channel_hhs = ion_channel_hhs + self.ion_channel_hhs_nsprefix_ = None if ion_channel_v_shifts is None: self.ion_channel_v_shifts = [] else: self.ion_channel_v_shifts = ion_channel_v_shifts + self.ion_channel_v_shifts_nsprefix_ = None if ion_channel_kses is None: self.ion_channel_kses = [] else: self.ion_channel_kses = ion_channel_kses + self.ion_channel_kses_nsprefix_ = None if decaying_pool_concentration_models is None: self.decaying_pool_concentration_models = [] else: self.decaying_pool_concentration_models = decaying_pool_concentration_models + self.decaying_pool_concentration_models_nsprefix_ = None if fixed_factor_concentration_models is None: self.fixed_factor_concentration_models = [] else: self.fixed_factor_concentration_models = fixed_factor_concentration_models + self.fixed_factor_concentration_models_nsprefix_ = None if alpha_current_synapses is None: self.alpha_current_synapses = [] else: self.alpha_current_synapses = alpha_current_synapses + self.alpha_current_synapses_nsprefix_ = None if alpha_synapses is None: self.alpha_synapses = [] else: self.alpha_synapses = alpha_synapses + self.alpha_synapses_nsprefix_ = None if exp_one_synapses is None: self.exp_one_synapses = [] else: self.exp_one_synapses = exp_one_synapses + self.exp_one_synapses_nsprefix_ = None if exp_two_synapses is None: self.exp_two_synapses = [] else: self.exp_two_synapses = exp_two_synapses + self.exp_two_synapses_nsprefix_ = None if exp_three_synapses is None: self.exp_three_synapses = [] else: self.exp_three_synapses = exp_three_synapses + self.exp_three_synapses_nsprefix_ = None if blocking_plastic_synapses is None: self.blocking_plastic_synapses = [] else: self.blocking_plastic_synapses = blocking_plastic_synapses + self.blocking_plastic_synapses_nsprefix_ = None if double_synapses is None: self.double_synapses = [] else: self.double_synapses = double_synapses + self.double_synapses_nsprefix_ = None if gap_junctions is None: self.gap_junctions = [] else: self.gap_junctions = gap_junctions + self.gap_junctions_nsprefix_ = None if silent_synapses is None: self.silent_synapses = [] else: self.silent_synapses = silent_synapses + self.silent_synapses_nsprefix_ = None if linear_graded_synapses is None: self.linear_graded_synapses = [] else: self.linear_graded_synapses = linear_graded_synapses + self.linear_graded_synapses_nsprefix_ = None if graded_synapses is None: self.graded_synapses = [] else: self.graded_synapses = graded_synapses + 
self.graded_synapses_nsprefix_ = None if biophysical_properties is None: self.biophysical_properties = [] else: self.biophysical_properties = biophysical_properties + self.biophysical_properties_nsprefix_ = None if cells is None: self.cells = [] else: self.cells = cells + self.cells_nsprefix_ = None if cell2_ca_poolses is None: self.cell2_ca_poolses = [] else: self.cell2_ca_poolses = cell2_ca_poolses + self.cell2_ca_poolses_nsprefix_ = None if base_cells is None: self.base_cells = [] else: self.base_cells = base_cells + self.base_cells_nsprefix_ = None if iaf_tau_cells is None: self.iaf_tau_cells = [] else: self.iaf_tau_cells = iaf_tau_cells + self.iaf_tau_cells_nsprefix_ = None if iaf_tau_ref_cells is None: self.iaf_tau_ref_cells = [] else: self.iaf_tau_ref_cells = iaf_tau_ref_cells + self.iaf_tau_ref_cells_nsprefix_ = None if iaf_cells is None: self.iaf_cells = [] else: self.iaf_cells = iaf_cells + self.iaf_cells_nsprefix_ = None if iaf_ref_cells is None: self.iaf_ref_cells = [] else: self.iaf_ref_cells = iaf_ref_cells + self.iaf_ref_cells_nsprefix_ = None if izhikevich_cells is None: self.izhikevich_cells = [] else: self.izhikevich_cells = izhikevich_cells + self.izhikevich_cells_nsprefix_ = None if izhikevich2007_cells is None: self.izhikevich2007_cells = [] else: self.izhikevich2007_cells = izhikevich2007_cells + self.izhikevich2007_cells_nsprefix_ = None if ad_ex_ia_f_cells is None: self.ad_ex_ia_f_cells = [] else: self.ad_ex_ia_f_cells = ad_ex_ia_f_cells + self.ad_ex_ia_f_cells_nsprefix_ = None if fitz_hugh_nagumo_cells is None: self.fitz_hugh_nagumo_cells = [] else: self.fitz_hugh_nagumo_cells = fitz_hugh_nagumo_cells + self.fitz_hugh_nagumo_cells_nsprefix_ = None if fitz_hugh_nagumo1969_cells is None: self.fitz_hugh_nagumo1969_cells = [] else: self.fitz_hugh_nagumo1969_cells = fitz_hugh_nagumo1969_cells + self.fitz_hugh_nagumo1969_cells_nsprefix_ = None if pinsky_rinzel_ca3_cells is None: self.pinsky_rinzel_ca3_cells = [] else: self.pinsky_rinzel_ca3_cells = pinsky_rinzel_ca3_cells + self.pinsky_rinzel_ca3_cells_nsprefix_ = None if pulse_generators is None: self.pulse_generators = [] else: self.pulse_generators = pulse_generators + self.pulse_generators_nsprefix_ = None if pulse_generator_dls is None: self.pulse_generator_dls = [] else: self.pulse_generator_dls = pulse_generator_dls + self.pulse_generator_dls_nsprefix_ = None if sine_generators is None: self.sine_generators = [] else: self.sine_generators = sine_generators + self.sine_generators_nsprefix_ = None if sine_generator_dls is None: self.sine_generator_dls = [] else: self.sine_generator_dls = sine_generator_dls + self.sine_generator_dls_nsprefix_ = None if ramp_generators is None: self.ramp_generators = [] else: self.ramp_generators = ramp_generators + self.ramp_generators_nsprefix_ = None if ramp_generator_dls is None: self.ramp_generator_dls = [] else: self.ramp_generator_dls = ramp_generator_dls + self.ramp_generator_dls_nsprefix_ = None if compound_inputs is None: self.compound_inputs = [] else: self.compound_inputs = compound_inputs + self.compound_inputs_nsprefix_ = None if compound_input_dls is None: self.compound_input_dls = [] else: self.compound_input_dls = compound_input_dls + self.compound_input_dls_nsprefix_ = None if voltage_clamps is None: self.voltage_clamps = [] else: self.voltage_clamps = voltage_clamps + self.voltage_clamps_nsprefix_ = None if voltage_clamp_triples is None: self.voltage_clamp_triples = [] else: self.voltage_clamp_triples = voltage_clamp_triples + self.voltage_clamp_triples_nsprefix_ = 
None if spike_arrays is None: self.spike_arrays = [] else: self.spike_arrays = spike_arrays + self.spike_arrays_nsprefix_ = None if timed_synaptic_inputs is None: self.timed_synaptic_inputs = [] else: self.timed_synaptic_inputs = timed_synaptic_inputs + self.timed_synaptic_inputs_nsprefix_ = None if spike_generators is None: self.spike_generators = [] else: self.spike_generators = spike_generators + self.spike_generators_nsprefix_ = None if spike_generator_randoms is None: self.spike_generator_randoms = [] else: self.spike_generator_randoms = spike_generator_randoms + self.spike_generator_randoms_nsprefix_ = None if spike_generator_poissons is None: self.spike_generator_poissons = [] else: self.spike_generator_poissons = spike_generator_poissons + self.spike_generator_poissons_nsprefix_ = None if spike_generator_ref_poissons is None: self.spike_generator_ref_poissons = [] else: self.spike_generator_ref_poissons = spike_generator_ref_poissons + self.spike_generator_ref_poissons_nsprefix_ = None if poisson_firing_synapses is None: self.poisson_firing_synapses = [] else: self.poisson_firing_synapses = poisson_firing_synapses + self.poisson_firing_synapses_nsprefix_ = None if transient_poisson_firing_synapses is None: self.transient_poisson_firing_synapses = [] else: self.transient_poisson_firing_synapses = transient_poisson_firing_synapses + self.transient_poisson_firing_synapses_nsprefix_ = None if IF_curr_alpha is None: self.IF_curr_alpha = [] else: self.IF_curr_alpha = IF_curr_alpha + self.IF_curr_alpha_nsprefix_ = None if IF_curr_exp is None: self.IF_curr_exp = [] else: self.IF_curr_exp = IF_curr_exp + self.IF_curr_exp_nsprefix_ = None if IF_cond_alpha is None: self.IF_cond_alpha = [] else: self.IF_cond_alpha = IF_cond_alpha + self.IF_cond_alpha_nsprefix_ = None if IF_cond_exp is None: self.IF_cond_exp = [] else: self.IF_cond_exp = IF_cond_exp + self.IF_cond_exp_nsprefix_ = None if EIF_cond_exp_isfa_ista is None: self.EIF_cond_exp_isfa_ista = [] else: self.EIF_cond_exp_isfa_ista = EIF_cond_exp_isfa_ista + self.EIF_cond_exp_isfa_ista_nsprefix_ = None if EIF_cond_alpha_isfa_ista is None: self.EIF_cond_alpha_isfa_ista = [] else: self.EIF_cond_alpha_isfa_ista = EIF_cond_alpha_isfa_ista + self.EIF_cond_alpha_isfa_ista_nsprefix_ = None if HH_cond_exp is None: self.HH_cond_exp = [] else: self.HH_cond_exp = HH_cond_exp + self.HH_cond_exp_nsprefix_ = None if exp_cond_synapses is None: self.exp_cond_synapses = [] else: self.exp_cond_synapses = exp_cond_synapses + self.exp_cond_synapses_nsprefix_ = None if alpha_cond_synapses is None: self.alpha_cond_synapses = [] else: self.alpha_cond_synapses = alpha_cond_synapses + self.alpha_cond_synapses_nsprefix_ = None if exp_curr_synapses is None: self.exp_curr_synapses = [] else: self.exp_curr_synapses = exp_curr_synapses + self.exp_curr_synapses_nsprefix_ = None if alpha_curr_synapses is None: self.alpha_curr_synapses = [] else: self.alpha_curr_synapses = alpha_curr_synapses + self.alpha_curr_synapses_nsprefix_ = None if SpikeSourcePoisson is None: self.SpikeSourcePoisson = [] else: self.SpikeSourcePoisson = SpikeSourcePoisson + self.SpikeSourcePoisson_nsprefix_ = None if networks is None: self.networks = [] else: self.networks = networks + self.networks_nsprefix_ = None if ComponentType is None: self.ComponentType = [] else: self.ComponentType = ComponentType + self.ComponentType_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -15558,7 +20881,681 @@ def factory(*args_, 
**kwargs_): else: return NeuroMLDocument(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_include(self): + return self.includes + def set_include(self, includes): + self.includes = includes + def add_include(self, value): + self.includes.append(value) + def insert_include_at(self, index, value): + self.includes.insert(index, value) + def replace_include_at(self, index, value): + self.includes[index] = value + def get_extracellularProperties(self): + return self.extracellular_properties + def set_extracellularProperties(self, extracellular_properties): + self.extracellular_properties = extracellular_properties + def add_extracellularProperties(self, value): + self.extracellular_properties.append(value) + def insert_extracellularProperties_at(self, index, value): + self.extracellular_properties.insert(index, value) + def replace_extracellularProperties_at(self, index, value): + self.extracellular_properties[index] = value + def get_intracellularProperties(self): + return self.intracellular_properties + def set_intracellularProperties(self, intracellular_properties): + self.intracellular_properties = intracellular_properties + def add_intracellularProperties(self, value): + self.intracellular_properties.append(value) + def insert_intracellularProperties_at(self, index, value): + self.intracellular_properties.insert(index, value) + def replace_intracellularProperties_at(self, index, value): + self.intracellular_properties[index] = value + def get_morphology(self): + return self.morphology + def set_morphology(self, morphology): + self.morphology = morphology + def add_morphology(self, value): + self.morphology.append(value) + def insert_morphology_at(self, index, value): + self.morphology.insert(index, value) + def replace_morphology_at(self, index, value): + self.morphology[index] = value + def get_ionChannel(self): + return self.ion_channel + def set_ionChannel(self, ion_channel): + self.ion_channel = ion_channel + def add_ionChannel(self, value): + self.ion_channel.append(value) + def insert_ionChannel_at(self, index, value): + self.ion_channel.insert(index, value) + def replace_ionChannel_at(self, index, value): + self.ion_channel[index] = value + def get_ionChannelHH(self): + return self.ion_channel_hhs + def set_ionChannelHH(self, ion_channel_hhs): + self.ion_channel_hhs = ion_channel_hhs + def add_ionChannelHH(self, value): + self.ion_channel_hhs.append(value) + def insert_ionChannelHH_at(self, index, value): + self.ion_channel_hhs.insert(index, value) + def replace_ionChannelHH_at(self, index, value): + self.ion_channel_hhs[index] = value + def get_ionChannelVShift(self): + return self.ion_channel_v_shifts + def set_ionChannelVShift(self, ion_channel_v_shifts): + self.ion_channel_v_shifts = ion_channel_v_shifts + def add_ionChannelVShift(self, value): + self.ion_channel_v_shifts.append(value) + def insert_ionChannelVShift_at(self, index, value): + self.ion_channel_v_shifts.insert(index, value) + def replace_ionChannelVShift_at(self, index, value): + self.ion_channel_v_shifts[index] = value + def get_ionChannelKS(self): + return self.ion_channel_kses + def set_ionChannelKS(self, ion_channel_kses): + self.ion_channel_kses = ion_channel_kses + def add_ionChannelKS(self, value): + self.ion_channel_kses.append(value) + def insert_ionChannelKS_at(self, index, value): + self.ion_channel_kses.insert(index, value) + def 
replace_ionChannelKS_at(self, index, value): + self.ion_channel_kses[index] = value + def get_decayingPoolConcentrationModel(self): + return self.decaying_pool_concentration_models + def set_decayingPoolConcentrationModel(self, decaying_pool_concentration_models): + self.decaying_pool_concentration_models = decaying_pool_concentration_models + def add_decayingPoolConcentrationModel(self, value): + self.decaying_pool_concentration_models.append(value) + def insert_decayingPoolConcentrationModel_at(self, index, value): + self.decaying_pool_concentration_models.insert(index, value) + def replace_decayingPoolConcentrationModel_at(self, index, value): + self.decaying_pool_concentration_models[index] = value + def get_fixedFactorConcentrationModel(self): + return self.fixed_factor_concentration_models + def set_fixedFactorConcentrationModel(self, fixed_factor_concentration_models): + self.fixed_factor_concentration_models = fixed_factor_concentration_models + def add_fixedFactorConcentrationModel(self, value): + self.fixed_factor_concentration_models.append(value) + def insert_fixedFactorConcentrationModel_at(self, index, value): + self.fixed_factor_concentration_models.insert(index, value) + def replace_fixedFactorConcentrationModel_at(self, index, value): + self.fixed_factor_concentration_models[index] = value + def get_alphaCurrentSynapse(self): + return self.alpha_current_synapses + def set_alphaCurrentSynapse(self, alpha_current_synapses): + self.alpha_current_synapses = alpha_current_synapses + def add_alphaCurrentSynapse(self, value): + self.alpha_current_synapses.append(value) + def insert_alphaCurrentSynapse_at(self, index, value): + self.alpha_current_synapses.insert(index, value) + def replace_alphaCurrentSynapse_at(self, index, value): + self.alpha_current_synapses[index] = value + def get_alphaSynapse(self): + return self.alpha_synapses + def set_alphaSynapse(self, alpha_synapses): + self.alpha_synapses = alpha_synapses + def add_alphaSynapse(self, value): + self.alpha_synapses.append(value) + def insert_alphaSynapse_at(self, index, value): + self.alpha_synapses.insert(index, value) + def replace_alphaSynapse_at(self, index, value): + self.alpha_synapses[index] = value + def get_expOneSynapse(self): + return self.exp_one_synapses + def set_expOneSynapse(self, exp_one_synapses): + self.exp_one_synapses = exp_one_synapses + def add_expOneSynapse(self, value): + self.exp_one_synapses.append(value) + def insert_expOneSynapse_at(self, index, value): + self.exp_one_synapses.insert(index, value) + def replace_expOneSynapse_at(self, index, value): + self.exp_one_synapses[index] = value + def get_expTwoSynapse(self): + return self.exp_two_synapses + def set_expTwoSynapse(self, exp_two_synapses): + self.exp_two_synapses = exp_two_synapses + def add_expTwoSynapse(self, value): + self.exp_two_synapses.append(value) + def insert_expTwoSynapse_at(self, index, value): + self.exp_two_synapses.insert(index, value) + def replace_expTwoSynapse_at(self, index, value): + self.exp_two_synapses[index] = value + def get_expThreeSynapse(self): + return self.exp_three_synapses + def set_expThreeSynapse(self, exp_three_synapses): + self.exp_three_synapses = exp_three_synapses + def add_expThreeSynapse(self, value): + self.exp_three_synapses.append(value) + def insert_expThreeSynapse_at(self, index, value): + self.exp_three_synapses.insert(index, value) + def replace_expThreeSynapse_at(self, index, value): + self.exp_three_synapses[index] = value + def get_blockingPlasticSynapse(self): + return 
self.blocking_plastic_synapses + def set_blockingPlasticSynapse(self, blocking_plastic_synapses): + self.blocking_plastic_synapses = blocking_plastic_synapses + def add_blockingPlasticSynapse(self, value): + self.blocking_plastic_synapses.append(value) + def insert_blockingPlasticSynapse_at(self, index, value): + self.blocking_plastic_synapses.insert(index, value) + def replace_blockingPlasticSynapse_at(self, index, value): + self.blocking_plastic_synapses[index] = value + def get_doubleSynapse(self): + return self.double_synapses + def set_doubleSynapse(self, double_synapses): + self.double_synapses = double_synapses + def add_doubleSynapse(self, value): + self.double_synapses.append(value) + def insert_doubleSynapse_at(self, index, value): + self.double_synapses.insert(index, value) + def replace_doubleSynapse_at(self, index, value): + self.double_synapses[index] = value + def get_gapJunction(self): + return self.gap_junctions + def set_gapJunction(self, gap_junctions): + self.gap_junctions = gap_junctions + def add_gapJunction(self, value): + self.gap_junctions.append(value) + def insert_gapJunction_at(self, index, value): + self.gap_junctions.insert(index, value) + def replace_gapJunction_at(self, index, value): + self.gap_junctions[index] = value + def get_silentSynapse(self): + return self.silent_synapses + def set_silentSynapse(self, silent_synapses): + self.silent_synapses = silent_synapses + def add_silentSynapse(self, value): + self.silent_synapses.append(value) + def insert_silentSynapse_at(self, index, value): + self.silent_synapses.insert(index, value) + def replace_silentSynapse_at(self, index, value): + self.silent_synapses[index] = value + def get_linearGradedSynapse(self): + return self.linear_graded_synapses + def set_linearGradedSynapse(self, linear_graded_synapses): + self.linear_graded_synapses = linear_graded_synapses + def add_linearGradedSynapse(self, value): + self.linear_graded_synapses.append(value) + def insert_linearGradedSynapse_at(self, index, value): + self.linear_graded_synapses.insert(index, value) + def replace_linearGradedSynapse_at(self, index, value): + self.linear_graded_synapses[index] = value + def get_gradedSynapse(self): + return self.graded_synapses + def set_gradedSynapse(self, graded_synapses): + self.graded_synapses = graded_synapses + def add_gradedSynapse(self, value): + self.graded_synapses.append(value) + def insert_gradedSynapse_at(self, index, value): + self.graded_synapses.insert(index, value) + def replace_gradedSynapse_at(self, index, value): + self.graded_synapses[index] = value + def get_biophysicalProperties(self): + return self.biophysical_properties + def set_biophysicalProperties(self, biophysical_properties): + self.biophysical_properties = biophysical_properties + def add_biophysicalProperties(self, value): + self.biophysical_properties.append(value) + def insert_biophysicalProperties_at(self, index, value): + self.biophysical_properties.insert(index, value) + def replace_biophysicalProperties_at(self, index, value): + self.biophysical_properties[index] = value + def get_cell(self): + return self.cells + def set_cell(self, cells): + self.cells = cells + def add_cell(self, value): + self.cells.append(value) + def insert_cell_at(self, index, value): + self.cells.insert(index, value) + def replace_cell_at(self, index, value): + self.cells[index] = value + def get_cell2CaPools(self): + return self.cell2_ca_poolses + def set_cell2CaPools(self, cell2_ca_poolses): + self.cell2_ca_poolses = cell2_ca_poolses + def 
add_cell2CaPools(self, value): + self.cell2_ca_poolses.append(value) + def insert_cell2CaPools_at(self, index, value): + self.cell2_ca_poolses.insert(index, value) + def replace_cell2CaPools_at(self, index, value): + self.cell2_ca_poolses[index] = value + def get_baseCell(self): + return self.base_cells + def set_baseCell(self, base_cells): + self.base_cells = base_cells + def add_baseCell(self, value): + self.base_cells.append(value) + def insert_baseCell_at(self, index, value): + self.base_cells.insert(index, value) + def replace_baseCell_at(self, index, value): + self.base_cells[index] = value + def get_iafTauCell(self): + return self.iaf_tau_cells + def set_iafTauCell(self, iaf_tau_cells): + self.iaf_tau_cells = iaf_tau_cells + def add_iafTauCell(self, value): + self.iaf_tau_cells.append(value) + def insert_iafTauCell_at(self, index, value): + self.iaf_tau_cells.insert(index, value) + def replace_iafTauCell_at(self, index, value): + self.iaf_tau_cells[index] = value + def get_iafTauRefCell(self): + return self.iaf_tau_ref_cells + def set_iafTauRefCell(self, iaf_tau_ref_cells): + self.iaf_tau_ref_cells = iaf_tau_ref_cells + def add_iafTauRefCell(self, value): + self.iaf_tau_ref_cells.append(value) + def insert_iafTauRefCell_at(self, index, value): + self.iaf_tau_ref_cells.insert(index, value) + def replace_iafTauRefCell_at(self, index, value): + self.iaf_tau_ref_cells[index] = value + def get_iafCell(self): + return self.iaf_cells + def set_iafCell(self, iaf_cells): + self.iaf_cells = iaf_cells + def add_iafCell(self, value): + self.iaf_cells.append(value) + def insert_iafCell_at(self, index, value): + self.iaf_cells.insert(index, value) + def replace_iafCell_at(self, index, value): + self.iaf_cells[index] = value + def get_iafRefCell(self): + return self.iaf_ref_cells + def set_iafRefCell(self, iaf_ref_cells): + self.iaf_ref_cells = iaf_ref_cells + def add_iafRefCell(self, value): + self.iaf_ref_cells.append(value) + def insert_iafRefCell_at(self, index, value): + self.iaf_ref_cells.insert(index, value) + def replace_iafRefCell_at(self, index, value): + self.iaf_ref_cells[index] = value + def get_izhikevichCell(self): + return self.izhikevich_cells + def set_izhikevichCell(self, izhikevich_cells): + self.izhikevich_cells = izhikevich_cells + def add_izhikevichCell(self, value): + self.izhikevich_cells.append(value) + def insert_izhikevichCell_at(self, index, value): + self.izhikevich_cells.insert(index, value) + def replace_izhikevichCell_at(self, index, value): + self.izhikevich_cells[index] = value + def get_izhikevich2007Cell(self): + return self.izhikevich2007_cells + def set_izhikevich2007Cell(self, izhikevich2007_cells): + self.izhikevich2007_cells = izhikevich2007_cells + def add_izhikevich2007Cell(self, value): + self.izhikevich2007_cells.append(value) + def insert_izhikevich2007Cell_at(self, index, value): + self.izhikevich2007_cells.insert(index, value) + def replace_izhikevich2007Cell_at(self, index, value): + self.izhikevich2007_cells[index] = value + def get_adExIaFCell(self): + return self.ad_ex_ia_f_cells + def set_adExIaFCell(self, ad_ex_ia_f_cells): + self.ad_ex_ia_f_cells = ad_ex_ia_f_cells + def add_adExIaFCell(self, value): + self.ad_ex_ia_f_cells.append(value) + def insert_adExIaFCell_at(self, index, value): + self.ad_ex_ia_f_cells.insert(index, value) + def replace_adExIaFCell_at(self, index, value): + self.ad_ex_ia_f_cells[index] = value + def get_fitzHughNagumoCell(self): + return self.fitz_hugh_nagumo_cells + def set_fitzHughNagumoCell(self, 
fitz_hugh_nagumo_cells): + self.fitz_hugh_nagumo_cells = fitz_hugh_nagumo_cells + def add_fitzHughNagumoCell(self, value): + self.fitz_hugh_nagumo_cells.append(value) + def insert_fitzHughNagumoCell_at(self, index, value): + self.fitz_hugh_nagumo_cells.insert(index, value) + def replace_fitzHughNagumoCell_at(self, index, value): + self.fitz_hugh_nagumo_cells[index] = value + def get_fitzHughNagumo1969Cell(self): + return self.fitz_hugh_nagumo1969_cells + def set_fitzHughNagumo1969Cell(self, fitz_hugh_nagumo1969_cells): + self.fitz_hugh_nagumo1969_cells = fitz_hugh_nagumo1969_cells + def add_fitzHughNagumo1969Cell(self, value): + self.fitz_hugh_nagumo1969_cells.append(value) + def insert_fitzHughNagumo1969Cell_at(self, index, value): + self.fitz_hugh_nagumo1969_cells.insert(index, value) + def replace_fitzHughNagumo1969Cell_at(self, index, value): + self.fitz_hugh_nagumo1969_cells[index] = value + def get_pinskyRinzelCA3Cell(self): + return self.pinsky_rinzel_ca3_cells + def set_pinskyRinzelCA3Cell(self, pinsky_rinzel_ca3_cells): + self.pinsky_rinzel_ca3_cells = pinsky_rinzel_ca3_cells + def add_pinskyRinzelCA3Cell(self, value): + self.pinsky_rinzel_ca3_cells.append(value) + def insert_pinskyRinzelCA3Cell_at(self, index, value): + self.pinsky_rinzel_ca3_cells.insert(index, value) + def replace_pinskyRinzelCA3Cell_at(self, index, value): + self.pinsky_rinzel_ca3_cells[index] = value + def get_pulseGenerator(self): + return self.pulse_generators + def set_pulseGenerator(self, pulse_generators): + self.pulse_generators = pulse_generators + def add_pulseGenerator(self, value): + self.pulse_generators.append(value) + def insert_pulseGenerator_at(self, index, value): + self.pulse_generators.insert(index, value) + def replace_pulseGenerator_at(self, index, value): + self.pulse_generators[index] = value + def get_pulseGeneratorDL(self): + return self.pulse_generator_dls + def set_pulseGeneratorDL(self, pulse_generator_dls): + self.pulse_generator_dls = pulse_generator_dls + def add_pulseGeneratorDL(self, value): + self.pulse_generator_dls.append(value) + def insert_pulseGeneratorDL_at(self, index, value): + self.pulse_generator_dls.insert(index, value) + def replace_pulseGeneratorDL_at(self, index, value): + self.pulse_generator_dls[index] = value + def get_sineGenerator(self): + return self.sine_generators + def set_sineGenerator(self, sine_generators): + self.sine_generators = sine_generators + def add_sineGenerator(self, value): + self.sine_generators.append(value) + def insert_sineGenerator_at(self, index, value): + self.sine_generators.insert(index, value) + def replace_sineGenerator_at(self, index, value): + self.sine_generators[index] = value + def get_sineGeneratorDL(self): + return self.sine_generator_dls + def set_sineGeneratorDL(self, sine_generator_dls): + self.sine_generator_dls = sine_generator_dls + def add_sineGeneratorDL(self, value): + self.sine_generator_dls.append(value) + def insert_sineGeneratorDL_at(self, index, value): + self.sine_generator_dls.insert(index, value) + def replace_sineGeneratorDL_at(self, index, value): + self.sine_generator_dls[index] = value + def get_rampGenerator(self): + return self.ramp_generators + def set_rampGenerator(self, ramp_generators): + self.ramp_generators = ramp_generators + def add_rampGenerator(self, value): + self.ramp_generators.append(value) + def insert_rampGenerator_at(self, index, value): + self.ramp_generators.insert(index, value) + def replace_rampGenerator_at(self, index, value): + self.ramp_generators[index] = value + def 
get_rampGeneratorDL(self): + return self.ramp_generator_dls + def set_rampGeneratorDL(self, ramp_generator_dls): + self.ramp_generator_dls = ramp_generator_dls + def add_rampGeneratorDL(self, value): + self.ramp_generator_dls.append(value) + def insert_rampGeneratorDL_at(self, index, value): + self.ramp_generator_dls.insert(index, value) + def replace_rampGeneratorDL_at(self, index, value): + self.ramp_generator_dls[index] = value + def get_compoundInput(self): + return self.compound_inputs + def set_compoundInput(self, compound_inputs): + self.compound_inputs = compound_inputs + def add_compoundInput(self, value): + self.compound_inputs.append(value) + def insert_compoundInput_at(self, index, value): + self.compound_inputs.insert(index, value) + def replace_compoundInput_at(self, index, value): + self.compound_inputs[index] = value + def get_compoundInputDL(self): + return self.compound_input_dls + def set_compoundInputDL(self, compound_input_dls): + self.compound_input_dls = compound_input_dls + def add_compoundInputDL(self, value): + self.compound_input_dls.append(value) + def insert_compoundInputDL_at(self, index, value): + self.compound_input_dls.insert(index, value) + def replace_compoundInputDL_at(self, index, value): + self.compound_input_dls[index] = value + def get_voltageClamp(self): + return self.voltage_clamps + def set_voltageClamp(self, voltage_clamps): + self.voltage_clamps = voltage_clamps + def add_voltageClamp(self, value): + self.voltage_clamps.append(value) + def insert_voltageClamp_at(self, index, value): + self.voltage_clamps.insert(index, value) + def replace_voltageClamp_at(self, index, value): + self.voltage_clamps[index] = value + def get_voltageClampTriple(self): + return self.voltage_clamp_triples + def set_voltageClampTriple(self, voltage_clamp_triples): + self.voltage_clamp_triples = voltage_clamp_triples + def add_voltageClampTriple(self, value): + self.voltage_clamp_triples.append(value) + def insert_voltageClampTriple_at(self, index, value): + self.voltage_clamp_triples.insert(index, value) + def replace_voltageClampTriple_at(self, index, value): + self.voltage_clamp_triples[index] = value + def get_spikeArray(self): + return self.spike_arrays + def set_spikeArray(self, spike_arrays): + self.spike_arrays = spike_arrays + def add_spikeArray(self, value): + self.spike_arrays.append(value) + def insert_spikeArray_at(self, index, value): + self.spike_arrays.insert(index, value) + def replace_spikeArray_at(self, index, value): + self.spike_arrays[index] = value + def get_timedSynapticInput(self): + return self.timed_synaptic_inputs + def set_timedSynapticInput(self, timed_synaptic_inputs): + self.timed_synaptic_inputs = timed_synaptic_inputs + def add_timedSynapticInput(self, value): + self.timed_synaptic_inputs.append(value) + def insert_timedSynapticInput_at(self, index, value): + self.timed_synaptic_inputs.insert(index, value) + def replace_timedSynapticInput_at(self, index, value): + self.timed_synaptic_inputs[index] = value + def get_spikeGenerator(self): + return self.spike_generators + def set_spikeGenerator(self, spike_generators): + self.spike_generators = spike_generators + def add_spikeGenerator(self, value): + self.spike_generators.append(value) + def insert_spikeGenerator_at(self, index, value): + self.spike_generators.insert(index, value) + def replace_spikeGenerator_at(self, index, value): + self.spike_generators[index] = value + def get_spikeGeneratorRandom(self): + return self.spike_generator_randoms + def set_spikeGeneratorRandom(self, 
spike_generator_randoms): + self.spike_generator_randoms = spike_generator_randoms + def add_spikeGeneratorRandom(self, value): + self.spike_generator_randoms.append(value) + def insert_spikeGeneratorRandom_at(self, index, value): + self.spike_generator_randoms.insert(index, value) + def replace_spikeGeneratorRandom_at(self, index, value): + self.spike_generator_randoms[index] = value + def get_spikeGeneratorPoisson(self): + return self.spike_generator_poissons + def set_spikeGeneratorPoisson(self, spike_generator_poissons): + self.spike_generator_poissons = spike_generator_poissons + def add_spikeGeneratorPoisson(self, value): + self.spike_generator_poissons.append(value) + def insert_spikeGeneratorPoisson_at(self, index, value): + self.spike_generator_poissons.insert(index, value) + def replace_spikeGeneratorPoisson_at(self, index, value): + self.spike_generator_poissons[index] = value + def get_spikeGeneratorRefPoisson(self): + return self.spike_generator_ref_poissons + def set_spikeGeneratorRefPoisson(self, spike_generator_ref_poissons): + self.spike_generator_ref_poissons = spike_generator_ref_poissons + def add_spikeGeneratorRefPoisson(self, value): + self.spike_generator_ref_poissons.append(value) + def insert_spikeGeneratorRefPoisson_at(self, index, value): + self.spike_generator_ref_poissons.insert(index, value) + def replace_spikeGeneratorRefPoisson_at(self, index, value): + self.spike_generator_ref_poissons[index] = value + def get_poissonFiringSynapse(self): + return self.poisson_firing_synapses + def set_poissonFiringSynapse(self, poisson_firing_synapses): + self.poisson_firing_synapses = poisson_firing_synapses + def add_poissonFiringSynapse(self, value): + self.poisson_firing_synapses.append(value) + def insert_poissonFiringSynapse_at(self, index, value): + self.poisson_firing_synapses.insert(index, value) + def replace_poissonFiringSynapse_at(self, index, value): + self.poisson_firing_synapses[index] = value + def get_transientPoissonFiringSynapse(self): + return self.transient_poisson_firing_synapses + def set_transientPoissonFiringSynapse(self, transient_poisson_firing_synapses): + self.transient_poisson_firing_synapses = transient_poisson_firing_synapses + def add_transientPoissonFiringSynapse(self, value): + self.transient_poisson_firing_synapses.append(value) + def insert_transientPoissonFiringSynapse_at(self, index, value): + self.transient_poisson_firing_synapses.insert(index, value) + def replace_transientPoissonFiringSynapse_at(self, index, value): + self.transient_poisson_firing_synapses[index] = value + def get_IF_curr_alpha(self): + return self.IF_curr_alpha + def set_IF_curr_alpha(self, IF_curr_alpha): + self.IF_curr_alpha = IF_curr_alpha + def add_IF_curr_alpha(self, value): + self.IF_curr_alpha.append(value) + def insert_IF_curr_alpha_at(self, index, value): + self.IF_curr_alpha.insert(index, value) + def replace_IF_curr_alpha_at(self, index, value): + self.IF_curr_alpha[index] = value + def get_IF_curr_exp(self): + return self.IF_curr_exp + def set_IF_curr_exp(self, IF_curr_exp): + self.IF_curr_exp = IF_curr_exp + def add_IF_curr_exp(self, value): + self.IF_curr_exp.append(value) + def insert_IF_curr_exp_at(self, index, value): + self.IF_curr_exp.insert(index, value) + def replace_IF_curr_exp_at(self, index, value): + self.IF_curr_exp[index] = value + def get_IF_cond_alpha(self): + return self.IF_cond_alpha + def set_IF_cond_alpha(self, IF_cond_alpha): + self.IF_cond_alpha = IF_cond_alpha + def add_IF_cond_alpha(self, value): + 
self.IF_cond_alpha.append(value) + def insert_IF_cond_alpha_at(self, index, value): + self.IF_cond_alpha.insert(index, value) + def replace_IF_cond_alpha_at(self, index, value): + self.IF_cond_alpha[index] = value + def get_IF_cond_exp(self): + return self.IF_cond_exp + def set_IF_cond_exp(self, IF_cond_exp): + self.IF_cond_exp = IF_cond_exp + def add_IF_cond_exp(self, value): + self.IF_cond_exp.append(value) + def insert_IF_cond_exp_at(self, index, value): + self.IF_cond_exp.insert(index, value) + def replace_IF_cond_exp_at(self, index, value): + self.IF_cond_exp[index] = value + def get_EIF_cond_exp_isfa_ista(self): + return self.EIF_cond_exp_isfa_ista + def set_EIF_cond_exp_isfa_ista(self, EIF_cond_exp_isfa_ista): + self.EIF_cond_exp_isfa_ista = EIF_cond_exp_isfa_ista + def add_EIF_cond_exp_isfa_ista(self, value): + self.EIF_cond_exp_isfa_ista.append(value) + def insert_EIF_cond_exp_isfa_ista_at(self, index, value): + self.EIF_cond_exp_isfa_ista.insert(index, value) + def replace_EIF_cond_exp_isfa_ista_at(self, index, value): + self.EIF_cond_exp_isfa_ista[index] = value + def get_EIF_cond_alpha_isfa_ista(self): + return self.EIF_cond_alpha_isfa_ista + def set_EIF_cond_alpha_isfa_ista(self, EIF_cond_alpha_isfa_ista): + self.EIF_cond_alpha_isfa_ista = EIF_cond_alpha_isfa_ista + def add_EIF_cond_alpha_isfa_ista(self, value): + self.EIF_cond_alpha_isfa_ista.append(value) + def insert_EIF_cond_alpha_isfa_ista_at(self, index, value): + self.EIF_cond_alpha_isfa_ista.insert(index, value) + def replace_EIF_cond_alpha_isfa_ista_at(self, index, value): + self.EIF_cond_alpha_isfa_ista[index] = value + def get_HH_cond_exp(self): + return self.HH_cond_exp + def set_HH_cond_exp(self, HH_cond_exp): + self.HH_cond_exp = HH_cond_exp + def add_HH_cond_exp(self, value): + self.HH_cond_exp.append(value) + def insert_HH_cond_exp_at(self, index, value): + self.HH_cond_exp.insert(index, value) + def replace_HH_cond_exp_at(self, index, value): + self.HH_cond_exp[index] = value + def get_expCondSynapse(self): + return self.exp_cond_synapses + def set_expCondSynapse(self, exp_cond_synapses): + self.exp_cond_synapses = exp_cond_synapses + def add_expCondSynapse(self, value): + self.exp_cond_synapses.append(value) + def insert_expCondSynapse_at(self, index, value): + self.exp_cond_synapses.insert(index, value) + def replace_expCondSynapse_at(self, index, value): + self.exp_cond_synapses[index] = value + def get_alphaCondSynapse(self): + return self.alpha_cond_synapses + def set_alphaCondSynapse(self, alpha_cond_synapses): + self.alpha_cond_synapses = alpha_cond_synapses + def add_alphaCondSynapse(self, value): + self.alpha_cond_synapses.append(value) + def insert_alphaCondSynapse_at(self, index, value): + self.alpha_cond_synapses.insert(index, value) + def replace_alphaCondSynapse_at(self, index, value): + self.alpha_cond_synapses[index] = value + def get_expCurrSynapse(self): + return self.exp_curr_synapses + def set_expCurrSynapse(self, exp_curr_synapses): + self.exp_curr_synapses = exp_curr_synapses + def add_expCurrSynapse(self, value): + self.exp_curr_synapses.append(value) + def insert_expCurrSynapse_at(self, index, value): + self.exp_curr_synapses.insert(index, value) + def replace_expCurrSynapse_at(self, index, value): + self.exp_curr_synapses[index] = value + def get_alphaCurrSynapse(self): + return self.alpha_curr_synapses + def set_alphaCurrSynapse(self, alpha_curr_synapses): + self.alpha_curr_synapses = alpha_curr_synapses + def add_alphaCurrSynapse(self, value): + self.alpha_curr_synapses.append(value) 
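Beyond the raw list attributes, the regenerated class now exposes per-element accessor methods of the form get_X/set_X/add_X/insert_X_at/replace_X_at, named after the XML element rather than the plural Python member. A short usage sketch, assuming the regenerated accessors shown above (add_cell, insert_cell_at and friends):

    from neuroml import Cell, NeuroMLDocument

    doc = NeuroMLDocument(id="sample_doc")
    doc.add_cell(Cell(id="cell_b"))
    doc.insert_cell_at(0, Cell(id="cell_a"))
    doc.replace_cell_at(1, Cell(id="cell_c"))

    assert [c.id for c in doc.get_cell()] == ["cell_a", "cell_c"]
    assert doc.get_cell() is doc.cells    # the helpers operate on the same underlying list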
+ def insert_alphaCurrSynapse_at(self, index, value): + self.alpha_curr_synapses.insert(index, value) + def replace_alphaCurrSynapse_at(self, index, value): + self.alpha_curr_synapses[index] = value + def get_SpikeSourcePoisson(self): + return self.SpikeSourcePoisson + def set_SpikeSourcePoisson(self, SpikeSourcePoisson): + self.SpikeSourcePoisson = SpikeSourcePoisson + def add_SpikeSourcePoisson(self, value): + self.SpikeSourcePoisson.append(value) + def insert_SpikeSourcePoisson_at(self, index, value): + self.SpikeSourcePoisson.insert(index, value) + def replace_SpikeSourcePoisson_at(self, index, value): + self.SpikeSourcePoisson[index] = value + def get_network(self): + return self.networks + def set_network(self, networks): + self.networks = networks + def add_network(self, value): + self.networks.append(value) + def insert_network_at(self, index, value): + self.networks.insert(index, value) + def replace_network_at(self, index, value): + self.networks[index] = value + def get_ComponentType(self): + return self.ComponentType + def set_ComponentType(self, ComponentType): + self.ComponentType = ComponentType + def add_ComponentType(self, value): + self.ComponentType.append(value) + def insert_ComponentType_at(self, index, value): + self.ComponentType.insert(index, value) + def replace_ComponentType_at(self, index, value): + self.ComponentType[index] = value + def _hasContent(self): if ( self.includes or self.extracellular_properties or @@ -15627,12 +21624,12 @@ def hasContent_(self): self.SpikeSourcePoisson or self.networks or self.ComponentType or - super(NeuroMLDocument, self).hasContent_() + super(NeuroMLDocument, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='NeuroMLDocument', pretty_print=True): + def export(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='NeuroMLDocument', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('NeuroMLDocument') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ @@ -15640,517 +21637,590 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='N eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'NeuroMLDocument': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='NeuroMLDocument') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='NeuroMLDocument') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='NeuroMLDocument', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='NeuroMLDocument', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='NeuroMLDocument'): - super(NeuroMLDocument, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, 
name_='NeuroMLDocument') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='NeuroMLDocument', fromsubclass_=False, pretty_print=True): - super(NeuroMLDocument, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='NeuroMLDocument'): + super(NeuroMLDocument, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='NeuroMLDocument') + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='NeuroMLDocument', fromsubclass_=False, pretty_print=True): + super(NeuroMLDocument, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) if pretty_print: eol_ = '\n' else: eol_ = '' for include_ in self.includes: + namespaceprefix_ = self.includes_nsprefix_ + ':' if (UseCapturedNS_ and self.includes_nsprefix_) else '' include_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='include', pretty_print=pretty_print) for extracellularProperties_ in self.extracellular_properties: + namespaceprefix_ = self.extracellular_properties_nsprefix_ + ':' if (UseCapturedNS_ and self.extracellular_properties_nsprefix_) else '' extracellularProperties_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='extracellularProperties', pretty_print=pretty_print) for intracellularProperties_ in self.intracellular_properties: + namespaceprefix_ = self.intracellular_properties_nsprefix_ + ':' if (UseCapturedNS_ and self.intracellular_properties_nsprefix_) else '' intracellularProperties_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='intracellularProperties', pretty_print=pretty_print) for morphology_ in self.morphology: + namespaceprefix_ = self.morphology_nsprefix_ + ':' if (UseCapturedNS_ and self.morphology_nsprefix_) else '' morphology_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='morphology', pretty_print=pretty_print) for ionChannel_ in self.ion_channel: + namespaceprefix_ = self.ion_channel_nsprefix_ + ':' if (UseCapturedNS_ and self.ion_channel_nsprefix_) else '' ionChannel_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ionChannel', pretty_print=pretty_print) for ionChannelHH_ in self.ion_channel_hhs: + namespaceprefix_ = self.ion_channel_hhs_nsprefix_ + ':' if (UseCapturedNS_ and self.ion_channel_hhs_nsprefix_) else '' ionChannelHH_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ionChannelHH', pretty_print=pretty_print) for ionChannelVShift_ in self.ion_channel_v_shifts: + namespaceprefix_ = self.ion_channel_v_shifts_nsprefix_ + ':' if (UseCapturedNS_ and self.ion_channel_v_shifts_nsprefix_) else '' ionChannelVShift_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ionChannelVShift', pretty_print=pretty_print) for ionChannelKS_ in self.ion_channel_kses: + namespaceprefix_ = self.ion_channel_kses_nsprefix_ + ':' if (UseCapturedNS_ and self.ion_channel_kses_nsprefix_) else '' ionChannelKS_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ionChannelKS', pretty_print=pretty_print) for decayingPoolConcentrationModel_ in self.decaying_pool_concentration_models: + namespaceprefix_ = self.decaying_pool_concentration_models_nsprefix_ + ':' if (UseCapturedNS_ and self.decaying_pool_concentration_models_nsprefix_) else '' 
decayingPoolConcentrationModel_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='decayingPoolConcentrationModel', pretty_print=pretty_print) for fixedFactorConcentrationModel_ in self.fixed_factor_concentration_models: + namespaceprefix_ = self.fixed_factor_concentration_models_nsprefix_ + ':' if (UseCapturedNS_ and self.fixed_factor_concentration_models_nsprefix_) else '' fixedFactorConcentrationModel_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='fixedFactorConcentrationModel', pretty_print=pretty_print) for alphaCurrentSynapse_ in self.alpha_current_synapses: + namespaceprefix_ = self.alpha_current_synapses_nsprefix_ + ':' if (UseCapturedNS_ and self.alpha_current_synapses_nsprefix_) else '' alphaCurrentSynapse_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='alphaCurrentSynapse', pretty_print=pretty_print) for alphaSynapse_ in self.alpha_synapses: + namespaceprefix_ = self.alpha_synapses_nsprefix_ + ':' if (UseCapturedNS_ and self.alpha_synapses_nsprefix_) else '' alphaSynapse_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='alphaSynapse', pretty_print=pretty_print) for expOneSynapse_ in self.exp_one_synapses: + namespaceprefix_ = self.exp_one_synapses_nsprefix_ + ':' if (UseCapturedNS_ and self.exp_one_synapses_nsprefix_) else '' expOneSynapse_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='expOneSynapse', pretty_print=pretty_print) for expTwoSynapse_ in self.exp_two_synapses: + namespaceprefix_ = self.exp_two_synapses_nsprefix_ + ':' if (UseCapturedNS_ and self.exp_two_synapses_nsprefix_) else '' expTwoSynapse_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='expTwoSynapse', pretty_print=pretty_print) for expThreeSynapse_ in self.exp_three_synapses: + namespaceprefix_ = self.exp_three_synapses_nsprefix_ + ':' if (UseCapturedNS_ and self.exp_three_synapses_nsprefix_) else '' expThreeSynapse_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='expThreeSynapse', pretty_print=pretty_print) for blockingPlasticSynapse_ in self.blocking_plastic_synapses: + namespaceprefix_ = self.blocking_plastic_synapses_nsprefix_ + ':' if (UseCapturedNS_ and self.blocking_plastic_synapses_nsprefix_) else '' blockingPlasticSynapse_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='blockingPlasticSynapse', pretty_print=pretty_print) for doubleSynapse_ in self.double_synapses: + namespaceprefix_ = self.double_synapses_nsprefix_ + ':' if (UseCapturedNS_ and self.double_synapses_nsprefix_) else '' doubleSynapse_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='doubleSynapse', pretty_print=pretty_print) for gapJunction_ in self.gap_junctions: + namespaceprefix_ = self.gap_junctions_nsprefix_ + ':' if (UseCapturedNS_ and self.gap_junctions_nsprefix_) else '' gapJunction_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='gapJunction', pretty_print=pretty_print) for silentSynapse_ in self.silent_synapses: + namespaceprefix_ = self.silent_synapses_nsprefix_ + ':' if (UseCapturedNS_ and self.silent_synapses_nsprefix_) else '' silentSynapse_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='silentSynapse', pretty_print=pretty_print) for linearGradedSynapse_ in self.linear_graded_synapses: + namespaceprefix_ = self.linear_graded_synapses_nsprefix_ + ':' if (UseCapturedNS_ and self.linear_graded_synapses_nsprefix_) else '' linearGradedSynapse_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='linearGradedSynapse', 
pretty_print=pretty_print) for gradedSynapse_ in self.graded_synapses: + namespaceprefix_ = self.graded_synapses_nsprefix_ + ':' if (UseCapturedNS_ and self.graded_synapses_nsprefix_) else '' gradedSynapse_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='gradedSynapse', pretty_print=pretty_print) for biophysicalProperties_ in self.biophysical_properties: + namespaceprefix_ = self.biophysical_properties_nsprefix_ + ':' if (UseCapturedNS_ and self.biophysical_properties_nsprefix_) else '' biophysicalProperties_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='biophysicalProperties', pretty_print=pretty_print) for cell_ in self.cells: + namespaceprefix_ = self.cells_nsprefix_ + ':' if (UseCapturedNS_ and self.cells_nsprefix_) else '' cell_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='cell', pretty_print=pretty_print) for cell2CaPools_ in self.cell2_ca_poolses: + namespaceprefix_ = self.cell2_ca_poolses_nsprefix_ + ':' if (UseCapturedNS_ and self.cell2_ca_poolses_nsprefix_) else '' cell2CaPools_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='cell2CaPools', pretty_print=pretty_print) for baseCell_ in self.base_cells: + namespaceprefix_ = self.base_cells_nsprefix_ + ':' if (UseCapturedNS_ and self.base_cells_nsprefix_) else '' baseCell_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='baseCell', pretty_print=pretty_print) for iafTauCell_ in self.iaf_tau_cells: + namespaceprefix_ = self.iaf_tau_cells_nsprefix_ + ':' if (UseCapturedNS_ and self.iaf_tau_cells_nsprefix_) else '' iafTauCell_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='iafTauCell', pretty_print=pretty_print) for iafTauRefCell_ in self.iaf_tau_ref_cells: + namespaceprefix_ = self.iaf_tau_ref_cells_nsprefix_ + ':' if (UseCapturedNS_ and self.iaf_tau_ref_cells_nsprefix_) else '' iafTauRefCell_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='iafTauRefCell', pretty_print=pretty_print) for iafCell_ in self.iaf_cells: + namespaceprefix_ = self.iaf_cells_nsprefix_ + ':' if (UseCapturedNS_ and self.iaf_cells_nsprefix_) else '' iafCell_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='iafCell', pretty_print=pretty_print) for iafRefCell_ in self.iaf_ref_cells: + namespaceprefix_ = self.iaf_ref_cells_nsprefix_ + ':' if (UseCapturedNS_ and self.iaf_ref_cells_nsprefix_) else '' iafRefCell_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='iafRefCell', pretty_print=pretty_print) for izhikevichCell_ in self.izhikevich_cells: + namespaceprefix_ = self.izhikevich_cells_nsprefix_ + ':' if (UseCapturedNS_ and self.izhikevich_cells_nsprefix_) else '' izhikevichCell_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='izhikevichCell', pretty_print=pretty_print) for izhikevich2007Cell_ in self.izhikevich2007_cells: + namespaceprefix_ = self.izhikevich2007_cells_nsprefix_ + ':' if (UseCapturedNS_ and self.izhikevich2007_cells_nsprefix_) else '' izhikevich2007Cell_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='izhikevich2007Cell', pretty_print=pretty_print) for adExIaFCell_ in self.ad_ex_ia_f_cells: + namespaceprefix_ = self.ad_ex_ia_f_cells_nsprefix_ + ':' if (UseCapturedNS_ and self.ad_ex_ia_f_cells_nsprefix_) else '' adExIaFCell_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='adExIaFCell', pretty_print=pretty_print) for fitzHughNagumoCell_ in self.fitz_hugh_nagumo_cells: + namespaceprefix_ = self.fitz_hugh_nagumo_cells_nsprefix_ + ':' if 
(UseCapturedNS_ and self.fitz_hugh_nagumo_cells_nsprefix_) else '' fitzHughNagumoCell_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='fitzHughNagumoCell', pretty_print=pretty_print) for fitzHughNagumo1969Cell_ in self.fitz_hugh_nagumo1969_cells: + namespaceprefix_ = self.fitz_hugh_nagumo1969_cells_nsprefix_ + ':' if (UseCapturedNS_ and self.fitz_hugh_nagumo1969_cells_nsprefix_) else '' fitzHughNagumo1969Cell_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='fitzHughNagumo1969Cell', pretty_print=pretty_print) for pinskyRinzelCA3Cell_ in self.pinsky_rinzel_ca3_cells: + namespaceprefix_ = self.pinsky_rinzel_ca3_cells_nsprefix_ + ':' if (UseCapturedNS_ and self.pinsky_rinzel_ca3_cells_nsprefix_) else '' pinskyRinzelCA3Cell_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='pinskyRinzelCA3Cell', pretty_print=pretty_print) for pulseGenerator_ in self.pulse_generators: + namespaceprefix_ = self.pulse_generators_nsprefix_ + ':' if (UseCapturedNS_ and self.pulse_generators_nsprefix_) else '' pulseGenerator_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='pulseGenerator', pretty_print=pretty_print) for pulseGeneratorDL_ in self.pulse_generator_dls: + namespaceprefix_ = self.pulse_generator_dls_nsprefix_ + ':' if (UseCapturedNS_ and self.pulse_generator_dls_nsprefix_) else '' pulseGeneratorDL_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='pulseGeneratorDL', pretty_print=pretty_print) for sineGenerator_ in self.sine_generators: + namespaceprefix_ = self.sine_generators_nsprefix_ + ':' if (UseCapturedNS_ and self.sine_generators_nsprefix_) else '' sineGenerator_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='sineGenerator', pretty_print=pretty_print) for sineGeneratorDL_ in self.sine_generator_dls: + namespaceprefix_ = self.sine_generator_dls_nsprefix_ + ':' if (UseCapturedNS_ and self.sine_generator_dls_nsprefix_) else '' sineGeneratorDL_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='sineGeneratorDL', pretty_print=pretty_print) for rampGenerator_ in self.ramp_generators: + namespaceprefix_ = self.ramp_generators_nsprefix_ + ':' if (UseCapturedNS_ and self.ramp_generators_nsprefix_) else '' rampGenerator_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='rampGenerator', pretty_print=pretty_print) for rampGeneratorDL_ in self.ramp_generator_dls: + namespaceprefix_ = self.ramp_generator_dls_nsprefix_ + ':' if (UseCapturedNS_ and self.ramp_generator_dls_nsprefix_) else '' rampGeneratorDL_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='rampGeneratorDL', pretty_print=pretty_print) for compoundInput_ in self.compound_inputs: + namespaceprefix_ = self.compound_inputs_nsprefix_ + ':' if (UseCapturedNS_ and self.compound_inputs_nsprefix_) else '' compoundInput_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='compoundInput', pretty_print=pretty_print) for compoundInputDL_ in self.compound_input_dls: + namespaceprefix_ = self.compound_input_dls_nsprefix_ + ':' if (UseCapturedNS_ and self.compound_input_dls_nsprefix_) else '' compoundInputDL_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='compoundInputDL', pretty_print=pretty_print) for voltageClamp_ in self.voltage_clamps: + namespaceprefix_ = self.voltage_clamps_nsprefix_ + ':' if (UseCapturedNS_ and self.voltage_clamps_nsprefix_) else '' voltageClamp_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='voltageClamp', pretty_print=pretty_print) for 
voltageClampTriple_ in self.voltage_clamp_triples: + namespaceprefix_ = self.voltage_clamp_triples_nsprefix_ + ':' if (UseCapturedNS_ and self.voltage_clamp_triples_nsprefix_) else '' voltageClampTriple_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='voltageClampTriple', pretty_print=pretty_print) for spikeArray_ in self.spike_arrays: + namespaceprefix_ = self.spike_arrays_nsprefix_ + ':' if (UseCapturedNS_ and self.spike_arrays_nsprefix_) else '' spikeArray_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='spikeArray', pretty_print=pretty_print) for timedSynapticInput_ in self.timed_synaptic_inputs: + namespaceprefix_ = self.timed_synaptic_inputs_nsprefix_ + ':' if (UseCapturedNS_ and self.timed_synaptic_inputs_nsprefix_) else '' timedSynapticInput_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='timedSynapticInput', pretty_print=pretty_print) for spikeGenerator_ in self.spike_generators: + namespaceprefix_ = self.spike_generators_nsprefix_ + ':' if (UseCapturedNS_ and self.spike_generators_nsprefix_) else '' spikeGenerator_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='spikeGenerator', pretty_print=pretty_print) for spikeGeneratorRandom_ in self.spike_generator_randoms: + namespaceprefix_ = self.spike_generator_randoms_nsprefix_ + ':' if (UseCapturedNS_ and self.spike_generator_randoms_nsprefix_) else '' spikeGeneratorRandom_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='spikeGeneratorRandom', pretty_print=pretty_print) for spikeGeneratorPoisson_ in self.spike_generator_poissons: + namespaceprefix_ = self.spike_generator_poissons_nsprefix_ + ':' if (UseCapturedNS_ and self.spike_generator_poissons_nsprefix_) else '' spikeGeneratorPoisson_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='spikeGeneratorPoisson', pretty_print=pretty_print) for spikeGeneratorRefPoisson_ in self.spike_generator_ref_poissons: + namespaceprefix_ = self.spike_generator_ref_poissons_nsprefix_ + ':' if (UseCapturedNS_ and self.spike_generator_ref_poissons_nsprefix_) else '' spikeGeneratorRefPoisson_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='spikeGeneratorRefPoisson', pretty_print=pretty_print) for poissonFiringSynapse_ in self.poisson_firing_synapses: + namespaceprefix_ = self.poisson_firing_synapses_nsprefix_ + ':' if (UseCapturedNS_ and self.poisson_firing_synapses_nsprefix_) else '' poissonFiringSynapse_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='poissonFiringSynapse', pretty_print=pretty_print) for transientPoissonFiringSynapse_ in self.transient_poisson_firing_synapses: + namespaceprefix_ = self.transient_poisson_firing_synapses_nsprefix_ + ':' if (UseCapturedNS_ and self.transient_poisson_firing_synapses_nsprefix_) else '' transientPoissonFiringSynapse_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='transientPoissonFiringSynapse', pretty_print=pretty_print) for IF_curr_alpha_ in self.IF_curr_alpha: + namespaceprefix_ = self.IF_curr_alpha_nsprefix_ + ':' if (UseCapturedNS_ and self.IF_curr_alpha_nsprefix_) else '' IF_curr_alpha_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='IF_curr_alpha', pretty_print=pretty_print) for IF_curr_exp_ in self.IF_curr_exp: + namespaceprefix_ = self.IF_curr_exp_nsprefix_ + ':' if (UseCapturedNS_ and self.IF_curr_exp_nsprefix_) else '' IF_curr_exp_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='IF_curr_exp', pretty_print=pretty_print) for IF_cond_alpha_ in 
self.IF_cond_alpha: + namespaceprefix_ = self.IF_cond_alpha_nsprefix_ + ':' if (UseCapturedNS_ and self.IF_cond_alpha_nsprefix_) else '' IF_cond_alpha_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='IF_cond_alpha', pretty_print=pretty_print) for IF_cond_exp_ in self.IF_cond_exp: + namespaceprefix_ = self.IF_cond_exp_nsprefix_ + ':' if (UseCapturedNS_ and self.IF_cond_exp_nsprefix_) else '' IF_cond_exp_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='IF_cond_exp', pretty_print=pretty_print) for EIF_cond_exp_isfa_ista_ in self.EIF_cond_exp_isfa_ista: + namespaceprefix_ = self.EIF_cond_exp_isfa_ista_nsprefix_ + ':' if (UseCapturedNS_ and self.EIF_cond_exp_isfa_ista_nsprefix_) else '' EIF_cond_exp_isfa_ista_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='EIF_cond_exp_isfa_ista', pretty_print=pretty_print) for EIF_cond_alpha_isfa_ista_ in self.EIF_cond_alpha_isfa_ista: + namespaceprefix_ = self.EIF_cond_alpha_isfa_ista_nsprefix_ + ':' if (UseCapturedNS_ and self.EIF_cond_alpha_isfa_ista_nsprefix_) else '' EIF_cond_alpha_isfa_ista_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='EIF_cond_alpha_isfa_ista', pretty_print=pretty_print) for HH_cond_exp_ in self.HH_cond_exp: + namespaceprefix_ = self.HH_cond_exp_nsprefix_ + ':' if (UseCapturedNS_ and self.HH_cond_exp_nsprefix_) else '' HH_cond_exp_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='HH_cond_exp', pretty_print=pretty_print) for expCondSynapse_ in self.exp_cond_synapses: + namespaceprefix_ = self.exp_cond_synapses_nsprefix_ + ':' if (UseCapturedNS_ and self.exp_cond_synapses_nsprefix_) else '' expCondSynapse_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='expCondSynapse', pretty_print=pretty_print) for alphaCondSynapse_ in self.alpha_cond_synapses: + namespaceprefix_ = self.alpha_cond_synapses_nsprefix_ + ':' if (UseCapturedNS_ and self.alpha_cond_synapses_nsprefix_) else '' alphaCondSynapse_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='alphaCondSynapse', pretty_print=pretty_print) for expCurrSynapse_ in self.exp_curr_synapses: + namespaceprefix_ = self.exp_curr_synapses_nsprefix_ + ':' if (UseCapturedNS_ and self.exp_curr_synapses_nsprefix_) else '' expCurrSynapse_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='expCurrSynapse', pretty_print=pretty_print) for alphaCurrSynapse_ in self.alpha_curr_synapses: + namespaceprefix_ = self.alpha_curr_synapses_nsprefix_ + ':' if (UseCapturedNS_ and self.alpha_curr_synapses_nsprefix_) else '' alphaCurrSynapse_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='alphaCurrSynapse', pretty_print=pretty_print) for SpikeSourcePoisson_ in self.SpikeSourcePoisson: + namespaceprefix_ = self.SpikeSourcePoisson_nsprefix_ + ':' if (UseCapturedNS_ and self.SpikeSourcePoisson_nsprefix_) else '' SpikeSourcePoisson_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='SpikeSourcePoisson', pretty_print=pretty_print) for network_ in self.networks: + namespaceprefix_ = self.networks_nsprefix_ + ':' if (UseCapturedNS_ and self.networks_nsprefix_) else '' network_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='network', pretty_print=pretty_print) for ComponentType_ in self.ComponentType: + namespaceprefix_ = self.ComponentType_nsprefix_ + ':' if (UseCapturedNS_ and self.ComponentType_nsprefix_) else '' ComponentType_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ComponentType', pretty_print=pretty_print) - 
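The surrounding hunks show the core of this regeneration: generateDS's hooks gain a leading underscore (hasContent_ becomes _hasContent, exportAttributes/exportChildren become _exportAttributes/_exportChildren), per-member namespace prefixes (*_nsprefix_) are captured, list members get get_/set_/add_/insert_..._at/replace_..._at helpers, export() now defaults namespacedef_ to the neuroml2 namespace, and, just below, build() gains an optional gds_collector_ argument. A minimal usage sketch (not part of the patch), assuming the regenerated NeuroMLDocument is still re-exported through the neuroml package; the ids "doc0" and "net0" are made-up examples:

    from io import StringIO
    import neuroml

    doc = neuroml.NeuroMLDocument(id="doc0")
    doc.add_network(neuroml.Network(id="net0"))  # list helper added by this regeneration

    buf = StringIO()
    if doc._hasContent():    # renamed from hasContent_()
        doc.export(buf, 0)   # namespacedef_ now defaults to the neuroml2 namespace
    print(buf.getvalue())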
def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - super(NeuroMLDocument, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + def _buildAttributes(self, node, attrs, already_processed): + super(NeuroMLDocument, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'include': obj_ = IncludeType.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.includes.append(obj_) obj_.original_tagname_ = 'include' elif nodeName_ == 'extracellularProperties': obj_ = ExtracellularProperties.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.extracellular_properties.append(obj_) obj_.original_tagname_ = 'extracellularProperties' elif nodeName_ == 'intracellularProperties': class_obj_ = self.get_class_obj_(child_, IntracellularProperties) obj_ = class_obj_.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.intracellular_properties.append(obj_) obj_.original_tagname_ = 'intracellularProperties' elif nodeName_ == 'morphology': obj_ = Morphology.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.morphology.append(obj_) obj_.original_tagname_ = 'morphology' elif nodeName_ == 'ionChannel': class_obj_ = self.get_class_obj_(child_, IonChannel) obj_ = class_obj_.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.ion_channel.append(obj_) obj_.original_tagname_ = 'ionChannel' elif nodeName_ == 'ionChannelHH': obj_ = IonChannelHH.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.ion_channel_hhs.append(obj_) obj_.original_tagname_ = 'ionChannelHH' elif nodeName_ == 'ionChannelVShift': obj_ = IonChannelVShift.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.ion_channel_v_shifts.append(obj_) obj_.original_tagname_ = 'ionChannelVShift' elif nodeName_ == 'ionChannelKS': obj_ = IonChannelKS.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.ion_channel_kses.append(obj_) obj_.original_tagname_ = 'ionChannelKS' elif nodeName_ == 'decayingPoolConcentrationModel': class_obj_ = self.get_class_obj_(child_, DecayingPoolConcentrationModel) obj_ = class_obj_.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.decaying_pool_concentration_models.append(obj_) obj_.original_tagname_ = 'decayingPoolConcentrationModel' elif nodeName_ == 'fixedFactorConcentrationModel': obj_ = FixedFactorConcentrationModel.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, 
gds_collector_=gds_collector_) self.fixed_factor_concentration_models.append(obj_) obj_.original_tagname_ = 'fixedFactorConcentrationModel' elif nodeName_ == 'alphaCurrentSynapse': obj_ = AlphaCurrentSynapse.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.alpha_current_synapses.append(obj_) obj_.original_tagname_ = 'alphaCurrentSynapse' elif nodeName_ == 'alphaSynapse': obj_ = AlphaSynapse.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.alpha_synapses.append(obj_) obj_.original_tagname_ = 'alphaSynapse' elif nodeName_ == 'expOneSynapse': obj_ = ExpOneSynapse.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.exp_one_synapses.append(obj_) obj_.original_tagname_ = 'expOneSynapse' elif nodeName_ == 'expTwoSynapse': class_obj_ = self.get_class_obj_(child_, ExpTwoSynapse) obj_ = class_obj_.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.exp_two_synapses.append(obj_) obj_.original_tagname_ = 'expTwoSynapse' elif nodeName_ == 'expThreeSynapse': obj_ = ExpThreeSynapse.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.exp_three_synapses.append(obj_) obj_.original_tagname_ = 'expThreeSynapse' elif nodeName_ == 'blockingPlasticSynapse': obj_ = BlockingPlasticSynapse.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.blocking_plastic_synapses.append(obj_) obj_.original_tagname_ = 'blockingPlasticSynapse' elif nodeName_ == 'doubleSynapse': obj_ = DoubleSynapse.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.double_synapses.append(obj_) obj_.original_tagname_ = 'doubleSynapse' elif nodeName_ == 'gapJunction': obj_ = GapJunction.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.gap_junctions.append(obj_) obj_.original_tagname_ = 'gapJunction' elif nodeName_ == 'silentSynapse': obj_ = SilentSynapse.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.silent_synapses.append(obj_) obj_.original_tagname_ = 'silentSynapse' elif nodeName_ == 'linearGradedSynapse': obj_ = LinearGradedSynapse.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.linear_graded_synapses.append(obj_) obj_.original_tagname_ = 'linearGradedSynapse' elif nodeName_ == 'gradedSynapse': obj_ = GradedSynapse.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.graded_synapses.append(obj_) obj_.original_tagname_ = 'gradedSynapse' elif nodeName_ == 'biophysicalProperties': obj_ = BiophysicalProperties.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.biophysical_properties.append(obj_) obj_.original_tagname_ = 'biophysicalProperties' elif nodeName_ == 'cell': class_obj_ = self.get_class_obj_(child_, Cell) obj_ = class_obj_.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.cells.append(obj_) obj_.original_tagname_ = 'cell' elif nodeName_ == 'cell2CaPools': obj_ = Cell2CaPools.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.cell2_ca_poolses.append(obj_) 
obj_.original_tagname_ = 'cell2CaPools' elif nodeName_ == 'baseCell': class_obj_ = self.get_class_obj_(child_, BaseCell) obj_ = class_obj_.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.base_cells.append(obj_) obj_.original_tagname_ = 'baseCell' elif nodeName_ == 'iafTauCell': class_obj_ = self.get_class_obj_(child_, IafTauCell) obj_ = class_obj_.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.iaf_tau_cells.append(obj_) obj_.original_tagname_ = 'iafTauCell' elif nodeName_ == 'iafTauRefCell': obj_ = IafTauRefCell.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.iaf_tau_ref_cells.append(obj_) obj_.original_tagname_ = 'iafTauRefCell' elif nodeName_ == 'iafCell': class_obj_ = self.get_class_obj_(child_, IafCell) obj_ = class_obj_.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.iaf_cells.append(obj_) obj_.original_tagname_ = 'iafCell' elif nodeName_ == 'iafRefCell': obj_ = IafRefCell.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.iaf_ref_cells.append(obj_) obj_.original_tagname_ = 'iafRefCell' elif nodeName_ == 'izhikevichCell': obj_ = IzhikevichCell.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.izhikevich_cells.append(obj_) obj_.original_tagname_ = 'izhikevichCell' elif nodeName_ == 'izhikevich2007Cell': obj_ = Izhikevich2007Cell.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.izhikevich2007_cells.append(obj_) obj_.original_tagname_ = 'izhikevich2007Cell' elif nodeName_ == 'adExIaFCell': obj_ = AdExIaFCell.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.ad_ex_ia_f_cells.append(obj_) obj_.original_tagname_ = 'adExIaFCell' elif nodeName_ == 'fitzHughNagumoCell': obj_ = FitzHughNagumoCell.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.fitz_hugh_nagumo_cells.append(obj_) obj_.original_tagname_ = 'fitzHughNagumoCell' elif nodeName_ == 'fitzHughNagumo1969Cell': obj_ = FitzHughNagumo1969Cell.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.fitz_hugh_nagumo1969_cells.append(obj_) obj_.original_tagname_ = 'fitzHughNagumo1969Cell' elif nodeName_ == 'pinskyRinzelCA3Cell': obj_ = PinskyRinzelCA3Cell.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.pinsky_rinzel_ca3_cells.append(obj_) obj_.original_tagname_ = 'pinskyRinzelCA3Cell' elif nodeName_ == 'pulseGenerator': obj_ = PulseGenerator.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.pulse_generators.append(obj_) obj_.original_tagname_ = 'pulseGenerator' elif nodeName_ == 'pulseGeneratorDL': obj_ = PulseGeneratorDL.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.pulse_generator_dls.append(obj_) obj_.original_tagname_ = 'pulseGeneratorDL' elif nodeName_ == 'sineGenerator': obj_ = SineGenerator.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.sine_generators.append(obj_) obj_.original_tagname_ = 'sineGenerator' elif nodeName_ == 
'sineGeneratorDL': obj_ = SineGeneratorDL.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.sine_generator_dls.append(obj_) obj_.original_tagname_ = 'sineGeneratorDL' elif nodeName_ == 'rampGenerator': obj_ = RampGenerator.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.ramp_generators.append(obj_) obj_.original_tagname_ = 'rampGenerator' elif nodeName_ == 'rampGeneratorDL': obj_ = RampGeneratorDL.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.ramp_generator_dls.append(obj_) obj_.original_tagname_ = 'rampGeneratorDL' elif nodeName_ == 'compoundInput': obj_ = CompoundInput.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.compound_inputs.append(obj_) obj_.original_tagname_ = 'compoundInput' elif nodeName_ == 'compoundInputDL': obj_ = CompoundInputDL.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.compound_input_dls.append(obj_) obj_.original_tagname_ = 'compoundInputDL' elif nodeName_ == 'voltageClamp': obj_ = VoltageClamp.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.voltage_clamps.append(obj_) obj_.original_tagname_ = 'voltageClamp' elif nodeName_ == 'voltageClampTriple': obj_ = VoltageClampTriple.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.voltage_clamp_triples.append(obj_) obj_.original_tagname_ = 'voltageClampTriple' elif nodeName_ == 'spikeArray': obj_ = SpikeArray.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.spike_arrays.append(obj_) obj_.original_tagname_ = 'spikeArray' elif nodeName_ == 'timedSynapticInput': obj_ = TimedSynapticInput.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.timed_synaptic_inputs.append(obj_) obj_.original_tagname_ = 'timedSynapticInput' elif nodeName_ == 'spikeGenerator': obj_ = SpikeGenerator.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.spike_generators.append(obj_) obj_.original_tagname_ = 'spikeGenerator' elif nodeName_ == 'spikeGeneratorRandom': obj_ = SpikeGeneratorRandom.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.spike_generator_randoms.append(obj_) obj_.original_tagname_ = 'spikeGeneratorRandom' elif nodeName_ == 'spikeGeneratorPoisson': class_obj_ = self.get_class_obj_(child_, SpikeGeneratorPoisson) obj_ = class_obj_.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.spike_generator_poissons.append(obj_) obj_.original_tagname_ = 'spikeGeneratorPoisson' elif nodeName_ == 'spikeGeneratorRefPoisson': obj_ = SpikeGeneratorRefPoisson.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.spike_generator_ref_poissons.append(obj_) obj_.original_tagname_ = 'spikeGeneratorRefPoisson' elif nodeName_ == 'poissonFiringSynapse': obj_ = PoissonFiringSynapse.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.poisson_firing_synapses.append(obj_) obj_.original_tagname_ = 'poissonFiringSynapse' elif nodeName_ == 'transientPoissonFiringSynapse': obj_ = 
TransientPoissonFiringSynapse.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.transient_poisson_firing_synapses.append(obj_) obj_.original_tagname_ = 'transientPoissonFiringSynapse' elif nodeName_ == 'IF_curr_alpha': obj_ = IF_curr_alpha.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.IF_curr_alpha.append(obj_) obj_.original_tagname_ = 'IF_curr_alpha' elif nodeName_ == 'IF_curr_exp': obj_ = IF_curr_exp.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.IF_curr_exp.append(obj_) obj_.original_tagname_ = 'IF_curr_exp' elif nodeName_ == 'IF_cond_alpha': obj_ = IF_cond_alpha.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.IF_cond_alpha.append(obj_) obj_.original_tagname_ = 'IF_cond_alpha' elif nodeName_ == 'IF_cond_exp': obj_ = IF_cond_exp.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.IF_cond_exp.append(obj_) obj_.original_tagname_ = 'IF_cond_exp' elif nodeName_ == 'EIF_cond_exp_isfa_ista': class_obj_ = self.get_class_obj_(child_, EIF_cond_exp_isfa_ista) obj_ = class_obj_.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.EIF_cond_exp_isfa_ista.append(obj_) obj_.original_tagname_ = 'EIF_cond_exp_isfa_ista' elif nodeName_ == 'EIF_cond_alpha_isfa_ista': obj_ = EIF_cond_alpha_isfa_ista.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.EIF_cond_alpha_isfa_ista.append(obj_) obj_.original_tagname_ = 'EIF_cond_alpha_isfa_ista' elif nodeName_ == 'HH_cond_exp': obj_ = HH_cond_exp.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.HH_cond_exp.append(obj_) obj_.original_tagname_ = 'HH_cond_exp' elif nodeName_ == 'expCondSynapse': obj_ = ExpCondSynapse.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.exp_cond_synapses.append(obj_) obj_.original_tagname_ = 'expCondSynapse' elif nodeName_ == 'alphaCondSynapse': obj_ = AlphaCondSynapse.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.alpha_cond_synapses.append(obj_) obj_.original_tagname_ = 'alphaCondSynapse' elif nodeName_ == 'expCurrSynapse': obj_ = ExpCurrSynapse.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.exp_curr_synapses.append(obj_) obj_.original_tagname_ = 'expCurrSynapse' elif nodeName_ == 'alphaCurrSynapse': obj_ = AlphaCurrSynapse.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.alpha_curr_synapses.append(obj_) obj_.original_tagname_ = 'alphaCurrSynapse' elif nodeName_ == 'SpikeSourcePoisson': obj_ = SpikeSourcePoisson.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.SpikeSourcePoisson.append(obj_) obj_.original_tagname_ = 'SpikeSourcePoisson' elif nodeName_ == 'network': obj_ = Network.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.networks.append(obj_) obj_.original_tagname_ = 'network' elif nodeName_ == 'ComponentType': obj_ = ComponentType.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) 
self.ComponentType.append(obj_) obj_.original_tagname_ = 'ComponentType' - super(NeuroMLDocument, self).buildChildren(child_, node, nodeName_, True) + super(NeuroMLDocument, self)._buildChildren(child_, node, nodeName_, True) def summary(self, show_includes=True, show_non_network=True): @@ -16327,16 +22397,21 @@ def append(self,element): class BasePynnSynapse(BaseSynapse): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('tau_syn', 'xs:float', 0, 0, {'use': u'required'}), + MemberSpec_('tau_syn', 'xs:float', 0, 0, {'use': 'required', 'name': 'tau_syn'}), ] subclass = None superclass = BaseSynapse - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, tau_syn=None, extensiontype_=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, tau_syn=None, extensiontype_=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(BasePynnSynapse, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, extensiontype_, **kwargs_) + self.ns_prefix_ = None + super(globals().get("BasePynnSynapse"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, extensiontype_, **kwargs_) self.tau_syn = _cast(float, tau_syn) + self.tau_syn_nsprefix_ = None self.extensiontype_ = extensiontype_ def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: @@ -16349,9 +22424,19 @@ def factory(*args_, **kwargs_): else: return BasePynnSynapse(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_tau_syn(self): + return self.tau_syn + def set_tau_syn(self, tau_syn): + self.tau_syn = tau_syn + def get_extensiontype_(self): return self.extensiontype_ + def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ + def _hasContent(self): if ( - super(BasePynnSynapse, self).hasContent_() + super(BasePynnSynapse, self)._hasContent() ): return True else: @@ -16364,75 +22449,92 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='B eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'BasePynnSynapse': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BasePynnSynapse') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BasePynnSynapse') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BasePynnSynapse', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BasePynnSynapse', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', 
name_='BasePynnSynapse'): - super(BasePynnSynapse, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BasePynnSynapse') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BasePynnSynapse'): + super(BasePynnSynapse, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BasePynnSynapse') if self.tau_syn is not None and 'tau_syn' not in already_processed: already_processed.add('tau_syn') outfile.write(' tau_syn="%s"' % self.gds_format_float(self.tau_syn, input_name='tau_syn')) if self.extensiontype_ is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BasePynnSynapse', fromsubclass_=False, pretty_print=True): - super(BasePynnSynapse, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get(self.extensiontype_, '') + outfile.write(' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_)) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BasePynnSynapse', fromsubclass_=False, pretty_print=True): + super(BasePynnSynapse, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('tau_syn', node) if value is not None and 'tau_syn' not in already_processed: already_processed.add('tau_syn') - try: - self.tau_syn = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (tau_syn): %s' % exp) + value = self.gds_parse_float(value, node, 'tau_syn') + self.tau_syn = value value = find_attr_value_('xsi:type', node) if value is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') self.extensiontype_ = value - super(BasePynnSynapse, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(BasePynnSynapse, self).buildChildren(child_, node, nodeName_, True) + super(BasePynnSynapse, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(BasePynnSynapse, self)._buildChildren(child_, node, nodeName_, True) pass # end class BasePynnSynapse class basePyNNCell(BaseCell): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('cm', 'xs:float', 0, 0, {'use': u'required'}), - 
MemberSpec_('i_offset', 'xs:float', 0, 0, {'use': u'required'}), - MemberSpec_('tau_syn_E', 'xs:float', 0, 0, {'use': u'required'}), - MemberSpec_('tau_syn_I', 'xs:float', 0, 0, {'use': u'required'}), - MemberSpec_('v_init', 'xs:float', 0, 0, {'use': u'required'}), + MemberSpec_('cm', 'xs:float', 0, 0, {'use': 'required', 'name': 'cm'}), + MemberSpec_('i_offset', 'xs:float', 0, 0, {'use': 'required', 'name': 'i_offset'}), + MemberSpec_('tau_syn_E', 'xs:float', 0, 0, {'use': 'required', 'name': 'tau_syn_E'}), + MemberSpec_('tau_syn_I', 'xs:float', 0, 0, {'use': 'required', 'name': 'tau_syn_I'}), + MemberSpec_('v_init', 'xs:float', 0, 0, {'use': 'required', 'name': 'v_init'}), ] subclass = None superclass = BaseCell - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, cm=None, i_offset=None, tau_syn_E=None, tau_syn_I=None, v_init=None, extensiontype_=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, cm=None, i_offset=None, tau_syn_E=None, tau_syn_I=None, v_init=None, extensiontype_=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(basePyNNCell, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, extensiontype_, **kwargs_) + self.ns_prefix_ = None + super(globals().get("basePyNNCell"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, extensiontype_, **kwargs_) self.cm = _cast(float, cm) + self.cm_nsprefix_ = None self.i_offset = _cast(float, i_offset) + self.i_offset_nsprefix_ = None self.tau_syn_E = _cast(float, tau_syn_E) + self.tau_syn_E_nsprefix_ = None self.tau_syn_I = _cast(float, tau_syn_I) + self.tau_syn_I_nsprefix_ = None self.v_init = _cast(float, v_init) + self.v_init_nsprefix_ = None self.extensiontype_ = extensiontype_ def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: @@ -16445,9 +22547,35 @@ def factory(*args_, **kwargs_): else: return basePyNNCell(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_cm(self): + return self.cm + def set_cm(self, cm): + self.cm = cm + def get_i_offset(self): + return self.i_offset + def set_i_offset(self, i_offset): + self.i_offset = i_offset + def get_tau_syn_E(self): + return self.tau_syn_E + def set_tau_syn_E(self, tau_syn_E): + self.tau_syn_E = tau_syn_E + def get_tau_syn_I(self): + return self.tau_syn_I + def set_tau_syn_I(self, tau_syn_I): + self.tau_syn_I = tau_syn_I + def get_v_init(self): + return self.v_init + def set_v_init(self, v_init): + self.v_init = v_init + def get_extensiontype_(self): return self.extensiontype_ + def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ + def _hasContent(self): if ( - super(basePyNNCell, self).hasContent_() + super(basePyNNCell, self)._hasContent() ): return True else: @@ -16460,21 +22588,23 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='b eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'basePyNNCell': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) 
outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='basePyNNCell') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='basePyNNCell') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='basePyNNCell', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='basePyNNCell', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='basePyNNCell'): - super(basePyNNCell, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='basePyNNCell') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='basePyNNCell'): + super(basePyNNCell, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='basePyNNCell') if self.cm is not None and 'cm' not in already_processed: already_processed.add('cm') outfile.write(' cm="%s"' % self.gds_format_float(self.cm, input_name='cm')) @@ -16493,89 +22623,95 @@ def exportAttributes(self, outfile, level, already_processed, namespaceprefix_=' if self.extensiontype_ is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='basePyNNCell', fromsubclass_=False, pretty_print=True): - super(basePyNNCell, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get(self.extensiontype_, '') + outfile.write(' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_)) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='basePyNNCell', fromsubclass_=False, pretty_print=True): + super(basePyNNCell, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('cm', node) if value is not None and 'cm' not in already_processed: already_processed.add('cm') - try: - self.cm = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (cm): %s' % exp) + value = 
self.gds_parse_float(value, node, 'cm') + self.cm = value value = find_attr_value_('i_offset', node) if value is not None and 'i_offset' not in already_processed: already_processed.add('i_offset') - try: - self.i_offset = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (i_offset): %s' % exp) + value = self.gds_parse_float(value, node, 'i_offset') + self.i_offset = value value = find_attr_value_('tau_syn_E', node) if value is not None and 'tau_syn_E' not in already_processed: already_processed.add('tau_syn_E') - try: - self.tau_syn_E = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (tau_syn_E): %s' % exp) + value = self.gds_parse_float(value, node, 'tau_syn_E') + self.tau_syn_E = value value = find_attr_value_('tau_syn_I', node) if value is not None and 'tau_syn_I' not in already_processed: already_processed.add('tau_syn_I') - try: - self.tau_syn_I = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (tau_syn_I): %s' % exp) + value = self.gds_parse_float(value, node, 'tau_syn_I') + self.tau_syn_I = value value = find_attr_value_('v_init', node) if value is not None and 'v_init' not in already_processed: already_processed.add('v_init') - try: - self.v_init = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (v_init): %s' % exp) + value = self.gds_parse_float(value, node, 'v_init') + self.v_init = value value = find_attr_value_('xsi:type', node) if value is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') self.extensiontype_ = value - super(basePyNNCell, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(basePyNNCell, self).buildChildren(child_, node, nodeName_, True) + super(basePyNNCell, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(basePyNNCell, self)._buildChildren(child_, node, nodeName_, True) pass # end class basePyNNCell class ContinuousProjection(BaseProjection): - """Projection between two populations consisting of analog connections - (e.g. graded synapses)""" + """ContinuousProjection -- Projection between two populations consisting of analog connections (e.g. 
graded synapses) + + """ + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('continuous_connections', 'ContinuousConnection', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ContinuousConnection', u'name': u'continuousConnection', u'minOccurs': u'0'}, None), - MemberSpec_('continuous_connection_instances', 'ContinuousConnectionInstance', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ContinuousConnectionInstance', u'name': u'continuousConnectionInstance', u'minOccurs': u'0'}, None), - MemberSpec_('continuous_connection_instance_ws', 'ContinuousConnectionInstanceW', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ContinuousConnectionInstanceW', u'name': u'continuousConnectionInstanceW', u'minOccurs': u'0'}, None), + MemberSpec_('continuous_connections', 'ContinuousConnection', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'continuousConnection', 'type': 'ContinuousConnection'}, None), + MemberSpec_('continuous_connection_instances', 'ContinuousConnectionInstance', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'continuousConnectionInstance', 'type': 'ContinuousConnectionInstance'}, None), + MemberSpec_('continuous_connection_instance_ws', 'ContinuousConnectionInstanceW', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'continuousConnectionInstanceW', 'type': 'ContinuousConnectionInstanceW'}, None), ] subclass = None superclass = BaseProjection - def __init__(self, neuro_lex_id=None, id=None, presynaptic_population=None, postsynaptic_population=None, continuous_connections=None, continuous_connection_instances=None, continuous_connection_instance_ws=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, presynaptic_population=None, postsynaptic_population=None, continuous_connections=None, continuous_connection_instances=None, continuous_connection_instance_ws=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(ContinuousProjection, self).__init__(neuro_lex_id, id, presynaptic_population, postsynaptic_population, **kwargs_) + self.ns_prefix_ = None + super(globals().get("ContinuousProjection"), self).__init__(neuro_lex_id, id, presynaptic_population, postsynaptic_population, **kwargs_) if continuous_connections is None: self.continuous_connections = [] else: self.continuous_connections = continuous_connections + self.continuous_connections_nsprefix_ = None if continuous_connection_instances is None: self.continuous_connection_instances = [] else: self.continuous_connection_instances = continuous_connection_instances + self.continuous_connection_instances_nsprefix_ = None if continuous_connection_instance_ws is None: self.continuous_connection_instance_ws = [] else: self.continuous_connection_instance_ws = continuous_connection_instance_ws + self.continuous_connection_instance_ws_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -16587,17 +22723,51 @@ def factory(*args_, **kwargs_): else: return ContinuousProjection(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_continuousConnection(self): + return self.continuous_connections + def set_continuousConnection(self, continuous_connections): + self.continuous_connections = continuous_connections + 
def add_continuousConnection(self, value): + self.continuous_connections.append(value) + def insert_continuousConnection_at(self, index, value): + self.continuous_connections.insert(index, value) + def replace_continuousConnection_at(self, index, value): + self.continuous_connections[index] = value + def get_continuousConnectionInstance(self): + return self.continuous_connection_instances + def set_continuousConnectionInstance(self, continuous_connection_instances): + self.continuous_connection_instances = continuous_connection_instances + def add_continuousConnectionInstance(self, value): + self.continuous_connection_instances.append(value) + def insert_continuousConnectionInstance_at(self, index, value): + self.continuous_connection_instances.insert(index, value) + def replace_continuousConnectionInstance_at(self, index, value): + self.continuous_connection_instances[index] = value + def get_continuousConnectionInstanceW(self): + return self.continuous_connection_instance_ws + def set_continuousConnectionInstanceW(self, continuous_connection_instance_ws): + self.continuous_connection_instance_ws = continuous_connection_instance_ws + def add_continuousConnectionInstanceW(self, value): + self.continuous_connection_instance_ws.append(value) + def insert_continuousConnectionInstanceW_at(self, index, value): + self.continuous_connection_instance_ws.insert(index, value) + def replace_continuousConnectionInstanceW_at(self, index, value): + self.continuous_connection_instance_ws[index] = value + def _hasContent(self): if ( self.continuous_connections or self.continuous_connection_instances or self.continuous_connection_instance_ws or - super(ContinuousProjection, self).hasContent_() + super(ContinuousProjection, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ContinuousProjection', pretty_print=True): + def export(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='ContinuousProjection', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('ContinuousProjection') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ @@ -16605,61 +22775,70 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='C eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'ContinuousProjection': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ContinuousProjection') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ContinuousProjection') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ContinuousProjection', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ContinuousProjection', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', 
name_='ContinuousProjection'): - super(ContinuousProjection, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ContinuousProjection') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ContinuousProjection', fromsubclass_=False, pretty_print=True): - super(ContinuousProjection, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ContinuousProjection'): + super(ContinuousProjection, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ContinuousProjection') + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='ContinuousProjection', fromsubclass_=False, pretty_print=True): + super(ContinuousProjection, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) if pretty_print: eol_ = '\n' else: eol_ = '' for continuousConnection_ in self.continuous_connections: + namespaceprefix_ = self.continuous_connections_nsprefix_ + ':' if (UseCapturedNS_ and self.continuous_connections_nsprefix_) else '' continuousConnection_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='continuousConnection', pretty_print=pretty_print) for continuousConnectionInstance_ in self.continuous_connection_instances: + namespaceprefix_ = self.continuous_connection_instances_nsprefix_ + ':' if (UseCapturedNS_ and self.continuous_connection_instances_nsprefix_) else '' continuousConnectionInstance_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='continuousConnectionInstance', pretty_print=pretty_print) for continuousConnectionInstanceW_ in self.continuous_connection_instance_ws: + namespaceprefix_ = self.continuous_connection_instance_ws_nsprefix_ + ':' if (UseCapturedNS_ and self.continuous_connection_instance_ws_nsprefix_) else '' continuousConnectionInstanceW_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='continuousConnectionInstanceW', pretty_print=pretty_print) - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - super(ContinuousProjection, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + def _buildAttributes(self, node, attrs, already_processed): + super(ContinuousProjection, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'continuousConnection': class_obj_ = self.get_class_obj_(child_, ContinuousConnection) obj_ = class_obj_.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.continuous_connections.append(obj_) obj_.original_tagname_ = 'continuousConnection' elif nodeName_ == 
'continuousConnectionInstance': class_obj_ = self.get_class_obj_(child_, ContinuousConnectionInstance) obj_ = class_obj_.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.continuous_connection_instances.append(obj_) obj_.original_tagname_ = 'continuousConnectionInstance' elif nodeName_ == 'continuousConnectionInstanceW': obj_ = ContinuousConnectionInstanceW.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.continuous_connection_instance_ws.append(obj_) obj_.original_tagname_ = 'continuousConnectionInstanceW' - super(ContinuousProjection, self).buildChildren(child_, node, nodeName_, True) + super(ContinuousProjection, self)._buildChildren(child_, node, nodeName_, True) def exportHdf5(self, h5file, h5Group): """Export to HDF5 file. """ @@ -16745,31 +22924,39 @@ def exportHdf5(self, h5file, h5Group): class ElectricalProjection(BaseProjection): - """Projection between two populations consisting of electrical - connections (gap junctions)""" + """ElectricalProjection -- Projection between two populations consisting of electrical connections (gap junctions) + + """ + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('electrical_connections', 'ElectricalConnection', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ElectricalConnection', u'name': u'electricalConnection', u'minOccurs': u'0'}, None), - MemberSpec_('electrical_connection_instances', 'ElectricalConnectionInstance', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ElectricalConnectionInstance', u'name': u'electricalConnectionInstance', u'minOccurs': u'0'}, None), - MemberSpec_('electrical_connection_instance_ws', 'ElectricalConnectionInstanceW', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ElectricalConnectionInstanceW', u'name': u'electricalConnectionInstanceW', u'minOccurs': u'0'}, None), + MemberSpec_('electrical_connections', 'ElectricalConnection', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'electricalConnection', 'type': 'ElectricalConnection'}, None), + MemberSpec_('electrical_connection_instances', 'ElectricalConnectionInstance', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'electricalConnectionInstance', 'type': 'ElectricalConnectionInstance'}, None), + MemberSpec_('electrical_connection_instance_ws', 'ElectricalConnectionInstanceW', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'electricalConnectionInstanceW', 'type': 'ElectricalConnectionInstanceW'}, None), ] subclass = None superclass = BaseProjection - def __init__(self, neuro_lex_id=None, id=None, presynaptic_population=None, postsynaptic_population=None, electrical_connections=None, electrical_connection_instances=None, electrical_connection_instance_ws=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, presynaptic_population=None, postsynaptic_population=None, electrical_connections=None, electrical_connection_instances=None, electrical_connection_instance_ws=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(ElectricalProjection, self).__init__(neuro_lex_id, id, presynaptic_population, postsynaptic_population, **kwargs_) + self.ns_prefix_ = None + super(globals().get("ElectricalProjection"), self).__init__(neuro_lex_id, id, presynaptic_population, postsynaptic_population, **kwargs_) if electrical_connections is None: 
self.electrical_connections = [] else: self.electrical_connections = electrical_connections + self.electrical_connections_nsprefix_ = None if electrical_connection_instances is None: self.electrical_connection_instances = [] else: self.electrical_connection_instances = electrical_connection_instances + self.electrical_connection_instances_nsprefix_ = None if electrical_connection_instance_ws is None: self.electrical_connection_instance_ws = [] else: self.electrical_connection_instance_ws = electrical_connection_instance_ws + self.electrical_connection_instance_ws_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -16781,17 +22968,51 @@ def factory(*args_, **kwargs_): else: return ElectricalProjection(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_electricalConnection(self): + return self.electrical_connections + def set_electricalConnection(self, electrical_connections): + self.electrical_connections = electrical_connections + def add_electricalConnection(self, value): + self.electrical_connections.append(value) + def insert_electricalConnection_at(self, index, value): + self.electrical_connections.insert(index, value) + def replace_electricalConnection_at(self, index, value): + self.electrical_connections[index] = value + def get_electricalConnectionInstance(self): + return self.electrical_connection_instances + def set_electricalConnectionInstance(self, electrical_connection_instances): + self.electrical_connection_instances = electrical_connection_instances + def add_electricalConnectionInstance(self, value): + self.electrical_connection_instances.append(value) + def insert_electricalConnectionInstance_at(self, index, value): + self.electrical_connection_instances.insert(index, value) + def replace_electricalConnectionInstance_at(self, index, value): + self.electrical_connection_instances[index] = value + def get_electricalConnectionInstanceW(self): + return self.electrical_connection_instance_ws + def set_electricalConnectionInstanceW(self, electrical_connection_instance_ws): + self.electrical_connection_instance_ws = electrical_connection_instance_ws + def add_electricalConnectionInstanceW(self, value): + self.electrical_connection_instance_ws.append(value) + def insert_electricalConnectionInstanceW_at(self, index, value): + self.electrical_connection_instance_ws.insert(index, value) + def replace_electricalConnectionInstanceW_at(self, index, value): + self.electrical_connection_instance_ws[index] = value + def _hasContent(self): if ( self.electrical_connections or self.electrical_connection_instances or self.electrical_connection_instance_ws or - super(ElectricalProjection, self).hasContent_() + super(ElectricalProjection, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ElectricalProjection', pretty_print=True): + def export(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='ElectricalProjection', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('ElectricalProjection') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ @@ -16799,61 +23020,70 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='E eol_ = '\n' else: eol_ = '' - if 
self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'ElectricalProjection': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ElectricalProjection') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ElectricalProjection') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ElectricalProjection', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ElectricalProjection', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ElectricalProjection'): - super(ElectricalProjection, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ElectricalProjection') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ElectricalProjection', fromsubclass_=False, pretty_print=True): - super(ElectricalProjection, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ElectricalProjection'): + super(ElectricalProjection, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ElectricalProjection') + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='ElectricalProjection', fromsubclass_=False, pretty_print=True): + super(ElectricalProjection, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) if pretty_print: eol_ = '\n' else: eol_ = '' for electricalConnection_ in self.electrical_connections: + namespaceprefix_ = self.electrical_connections_nsprefix_ + ':' if (UseCapturedNS_ and self.electrical_connections_nsprefix_) else '' electricalConnection_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='electricalConnection', pretty_print=pretty_print) for electricalConnectionInstance_ in self.electrical_connection_instances: + namespaceprefix_ = self.electrical_connection_instances_nsprefix_ + ':' if (UseCapturedNS_ and self.electrical_connection_instances_nsprefix_) else '' electricalConnectionInstance_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='electricalConnectionInstance', pretty_print=pretty_print) for electricalConnectionInstanceW_ in self.electrical_connection_instance_ws: + namespaceprefix_ = self.electrical_connection_instance_ws_nsprefix_ + ':' if (UseCapturedNS_ and self.electrical_connection_instance_ws_nsprefix_) else '' electricalConnectionInstanceW_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='electricalConnectionInstanceW', pretty_print=pretty_print) - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node 
already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - super(ElectricalProjection, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + def _buildAttributes(self, node, attrs, already_processed): + super(ElectricalProjection, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'electricalConnection': class_obj_ = self.get_class_obj_(child_, ElectricalConnection) obj_ = class_obj_.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.electrical_connections.append(obj_) obj_.original_tagname_ = 'electricalConnection' elif nodeName_ == 'electricalConnectionInstance': class_obj_ = self.get_class_obj_(child_, ElectricalConnectionInstance) obj_ = class_obj_.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.electrical_connection_instances.append(obj_) obj_.original_tagname_ = 'electricalConnectionInstance' elif nodeName_ == 'electricalConnectionInstanceW': obj_ = ElectricalConnectionInstanceW.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.electrical_connection_instance_ws.append(obj_) obj_.original_tagname_ = 'electricalConnectionInstanceW' - super(ElectricalProjection, self).buildChildren(child_, node, nodeName_, True) + super(ElectricalProjection, self)._buildChildren(child_, node, nodeName_, True) def exportHdf5(self, h5file, h5Group): """Export to HDF5 file. """ @@ -16934,28 +23164,40 @@ def exportHdf5(self, h5file, h5Group): class BaseConnectionNewFormat(BaseConnection): - """Base of all synaptic connections with preCell, postSegment, etc. See - BaseConnectionOldFormat""" + """BaseConnectionNewFormat -- Base of all synaptic connections with preCell, postSegment, etc. 
+ See BaseConnectionOldFormat + + """ + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('pre_cell', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('pre_segment', 'NonNegativeInteger', 0, 1, {'use': 'optional'}), - MemberSpec_('pre_fraction_along', 'ZeroToOne', 0, 1, {'use': 'optional'}), - MemberSpec_('post_cell', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('post_segment', 'NonNegativeInteger', 0, 1, {'use': 'optional'}), - MemberSpec_('post_fraction_along', 'ZeroToOne', 0, 1, {'use': 'optional'}), + MemberSpec_('pre_cell', 'xs:string', 0, 0, {'use': 'required', 'name': 'pre_cell'}), + MemberSpec_('pre_segment', 'NonNegativeInteger', 0, 1, {'use': 'optional', 'name': 'pre_segment'}), + MemberSpec_('pre_fraction_along', 'ZeroToOne', 0, 1, {'use': 'optional', 'name': 'pre_fraction_along'}), + MemberSpec_('post_cell', 'xs:string', 0, 0, {'use': 'required', 'name': 'post_cell'}), + MemberSpec_('post_segment', 'NonNegativeInteger', 0, 1, {'use': 'optional', 'name': 'post_segment'}), + MemberSpec_('post_fraction_along', 'ZeroToOne', 0, 1, {'use': 'optional', 'name': 'post_fraction_along'}), ] subclass = None superclass = BaseConnection - def __init__(self, neuro_lex_id=None, id=None, pre_cell=None, pre_segment='0', pre_fraction_along='0.5', post_cell=None, post_segment='0', post_fraction_along='0.5', extensiontype_=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, pre_cell=None, pre_segment='0', pre_fraction_along='0.5', post_cell=None, post_segment='0', post_fraction_along='0.5', extensiontype_=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(BaseConnectionNewFormat, self).__init__(neuro_lex_id, id, extensiontype_, **kwargs_) + self.ns_prefix_ = None + super(globals().get("BaseConnectionNewFormat"), self).__init__(neuro_lex_id, id, extensiontype_, **kwargs_) self.pre_cell = _cast(None, pre_cell) + self.pre_cell_nsprefix_ = None self.pre_segment = _cast(int, pre_segment) + self.pre_segment_nsprefix_ = None self.pre_fraction_along = _cast(float, pre_fraction_along) + self.pre_fraction_along_nsprefix_ = None self.post_cell = _cast(None, post_cell) + self.post_cell_nsprefix_ = None self.post_segment = _cast(int, post_segment) + self.post_segment_nsprefix_ = None self.post_fraction_along = _cast(float, post_fraction_along) + self.post_fraction_along_nsprefix_ = None self.extensiontype_ = extensiontype_ def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: @@ -16968,20 +23210,62 @@ def factory(*args_, **kwargs_): else: return BaseConnectionNewFormat(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_preCell(self): + return self.pre_cell + def set_preCell(self, pre_cell): + self.pre_cell = pre_cell + def get_preSegment(self): + return self.pre_segment + def set_preSegment(self, pre_segment): + self.pre_segment = pre_segment + def get_preFractionAlong(self): + return self.pre_fraction_along + def set_preFractionAlong(self, pre_fraction_along): + self.pre_fraction_along = pre_fraction_along + def get_postCell(self): + return self.post_cell + def set_postCell(self, post_cell): + self.post_cell = post_cell + def get_postSegment(self): + return self.post_segment + def set_postSegment(self, post_segment): + self.post_segment = post_segment + def 
get_postFractionAlong(self): + return self.post_fraction_along + def set_postFractionAlong(self, post_fraction_along): + self.post_fraction_along = post_fraction_along + def get_extensiontype_(self): return self.extensiontype_ + def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ def validate_NonNegativeInteger(self, value): # Validate type NonNegativeInteger, a restriction on xs:nonNegativeInteger. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, int): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (int)' % {"value": value, "lineno": lineno, }) + return False pass def validate_ZeroToOne(self, value): # Validate type ZeroToOne, a restriction on xs:float. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, float): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (float)' % {"value": value, "lineno": lineno, }) + return False if value < 0: - warnings_.warn('Value "%(value)s" does not match xsd minInclusive restriction on ZeroToOne' % {"value" : value} ) + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd minInclusive restriction on ZeroToOne' % {"value": value, "lineno": lineno} ) + result = False if value > 1: - warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on ZeroToOne' % {"value" : value} ) - def hasContent_(self): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd maxInclusive restriction on ZeroToOne' % {"value": value, "lineno": lineno} ) + result = False + def _hasContent(self): if ( - super(BaseConnectionNewFormat, self).hasContent_() + super(BaseConnectionNewFormat, self)._hasContent() ): return True else: @@ -16994,53 +23278,63 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='B eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'BaseConnectionNewFormat': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseConnectionNewFormat') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseConnectionNewFormat') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BaseConnectionNewFormat', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BaseConnectionNewFormat', pretty_print=pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BaseConnectionNewFormat'): - super(BaseConnectionNewFormat, self).exportAttributes(outfile, level, 
already_processed, namespaceprefix_, name_='BaseConnectionNewFormat') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BaseConnectionNewFormat'): + super(BaseConnectionNewFormat, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseConnectionNewFormat') if self.pre_cell is not None and 'pre_cell' not in already_processed: already_processed.add('pre_cell') outfile.write(' preCell=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.pre_cell), input_name='preCell')), )) if self.pre_segment != 0 and 'pre_segment' not in already_processed: already_processed.add('pre_segment') - outfile.write(' preSegment=%s' % (quote_attrib(self.pre_segment), )) + outfile.write(' preSegment="%s"' % self.gds_format_integer(self.pre_segment, input_name='preSegment')) if self.pre_fraction_along != 0.5 and 'pre_fraction_along' not in already_processed: already_processed.add('pre_fraction_along') - outfile.write(' preFractionAlong=%s' % (quote_attrib(self.pre_fraction_along), )) + outfile.write(' preFractionAlong="%s"' % self.gds_format_float(self.pre_fraction_along, input_name='preFractionAlong')) if self.post_cell is not None and 'post_cell' not in already_processed: already_processed.add('post_cell') outfile.write(' postCell=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.post_cell), input_name='postCell')), )) if self.post_segment != 0 and 'post_segment' not in already_processed: already_processed.add('post_segment') - outfile.write(' postSegment=%s' % (quote_attrib(self.post_segment), )) + outfile.write(' postSegment="%s"' % self.gds_format_integer(self.post_segment, input_name='postSegment')) if self.post_fraction_along != 0.5 and 'post_fraction_along' not in already_processed: already_processed.add('post_fraction_along') - outfile.write(' postFractionAlong=%s' % (quote_attrib(self.post_fraction_along), )) + outfile.write(' postFractionAlong="%s"' % self.gds_format_float(self.post_fraction_along, input_name='postFractionAlong')) if self.extensiontype_ is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseConnectionNewFormat', fromsubclass_=False, pretty_print=True): - super(BaseConnectionNewFormat, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get(self.extensiontype_, '') + outfile.write(' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_)) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseConnectionNewFormat', fromsubclass_=False, pretty_print=True): + super(BaseConnectionNewFormat, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) pass - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = 
Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('preCell', node) if value is not None and 'preCell' not in already_processed: already_processed.add('preCell') @@ -17048,20 +23342,15 @@ def buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('preSegment', node) if value is not None and 'preSegment' not in already_processed: already_processed.add('preSegment') - try: - self.pre_segment = int(value) - except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + self.pre_segment = self.gds_parse_integer(value, node, 'preSegment') if self.pre_segment < 0: raise_parse_error(node, 'Invalid NonNegativeInteger') self.validate_NonNegativeInteger(self.pre_segment) # validate type NonNegativeInteger value = find_attr_value_('preFractionAlong', node) if value is not None and 'preFractionAlong' not in already_processed: already_processed.add('preFractionAlong') - try: - self.pre_fraction_along = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (preFractionAlong): %s' % exp) + value = self.gds_parse_float(value, node, 'preFractionAlong') + self.pre_fraction_along = value self.validate_ZeroToOne(self.pre_fraction_along) # validate type ZeroToOne value = find_attr_value_('postCell', node) if value is not None and 'postCell' not in already_processed: @@ -17070,56 +23359,62 @@ def buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('postSegment', node) if value is not None and 'postSegment' not in already_processed: already_processed.add('postSegment') - try: - self.post_segment = int(value) - except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + self.post_segment = self.gds_parse_integer(value, node, 'postSegment') if self.post_segment < 0: raise_parse_error(node, 'Invalid NonNegativeInteger') self.validate_NonNegativeInteger(self.post_segment) # validate type NonNegativeInteger value = find_attr_value_('postFractionAlong', node) if value is not None and 'postFractionAlong' not in already_processed: already_processed.add('postFractionAlong') - try: - self.post_fraction_along = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (postFractionAlong): %s' % exp) + value = self.gds_parse_float(value, node, 'postFractionAlong') + self.post_fraction_along = value self.validate_ZeroToOne(self.post_fraction_along) # validate type ZeroToOne value = find_attr_value_('xsi:type', node) if value is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') self.extensiontype_ = value - super(BaseConnectionNewFormat, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(BaseConnectionNewFormat, self).buildChildren(child_, node, nodeName_, True) + super(BaseConnectionNewFormat, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(BaseConnectionNewFormat, self)._buildChildren(child_, node, nodeName_, True) pass # end class BaseConnectionNewFormat class BaseConnectionOldFormat(BaseConnection): - """Base of all synaptic connections with preCellId, 
postSegmentId, etc. - Note: this is not the best name for these attributes, since Id - is superfluous, hence BaseConnectionNewFormat""" + """BaseConnectionOldFormat -- Base of all synaptic connections with preCellId, postSegmentId, etc. + Note: this is not the best name for these attributes, since Id is superfluous, hence BaseConnectionNewFormat + + """ + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('pre_cell_id', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('pre_segment_id', 'NonNegativeInteger', 0, 1, {'use': 'optional'}), - MemberSpec_('pre_fraction_along', 'ZeroToOne', 0, 1, {'use': 'optional'}), - MemberSpec_('post_cell_id', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('post_segment_id', 'NonNegativeInteger', 0, 1, {'use': 'optional'}), - MemberSpec_('post_fraction_along', 'ZeroToOne', 0, 1, {'use': 'optional'}), + MemberSpec_('pre_cell_id', 'xs:string', 0, 0, {'use': 'required', 'name': 'pre_cell_id'}), + MemberSpec_('pre_segment_id', 'NonNegativeInteger', 0, 1, {'use': 'optional', 'name': 'pre_segment_id'}), + MemberSpec_('pre_fraction_along', 'ZeroToOne', 0, 1, {'use': 'optional', 'name': 'pre_fraction_along'}), + MemberSpec_('post_cell_id', 'xs:string', 0, 0, {'use': 'required', 'name': 'post_cell_id'}), + MemberSpec_('post_segment_id', 'NonNegativeInteger', 0, 1, {'use': 'optional', 'name': 'post_segment_id'}), + MemberSpec_('post_fraction_along', 'ZeroToOne', 0, 1, {'use': 'optional', 'name': 'post_fraction_along'}), ] subclass = None superclass = BaseConnection - def __init__(self, neuro_lex_id=None, id=None, pre_cell_id=None, pre_segment_id='0', pre_fraction_along='0.5', post_cell_id=None, post_segment_id='0', post_fraction_along='0.5', extensiontype_=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, pre_cell_id=None, pre_segment_id='0', pre_fraction_along='0.5', post_cell_id=None, post_segment_id='0', post_fraction_along='0.5', extensiontype_=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(BaseConnectionOldFormat, self).__init__(neuro_lex_id, id, extensiontype_, **kwargs_) + self.ns_prefix_ = None + super(globals().get("BaseConnectionOldFormat"), self).__init__(neuro_lex_id, id, extensiontype_, **kwargs_) self.pre_cell_id = _cast(None, pre_cell_id) + self.pre_cell_id_nsprefix_ = None self.pre_segment_id = _cast(int, pre_segment_id) + self.pre_segment_id_nsprefix_ = None self.pre_fraction_along = _cast(float, pre_fraction_along) + self.pre_fraction_along_nsprefix_ = None self.post_cell_id = _cast(None, post_cell_id) + self.post_cell_id_nsprefix_ = None self.post_segment_id = _cast(int, post_segment_id) + self.post_segment_id_nsprefix_ = None self.post_fraction_along = _cast(float, post_fraction_along) + self.post_fraction_along_nsprefix_ = None self.extensiontype_ = extensiontype_ def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: @@ -17132,20 +23427,62 @@ def factory(*args_, **kwargs_): else: return BaseConnectionOldFormat(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_preCellId(self): + return self.pre_cell_id + def set_preCellId(self, pre_cell_id): + self.pre_cell_id = pre_cell_id + def get_preSegmentId(self): + return self.pre_segment_id + def set_preSegmentId(self, pre_segment_id): + 
self.pre_segment_id = pre_segment_id + def get_preFractionAlong(self): + return self.pre_fraction_along + def set_preFractionAlong(self, pre_fraction_along): + self.pre_fraction_along = pre_fraction_along + def get_postCellId(self): + return self.post_cell_id + def set_postCellId(self, post_cell_id): + self.post_cell_id = post_cell_id + def get_postSegmentId(self): + return self.post_segment_id + def set_postSegmentId(self, post_segment_id): + self.post_segment_id = post_segment_id + def get_postFractionAlong(self): + return self.post_fraction_along + def set_postFractionAlong(self, post_fraction_along): + self.post_fraction_along = post_fraction_along + def get_extensiontype_(self): return self.extensiontype_ + def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ def validate_NonNegativeInteger(self, value): # Validate type NonNegativeInteger, a restriction on xs:nonNegativeInteger. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, int): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (int)' % {"value": value, "lineno": lineno, }) + return False pass def validate_ZeroToOne(self, value): # Validate type ZeroToOne, a restriction on xs:float. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, float): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (float)' % {"value": value, "lineno": lineno, }) + return False if value < 0: - warnings_.warn('Value "%(value)s" does not match xsd minInclusive restriction on ZeroToOne' % {"value" : value} ) + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd minInclusive restriction on ZeroToOne' % {"value": value, "lineno": lineno} ) + result = False if value > 1: - warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on ZeroToOne' % {"value" : value} ) - def hasContent_(self): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd maxInclusive restriction on ZeroToOne' % {"value": value, "lineno": lineno} ) + result = False + def _hasContent(self): if ( - super(BaseConnectionOldFormat, self).hasContent_() + super(BaseConnectionOldFormat, self)._hasContent() ): return True else: @@ -17158,53 +23495,63 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='B eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'BaseConnectionOldFormat': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseConnectionOldFormat') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseConnectionOldFormat') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, 
namespacedef_, name_='BaseConnectionOldFormat', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BaseConnectionOldFormat', pretty_print=pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BaseConnectionOldFormat'): - super(BaseConnectionOldFormat, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseConnectionOldFormat') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BaseConnectionOldFormat'): + super(BaseConnectionOldFormat, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseConnectionOldFormat') if self.pre_cell_id is not None and 'pre_cell_id' not in already_processed: already_processed.add('pre_cell_id') outfile.write(' preCellId=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.pre_cell_id), input_name='preCellId')), )) if self.pre_segment_id != 0 and 'pre_segment_id' not in already_processed: already_processed.add('pre_segment_id') - outfile.write(' preSegmentId=%s' % (quote_attrib(self.pre_segment_id), )) + outfile.write(' preSegmentId="%s"' % self.gds_format_integer(self.pre_segment_id, input_name='preSegmentId')) if self.pre_fraction_along != 0.5 and 'pre_fraction_along' not in already_processed: already_processed.add('pre_fraction_along') - outfile.write(' preFractionAlong=%s' % (quote_attrib(self.pre_fraction_along), )) + outfile.write(' preFractionAlong="%s"' % self.gds_format_float(self.pre_fraction_along, input_name='preFractionAlong')) if self.post_cell_id is not None and 'post_cell_id' not in already_processed: already_processed.add('post_cell_id') outfile.write(' postCellId=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.post_cell_id), input_name='postCellId')), )) if self.post_segment_id != 0 and 'post_segment_id' not in already_processed: already_processed.add('post_segment_id') - outfile.write(' postSegmentId=%s' % (quote_attrib(self.post_segment_id), )) + outfile.write(' postSegmentId="%s"' % self.gds_format_integer(self.post_segment_id, input_name='postSegmentId')) if self.post_fraction_along != 0.5 and 'post_fraction_along' not in already_processed: already_processed.add('post_fraction_along') - outfile.write(' postFractionAlong=%s' % (quote_attrib(self.post_fraction_along), )) + outfile.write(' postFractionAlong="%s"' % self.gds_format_float(self.post_fraction_along, input_name='postFractionAlong')) if self.extensiontype_ is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseConnectionOldFormat', fromsubclass_=False, pretty_print=True): - super(BaseConnectionOldFormat, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get(self.extensiontype_, '') + outfile.write(' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_)) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseConnectionOldFormat', fromsubclass_=False, 
pretty_print=True): + super(BaseConnectionOldFormat, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) pass - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('preCellId', node) if value is not None and 'preCellId' not in already_processed: already_processed.add('preCellId') @@ -17212,20 +23559,15 @@ def buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('preSegmentId', node) if value is not None and 'preSegmentId' not in already_processed: already_processed.add('preSegmentId') - try: - self.pre_segment_id = int(value) - except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + self.pre_segment_id = self.gds_parse_integer(value, node, 'preSegmentId') if self.pre_segment_id < 0: raise_parse_error(node, 'Invalid NonNegativeInteger') self.validate_NonNegativeInteger(self.pre_segment_id) # validate type NonNegativeInteger value = find_attr_value_('preFractionAlong', node) if value is not None and 'preFractionAlong' not in already_processed: already_processed.add('preFractionAlong') - try: - self.pre_fraction_along = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (preFractionAlong): %s' % exp) + value = self.gds_parse_float(value, node, 'preFractionAlong') + self.pre_fraction_along = value self.validate_ZeroToOne(self.pre_fraction_along) # validate type ZeroToOne value = find_attr_value_('postCellId', node) if value is not None and 'postCellId' not in already_processed: @@ -17234,55 +23576,58 @@ def buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('postSegmentId', node) if value is not None and 'postSegmentId' not in already_processed: already_processed.add('postSegmentId') - try: - self.post_segment_id = int(value) - except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + self.post_segment_id = self.gds_parse_integer(value, node, 'postSegmentId') if self.post_segment_id < 0: raise_parse_error(node, 'Invalid NonNegativeInteger') self.validate_NonNegativeInteger(self.post_segment_id) # validate type NonNegativeInteger value = find_attr_value_('postFractionAlong', node) if value is not None and 'postFractionAlong' not in already_processed: already_processed.add('postFractionAlong') - try: - self.post_fraction_along = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (postFractionAlong): %s' % exp) + value = self.gds_parse_float(value, node, 'postFractionAlong') + self.post_fraction_along = value self.validate_ZeroToOne(self.post_fraction_along) # validate type ZeroToOne value = find_attr_value_('xsi:type', node) if value is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') self.extensiontype_ = value - super(BaseConnectionOldFormat, 
self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(BaseConnectionOldFormat, self).buildChildren(child_, node, nodeName_, True) + super(BaseConnectionOldFormat, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(BaseConnectionOldFormat, self)._buildChildren(child_, node, nodeName_, True) pass # end class BaseConnectionOldFormat class Projection(BaseProjection): - """Projection (set of synaptic connections) between two populations. - Chemical/event based synaptic transmission""" + """Projection -- Projection (set of synaptic connections) between two populations. Chemical/event based synaptic transmission + + """ + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('synapse', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('connections', 'Connection', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Connection', u'name': u'connection', u'minOccurs': u'0'}, None), - MemberSpec_('connection_wds', 'ConnectionWD', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ConnectionWD', u'name': u'connectionWD', u'minOccurs': u'0'}, None), + MemberSpec_('synapse', 'NmlId', 0, 0, {'use': 'required', 'name': 'synapse'}), + MemberSpec_('connections', 'Connection', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'connection', 'type': 'Connection'}, None), + MemberSpec_('connection_wds', 'ConnectionWD', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'connectionWD', 'type': 'ConnectionWD'}, None), ] subclass = None superclass = BaseProjection - def __init__(self, neuro_lex_id=None, id=None, presynaptic_population=None, postsynaptic_population=None, synapse=None, connections=None, connection_wds=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, presynaptic_population=None, postsynaptic_population=None, synapse=None, connections=None, connection_wds=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(Projection, self).__init__(neuro_lex_id, id, presynaptic_population, postsynaptic_population, **kwargs_) + self.ns_prefix_ = None + super(globals().get("Projection"), self).__init__(neuro_lex_id, id, presynaptic_population, postsynaptic_population, **kwargs_) self.synapse = _cast(None, synapse) + self.synapse_nsprefix_ = None if connections is None: self.connections = [] else: self.connections = connections + self.connections_nsprefix_ = None if connection_wds is None: self.connection_wds = [] else: self.connection_wds = connection_wds + self.connection_wds_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -17294,23 +23639,55 @@ def factory(*args_, **kwargs_): else: return Projection(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_connection(self): + return self.connections + def set_connection(self, connections): + self.connections = connections + def add_connection(self, value): + self.connections.append(value) + def insert_connection_at(self, index, value): + self.connections.insert(index, value) + def replace_connection_at(self, index, value): + self.connections[index] = value + def get_connectionWD(self): + 
return self.connection_wds + def set_connectionWD(self, connection_wds): + self.connection_wds = connection_wds + def add_connectionWD(self, value): + self.connection_wds.append(value) + def insert_connectionWD_at(self, index, value): + self.connection_wds.insert(index, value) + def replace_connectionWD_at(self, index, value): + self.connection_wds[index] = value + def get_synapse(self): + return self.synapse + def set_synapse(self, synapse): + self.synapse = synapse def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_NmlId_patterns_, )) + validate_NmlId_patterns_ = [['^([a-zA-Z_][a-zA-Z0-9_]*)$']] + def _hasContent(self): if ( self.connections or self.connection_wds or - super(Projection, self).hasContent_() + super(Projection, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Projection', pretty_print=True): + def export(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='Projection', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('Projection') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ @@ -17318,60 +23695,68 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='P eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'Projection': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Projection') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Projection') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Projection', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Projection', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Projection'): - super(Projection, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Projection') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Projection'): + 
super(Projection, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Projection') if self.synapse is not None and 'synapse' not in already_processed: already_processed.add('synapse') - outfile.write(' synapse=%s' % (quote_attrib(self.synapse), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Projection', fromsubclass_=False, pretty_print=True): - super(Projection, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write(' synapse=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.synapse), input_name='synapse')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='Projection', fromsubclass_=False, pretty_print=True): + super(Projection, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) if pretty_print: eol_ = '\n' else: eol_ = '' for connection_ in self.connections: + namespaceprefix_ = self.connections_nsprefix_ + ':' if (UseCapturedNS_ and self.connections_nsprefix_) else '' connection_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='connection', pretty_print=pretty_print) for connectionWD_ in self.connection_wds: + namespaceprefix_ = self.connection_wds_nsprefix_ + ':' if (UseCapturedNS_ and self.connection_wds_nsprefix_) else '' connectionWD_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='connectionWD', pretty_print=pretty_print) - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('synapse', node) if value is not None and 'synapse' not in already_processed: already_processed.add('synapse') self.synapse = value self.validate_NmlId(self.synapse) # validate type NmlId - super(Projection, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + super(Projection, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'connection': obj_ = Connection.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.connections.append(obj_) obj_.original_tagname_ = 'connection' elif nodeName_ == 'connectionWD': obj_ = ConnectionWD.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.connection_wds.append(obj_) obj_.original_tagname_ = 'connectionWD' - super(Projection, self).buildChildren(child_, node, nodeName_, True) + super(Projection, self)._buildChildren(child_, node, nodeName_, True) def exportHdf5(self, h5file, h5Group): """Export to HDF5 file. 
""" @@ -17470,16 +23855,21 @@ def __str__(self): class SpikeGeneratorRefPoisson(SpikeGeneratorPoisson): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('minimum_isi', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), + MemberSpec_('minimum_isi', 'Nml2Quantity_time', 0, 0, {'use': 'required', 'name': 'minimum_isi'}), ] subclass = None superclass = SpikeGeneratorPoisson - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, average_rate=None, minimum_isi=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, average_rate=None, minimum_isi=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(SpikeGeneratorRefPoisson, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, average_rate, **kwargs_) + self.ns_prefix_ = None + super(globals().get("SpikeGeneratorRefPoisson"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, average_rate, **kwargs_) self.minimum_isi = _cast(None, minimum_isi) + self.minimum_isi_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -17491,16 +23881,28 @@ def factory(*args_, **kwargs_): else: return SpikeGeneratorRefPoisson(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_minimumISI(self): + return self.minimum_isi + def set_minimumISI(self, minimum_isi): + self.minimum_isi = minimum_isi def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_time_patterns_, )) + validate_Nml2Quantity_time_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms))$']] + def _hasContent(self): if ( - super(SpikeGeneratorRefPoisson, self).hasContent_() + super(SpikeGeneratorRefPoisson, self)._hasContent() ): return True else: @@ -17513,57 +23915,68 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='S eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'SpikeGeneratorRefPoisson': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpikeGeneratorRefPoisson') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpikeGeneratorRefPoisson') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SpikeGeneratorRefPoisson', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SpikeGeneratorRefPoisson', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SpikeGeneratorRefPoisson'): - super(SpikeGeneratorRefPoisson, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpikeGeneratorRefPoisson') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SpikeGeneratorRefPoisson'): + super(SpikeGeneratorRefPoisson, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpikeGeneratorRefPoisson') if self.minimum_isi is not None and 'minimum_isi' not in already_processed: already_processed.add('minimum_isi') - outfile.write(' minimumISI=%s' % (quote_attrib(self.minimum_isi), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SpikeGeneratorRefPoisson', fromsubclass_=False, pretty_print=True): - super(SpikeGeneratorRefPoisson, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write(' 
minimumISI=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.minimum_isi), input_name='minimumISI')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SpikeGeneratorRefPoisson', fromsubclass_=False, pretty_print=True): + super(SpikeGeneratorRefPoisson, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('minimumISI', node) if value is not None and 'minimumISI' not in already_processed: already_processed.add('minimumISI') self.minimum_isi = value self.validate_Nml2Quantity_time(self.minimum_isi) # validate type Nml2Quantity_time - super(SpikeGeneratorRefPoisson, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(SpikeGeneratorRefPoisson, self).buildChildren(child_, node, nodeName_, True) + super(SpikeGeneratorRefPoisson, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(SpikeGeneratorRefPoisson, self)._buildChildren(child_, node, nodeName_, True) pass # end class SpikeGeneratorRefPoisson class ConcentrationModel_D(DecayingPoolConcentrationModel): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('type', 'xs:string', 0, 0, {'use': u'required'}), + MemberSpec_('type', 'xs:string', 0, 0, {'use': 'required', 'name': 'type'}), ] subclass = None superclass = DecayingPoolConcentrationModel - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, ion=None, resting_conc=None, decay_constant=None, shell_thickness=None, type='decayingPoolConcentrationModel', **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, ion=None, resting_conc=None, decay_constant=None, shell_thickness=None, type='decayingPoolConcentrationModel', gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(ConcentrationModel_D, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, ion, resting_conc, decay_constant, shell_thickness, **kwargs_) + self.ns_prefix_ = None + super(globals().get("ConcentrationModel_D"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, ion, resting_conc, decay_constant, shell_thickness, **kwargs_) self.type = _cast(None, type) + self.type_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -17575,9 +23988,17 @@ def factory(*args_, **kwargs_): else: return ConcentrationModel_D(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return 
self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_type(self): + return self.type + def set_type(self, type): + self.type = type + def _hasContent(self): if ( - super(ConcentrationModel_D, self).hasContent_() + super(ConcentrationModel_D, self)._hasContent() ): return True else: @@ -17590,54 +24011,64 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='C eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'ConcentrationModel_D': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ConcentrationModel_D') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ConcentrationModel_D') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ConcentrationModel_D', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ConcentrationModel_D', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ConcentrationModel_D'): - super(ConcentrationModel_D, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ConcentrationModel_D') - if self.type != "decayingPoolConcentrationModel" and 'type' not in already_processed: + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ConcentrationModel_D'): + super(ConcentrationModel_D, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ConcentrationModel_D') + if self.type is not None and 'type' not in already_processed: already_processed.add('type') outfile.write(' type=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.type), input_name='type')), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ConcentrationModel_D', fromsubclass_=False, pretty_print=True): - super(ConcentrationModel_D, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ConcentrationModel_D', fromsubclass_=False, pretty_print=True): + super(ConcentrationModel_D, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, 
node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('type', node) if value is not None and 'type' not in already_processed: already_processed.add('type') self.type = value - super(ConcentrationModel_D, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(ConcentrationModel_D, self).buildChildren(child_, node, nodeName_, True) + super(ConcentrationModel_D, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(ConcentrationModel_D, self)._buildChildren(child_, node, nodeName_, True) pass # end class ConcentrationModel_D class ChannelDensityNernstCa2(ChannelDensityNernst): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ ] subclass = None superclass = ChannelDensityNernst - def __init__(self, neuro_lex_id=None, id=None, ion_channel=None, cond_density=None, segment_groups='all', segments=None, ion=None, variable_parameters=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, ion_channel=None, cond_density=None, segment_groups='all', segments=None, ion=None, variable_parameters=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(ChannelDensityNernstCa2, self).__init__(neuro_lex_id, id, ion_channel, cond_density, segment_groups, segments, ion, variable_parameters, **kwargs_) + self.ns_prefix_ = None + super(globals().get("ChannelDensityNernstCa2"), self).__init__(neuro_lex_id, id, ion_channel, cond_density, segment_groups, segments, ion, variable_parameters, **kwargs_) def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -17649,9 +24080,13 @@ def factory(*args_, **kwargs_): else: return ChannelDensityNernstCa2(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def _hasContent(self): if ( - super(ChannelDensityNernstCa2, self).hasContent_() + super(ChannelDensityNernstCa2, self)._hasContent() ): return True else: @@ -17664,49 +24099,60 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='C eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'ChannelDensityNernstCa2': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelDensityNernstCa2') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelDensityNernstCa2') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ChannelDensityNernstCa2', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ChannelDensityNernstCa2', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % 
(namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ChannelDensityNernstCa2'): - super(ChannelDensityNernstCa2, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelDensityNernstCa2') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ChannelDensityNernstCa2', fromsubclass_=False, pretty_print=True): - super(ChannelDensityNernstCa2, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ChannelDensityNernstCa2'): + super(ChannelDensityNernstCa2, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelDensityNernstCa2') + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ChannelDensityNernstCa2', fromsubclass_=False, pretty_print=True): + super(ChannelDensityNernstCa2, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - super(ChannelDensityNernstCa2, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(ChannelDensityNernstCa2, self).buildChildren(child_, node, nodeName_, True) + def _buildAttributes(self, node, attrs, already_processed): + super(ChannelDensityNernstCa2, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(ChannelDensityNernstCa2, self)._buildChildren(child_, node, nodeName_, True) pass # end class ChannelDensityNernstCa2 class ChannelDensityVShift(ChannelDensity): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('v_shift', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), + MemberSpec_('v_shift', 'Nml2Quantity_voltage', 0, 0, {'use': 'required', 'name': 'v_shift'}), ] subclass = None superclass = ChannelDensity - def __init__(self, neuro_lex_id=None, id=None, ion_channel=None, cond_density=None, erev=None, segment_groups='all', segments=None, ion=None, variable_parameters=None, v_shift=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, ion_channel=None, cond_density=None, erev=None, segment_groups='all', segments=None, ion=None, variable_parameters=None, v_shift=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(ChannelDensityVShift, self).__init__(neuro_lex_id, id, ion_channel, cond_density, erev, segment_groups, segments, ion, variable_parameters, **kwargs_) + self.ns_prefix_ = None + 
super(globals().get("ChannelDensityVShift"), self).__init__(neuro_lex_id, id, ion_channel, cond_density, erev, segment_groups, segments, ion, variable_parameters, **kwargs_) self.v_shift = _cast(None, v_shift) + self.v_shift_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -17718,16 +24164,28 @@ def factory(*args_, **kwargs_): else: return ChannelDensityVShift(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_vShift(self): + return self.v_shift + def set_vShift(self, v_shift): + self.v_shift = v_shift def validate_Nml2Quantity_voltage(self, value): # Validate type Nml2Quantity_voltage, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_voltage_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_voltage_patterns_, )) - validate_Nml2Quantity_voltage_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_voltage_patterns_, )) + validate_Nml2Quantity_voltage_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV))$']] + def _hasContent(self): if ( - super(ChannelDensityVShift, self).hasContent_() + super(ChannelDensityVShift, self)._hasContent() ): return True else: @@ -17740,67 +24198,84 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='C eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'ChannelDensityVShift': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelDensityVShift') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelDensityVShift') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ChannelDensityVShift', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ChannelDensityVShift', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ChannelDensityVShift'): - super(ChannelDensityVShift, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelDensityVShift') + def _exportAttributes(self, outfile, level, already_processed, 
namespaceprefix_='', name_='ChannelDensityVShift'): + super(ChannelDensityVShift, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelDensityVShift') if self.v_shift is not None and 'v_shift' not in already_processed: already_processed.add('v_shift') - outfile.write(' vShift=%s' % (quote_attrib(self.v_shift), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ChannelDensityVShift', fromsubclass_=False, pretty_print=True): - super(ChannelDensityVShift, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write(' vShift=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.v_shift), input_name='vShift')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ChannelDensityVShift', fromsubclass_=False, pretty_print=True): + super(ChannelDensityVShift, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('vShift', node) if value is not None and 'vShift' not in already_processed: already_processed.add('vShift') self.v_shift = value self.validate_Nml2Quantity_voltage(self.v_shift) # validate type Nml2Quantity_voltage - super(ChannelDensityVShift, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(ChannelDensityVShift, self).buildChildren(child_, node, nodeName_, True) + super(ChannelDensityVShift, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(ChannelDensityVShift, self)._buildChildren(child_, node, nodeName_, True) pass # end class ChannelDensityVShift class Cell(BaseCell): - """Should only be used if morphology element is outside the cell. This - points to the id of the morphology Should only be used if - biophysicalProperties element is outside the cell. This points - to the id of the biophysicalProperties""" + """morphology -- Should only be used if morphology element is outside the cell. + This points to the id of the morphology + + * biophysicalProperties -- Should only be used if biophysicalProperties element is outside the cell. 
+ This points to the id of the biophysicalProperties + + """ + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('morphology_attr', 'xs:string', 0, 1, {'use': 'optional'}), - MemberSpec_('biophysical_properties_attr', 'xs:string', 0, 1, {'use': 'optional'}), - MemberSpec_('morphology', 'Morphology', 0, 1, {u'type': u'Morphology', u'name': u'morphology', u'minOccurs': u'0'}, None), - MemberSpec_('biophysical_properties', 'BiophysicalProperties', 0, 1, {u'type': u'BiophysicalProperties', u'name': u'biophysicalProperties', u'minOccurs': u'0'}, None), + MemberSpec_('morphology_attr', 'NmlId', 0, 1, {'use': 'optional', 'name': 'morphology_attr'}), + MemberSpec_('biophysical_properties_attr', 'NmlId', 0, 1, {'use': 'optional', 'name': 'biophysical_properties_attr'}), + MemberSpec_('morphology', 'Morphology', 0, 1, {'minOccurs': '0', 'name': 'morphology', 'type': 'Morphology'}, None), + MemberSpec_('biophysical_properties', 'BiophysicalProperties', 0, 1, {'minOccurs': '0', 'name': 'biophysicalProperties', 'type': 'BiophysicalProperties'}, None), ] subclass = None superclass = BaseCell - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, morphology_attr=None, biophysical_properties_attr=None, morphology=None, biophysical_properties=None, extensiontype_=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, morphology_attr=None, biophysical_properties_attr=None, morphology=None, biophysical_properties=None, extensiontype_=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(Cell, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, extensiontype_, **kwargs_) + self.ns_prefix_ = None + super(globals().get("Cell"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, extensiontype_, **kwargs_) self.morphology_attr = _cast(None, morphology_attr) + self.morphology_attr_nsprefix_ = None self.biophysical_properties_attr = _cast(None, biophysical_properties_attr) + self.biophysical_properties_attr_nsprefix_ = None self.morphology = morphology + self.morphology_nsprefix_ = None self.biophysical_properties = biophysical_properties + self.biophysical_properties_nsprefix_ = None self.extensiontype_ = extensiontype_ def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: @@ -17813,16 +24288,49 @@ def factory(*args_, **kwargs_): else: return Cell(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_morphology(self): + return self.morphology + def set_morphology(self, morphology): + self.morphology = morphology + def get_biophysicalProperties(self): + return self.biophysical_properties + def set_biophysicalProperties(self, biophysical_properties): + self.biophysical_properties = biophysical_properties + def get_morphology_attr(self): + return self.morphology_attr + def set_morphology_attr(self, morphology_attr): + self.morphology_attr = morphology_attr + def get_biophysical_properties_attr(self): + return self.biophysical_properties_attr + def set_biophysical_properties_attr(self, biophysical_properties_attr): + self.biophysical_properties_attr = biophysical_properties_attr + def get_extensiontype_(self): return 
self.extensiontype_ + def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ + def validate_NmlId(self, value): + # Validate type NmlId, a restriction on xs:string. + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False + if not self.gds_validate_simple_patterns( + self.validate_NmlId_patterns_, value): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_NmlId_patterns_, )) + validate_NmlId_patterns_ = [['^([a-zA-Z_][a-zA-Z0-9_]*)$']] + def _hasContent(self): if ( self.morphology is not None or self.biophysical_properties is not None or - super(Cell, self).hasContent_() + super(Cell, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Cell', pretty_print=True): + def export(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='Cell', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('Cell') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ @@ -17830,21 +24338,23 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='C eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'Cell': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Cell') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Cell') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Cell', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Cell', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Cell'): - super(Cell, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Cell') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Cell'): + super(Cell, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Cell') if self.morphology_attr is not None and 'morphology_attr' not in already_processed: already_processed.add('morphology_attr') outfile.write(' morphology=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.morphology_attr), input_name='morphology_attr')), )) @@ -17854,50 +24364,62 @@ def exportAttributes(self, outfile, level, already_processed, namespaceprefix_=' if self.extensiontype_ is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' 
xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Cell', fromsubclass_=False, pretty_print=True): - super(Cell, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get(self.extensiontype_, '') + outfile.write(' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_)) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='Cell', fromsubclass_=False, pretty_print=True): + super(Cell, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) if pretty_print: eol_ = '\n' else: eol_ = '' if self.morphology is not None: + namespaceprefix_ = self.morphology_nsprefix_ + ':' if (UseCapturedNS_ and self.morphology_nsprefix_) else '' self.morphology.export(outfile, level, namespaceprefix_, namespacedef_='', name_='morphology', pretty_print=pretty_print) if self.biophysical_properties is not None: + namespaceprefix_ = self.biophysical_properties_nsprefix_ + ':' if (UseCapturedNS_ and self.biophysical_properties_nsprefix_) else '' self.biophysical_properties.export(outfile, level, namespaceprefix_, namespacedef_='', name_='biophysicalProperties', pretty_print=pretty_print) - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('morphology', node) if value is not None and 'morphology_attr' not in already_processed: already_processed.add('morphology_attr') self.morphology_attr = value + self.validate_NmlId(self.morphology_attr) # validate type NmlId value = find_attr_value_('biophysicalProperties', node) if value is not None and 'biophysical_properties_attr' not in already_processed: already_processed.add('biophysical_properties_attr') self.biophysical_properties_attr = value + self.validate_NmlId(self.biophysical_properties_attr) # validate type NmlId value = find_attr_value_('xsi:type', node) if value is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') self.extensiontype_ = value - super(Cell, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + super(Cell, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'morphology': obj_ = Morphology.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.morphology = obj_ obj_.original_tagname_ = 'morphology' elif nodeName_ == 'biophysicalProperties': obj_ = BiophysicalProperties.factory(parent_object_=self) - 
obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.biophysical_properties = obj_ obj_.original_tagname_ = 'biophysicalProperties' - super(Cell, self).buildChildren(child_, node, nodeName_, True) + super(Cell, self)._buildChildren(child_, node, nodeName_, True) # Get segment object by its id @@ -18254,56 +24776,81 @@ def summary(self): class PinskyRinzelCA3Cell(BaseCell): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('i_soma', 'Nml2Quantity_currentDensity', 0, 0, {'use': u'required'}), - MemberSpec_('i_dend', 'Nml2Quantity_currentDensity', 0, 0, {'use': u'required'}), - MemberSpec_('gc', 'Nml2Quantity_conductanceDensity', 0, 0, {'use': u'required'}), - MemberSpec_('g_ls', 'Nml2Quantity_conductanceDensity', 0, 0, {'use': u'required'}), - MemberSpec_('g_ld', 'Nml2Quantity_conductanceDensity', 0, 0, {'use': u'required'}), - MemberSpec_('g_na', 'Nml2Quantity_conductanceDensity', 0, 0, {'use': u'required'}), - MemberSpec_('g_kdr', 'Nml2Quantity_conductanceDensity', 0, 0, {'use': u'required'}), - MemberSpec_('g_ca', 'Nml2Quantity_conductanceDensity', 0, 0, {'use': u'required'}), - MemberSpec_('g_kahp', 'Nml2Quantity_conductanceDensity', 0, 0, {'use': u'required'}), - MemberSpec_('g_kc', 'Nml2Quantity_conductanceDensity', 0, 0, {'use': u'required'}), - MemberSpec_('g_nmda', 'Nml2Quantity_conductanceDensity', 0, 0, {'use': u'required'}), - MemberSpec_('g_ampa', 'Nml2Quantity_conductanceDensity', 0, 0, {'use': u'required'}), - MemberSpec_('e_na', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('e_ca', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('e_k', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('e_l', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('qd0', 'Nml2Quantity_none', 0, 0, {'use': u'required'}), - MemberSpec_('pp', 'Nml2Quantity_none', 0, 0, {'use': u'required'}), - MemberSpec_('alphac', 'Nml2Quantity_none', 0, 0, {'use': u'required'}), - MemberSpec_('betac', 'Nml2Quantity_none', 0, 0, {'use': u'required'}), - MemberSpec_('cm', 'Nml2Quantity_specificCapacitance', 0, 0, {'use': u'required'}), + MemberSpec_('i_soma', 'Nml2Quantity_currentDensity', 0, 0, {'use': 'required', 'name': 'i_soma'}), + MemberSpec_('i_dend', 'Nml2Quantity_currentDensity', 0, 0, {'use': 'required', 'name': 'i_dend'}), + MemberSpec_('gc', 'Nml2Quantity_conductanceDensity', 0, 0, {'use': 'required', 'name': 'gc'}), + MemberSpec_('g_ls', 'Nml2Quantity_conductanceDensity', 0, 0, {'use': 'required', 'name': 'g_ls'}), + MemberSpec_('g_ld', 'Nml2Quantity_conductanceDensity', 0, 0, {'use': 'required', 'name': 'g_ld'}), + MemberSpec_('g_na', 'Nml2Quantity_conductanceDensity', 0, 0, {'use': 'required', 'name': 'g_na'}), + MemberSpec_('g_kdr', 'Nml2Quantity_conductanceDensity', 0, 0, {'use': 'required', 'name': 'g_kdr'}), + MemberSpec_('g_ca', 'Nml2Quantity_conductanceDensity', 0, 0, {'use': 'required', 'name': 'g_ca'}), + MemberSpec_('g_kahp', 'Nml2Quantity_conductanceDensity', 0, 0, {'use': 'required', 'name': 'g_kahp'}), + MemberSpec_('g_kc', 'Nml2Quantity_conductanceDensity', 0, 0, {'use': 'required', 'name': 'g_kc'}), + MemberSpec_('g_nmda', 'Nml2Quantity_conductanceDensity', 0, 0, {'use': 'required', 'name': 'g_nmda'}), + MemberSpec_('g_ampa', 'Nml2Quantity_conductanceDensity', 0, 0, {'use': 'required', 'name': 'g_ampa'}), + MemberSpec_('e_na', 'Nml2Quantity_voltage', 0, 0, {'use': 'required', 'name': 'e_na'}), + MemberSpec_('e_ca', 'Nml2Quantity_voltage', 0, 0, {'use': 'required', 
'name': 'e_ca'}), + MemberSpec_('e_k', 'Nml2Quantity_voltage', 0, 0, {'use': 'required', 'name': 'e_k'}), + MemberSpec_('e_l', 'Nml2Quantity_voltage', 0, 0, {'use': 'required', 'name': 'e_l'}), + MemberSpec_('qd0', 'Nml2Quantity_none', 0, 0, {'use': 'required', 'name': 'qd0'}), + MemberSpec_('pp', 'Nml2Quantity_none', 0, 0, {'use': 'required', 'name': 'pp'}), + MemberSpec_('alphac', 'Nml2Quantity_none', 0, 0, {'use': 'required', 'name': 'alphac'}), + MemberSpec_('betac', 'Nml2Quantity_none', 0, 0, {'use': 'required', 'name': 'betac'}), + MemberSpec_('cm', 'Nml2Quantity_specificCapacitance', 0, 0, {'use': 'required', 'name': 'cm'}), ] subclass = None superclass = BaseCell - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, i_soma=None, i_dend=None, gc=None, g_ls=None, g_ld=None, g_na=None, g_kdr=None, g_ca=None, g_kahp=None, g_kc=None, g_nmda=None, g_ampa=None, e_na=None, e_ca=None, e_k=None, e_l=None, qd0=None, pp=None, alphac=None, betac=None, cm=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, i_soma=None, i_dend=None, gc=None, g_ls=None, g_ld=None, g_na=None, g_kdr=None, g_ca=None, g_kahp=None, g_kc=None, g_nmda=None, g_ampa=None, e_na=None, e_ca=None, e_k=None, e_l=None, qd0=None, pp=None, alphac=None, betac=None, cm=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(PinskyRinzelCA3Cell, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.ns_prefix_ = None + super(globals().get("PinskyRinzelCA3Cell"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) self.i_soma = _cast(None, i_soma) + self.i_soma_nsprefix_ = None self.i_dend = _cast(None, i_dend) + self.i_dend_nsprefix_ = None self.gc = _cast(None, gc) + self.gc_nsprefix_ = None self.g_ls = _cast(None, g_ls) + self.g_ls_nsprefix_ = None self.g_ld = _cast(None, g_ld) + self.g_ld_nsprefix_ = None self.g_na = _cast(None, g_na) + self.g_na_nsprefix_ = None self.g_kdr = _cast(None, g_kdr) + self.g_kdr_nsprefix_ = None self.g_ca = _cast(None, g_ca) + self.g_ca_nsprefix_ = None self.g_kahp = _cast(None, g_kahp) + self.g_kahp_nsprefix_ = None self.g_kc = _cast(None, g_kc) + self.g_kc_nsprefix_ = None self.g_nmda = _cast(None, g_nmda) + self.g_nmda_nsprefix_ = None self.g_ampa = _cast(None, g_ampa) + self.g_ampa_nsprefix_ = None self.e_na = _cast(None, e_na) + self.e_na_nsprefix_ = None self.e_ca = _cast(None, e_ca) + self.e_ca_nsprefix_ = None self.e_k = _cast(None, e_k) + self.e_k_nsprefix_ = None self.e_l = _cast(None, e_l) + self.e_l_nsprefix_ = None self.qd0 = _cast(None, qd0) + self.qd0_nsprefix_ = None self.pp = _cast(None, pp) + self.pp_nsprefix_ = None self.alphac = _cast(None, alphac) + self.alphac_nsprefix_ = None self.betac = _cast(None, betac) + self.betac_nsprefix_ = None self.cm = _cast(None, cm) + self.cm_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -18315,44 +24862,152 @@ def factory(*args_, **kwargs_): else: return PinskyRinzelCA3Cell(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_iSoma(self): + return self.i_soma + def set_iSoma(self, i_soma): + self.i_soma = 
i_soma + def get_iDend(self): + return self.i_dend + def set_iDend(self, i_dend): + self.i_dend = i_dend + def get_gc(self): + return self.gc + def set_gc(self, gc): + self.gc = gc + def get_gLs(self): + return self.g_ls + def set_gLs(self, g_ls): + self.g_ls = g_ls + def get_gLd(self): + return self.g_ld + def set_gLd(self, g_ld): + self.g_ld = g_ld + def get_gNa(self): + return self.g_na + def set_gNa(self, g_na): + self.g_na = g_na + def get_gKdr(self): + return self.g_kdr + def set_gKdr(self, g_kdr): + self.g_kdr = g_kdr + def get_gCa(self): + return self.g_ca + def set_gCa(self, g_ca): + self.g_ca = g_ca + def get_gKahp(self): + return self.g_kahp + def set_gKahp(self, g_kahp): + self.g_kahp = g_kahp + def get_gKC(self): + return self.g_kc + def set_gKC(self, g_kc): + self.g_kc = g_kc + def get_gNmda(self): + return self.g_nmda + def set_gNmda(self, g_nmda): + self.g_nmda = g_nmda + def get_gAmpa(self): + return self.g_ampa + def set_gAmpa(self, g_ampa): + self.g_ampa = g_ampa + def get_eNa(self): + return self.e_na + def set_eNa(self, e_na): + self.e_na = e_na + def get_eCa(self): + return self.e_ca + def set_eCa(self, e_ca): + self.e_ca = e_ca + def get_eK(self): + return self.e_k + def set_eK(self, e_k): + self.e_k = e_k + def get_eL(self): + return self.e_l + def set_eL(self, e_l): + self.e_l = e_l + def get_qd0(self): + return self.qd0 + def set_qd0(self, qd0): + self.qd0 = qd0 + def get_pp(self): + return self.pp + def set_pp(self, pp): + self.pp = pp + def get_alphac(self): + return self.alphac + def set_alphac(self, alphac): + self.alphac = alphac + def get_betac(self): + return self.betac + def set_betac(self, betac): + self.betac = betac + def get_cm(self): + return self.cm + def set_cm(self, cm): + self.cm = cm def validate_Nml2Quantity_currentDensity(self, value): # Validate type Nml2Quantity_currentDensity, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_currentDensity_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_currentDensity_patterns_, )) - validate_Nml2Quantity_currentDensity_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(A_per_m2|uA_per_cm2|mA_per_cm2)$']] + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_currentDensity_patterns_, )) + validate_Nml2Quantity_currentDensity_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(A_per_m2|uA_per_cm2|mA_per_cm2))$']] def validate_Nml2Quantity_conductanceDensity(self, value): # Validate type Nml2Quantity_conductanceDensity, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_conductanceDensity_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_conductanceDensity_patterns_, )) - validate_Nml2Quantity_conductanceDensity_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S_per_m2|mS_per_cm2|S_per_cm2)$']] + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_conductanceDensity_patterns_, )) + validate_Nml2Quantity_conductanceDensity_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S_per_m2|mS_per_cm2|S_per_cm2))$']] def validate_Nml2Quantity_voltage(self, value): # Validate type Nml2Quantity_voltage, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_voltage_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_voltage_patterns_, )) - validate_Nml2Quantity_voltage_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$']] + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_voltage_patterns_, )) + validate_Nml2Quantity_voltage_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV))$']] def validate_Nml2Quantity_none(self, value): # Validate type Nml2Quantity_none, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_none_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_none_patterns_, )) - validate_Nml2Quantity_none_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?$']] + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_none_patterns_, )) + validate_Nml2Quantity_none_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?)$']] def validate_Nml2Quantity_specificCapacitance(self, value): # Validate type Nml2Quantity_specificCapacitance, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_specificCapacitance_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_specificCapacitance_patterns_, )) - validate_Nml2Quantity_specificCapacitance_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(F_per_m2|uF_per_cm2)$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_specificCapacitance_patterns_, )) + validate_Nml2Quantity_specificCapacitance_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(F_per_m2|uF_per_cm2))$']] + def _hasContent(self): if ( - super(PinskyRinzelCA3Cell, self).hasContent_() + super(PinskyRinzelCA3Cell, self)._hasContent() ): return True else: @@ -18365,94 +25020,100 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='P eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'PinskyRinzelCA3Cell': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PinskyRinzelCA3Cell') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PinskyRinzelCA3Cell') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PinskyRinzelCA3Cell', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PinskyRinzelCA3Cell', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PinskyRinzelCA3Cell'): - super(PinskyRinzelCA3Cell, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PinskyRinzelCA3Cell') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PinskyRinzelCA3Cell'): + super(PinskyRinzelCA3Cell, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PinskyRinzelCA3Cell') if self.i_soma is not None and 'i_soma' not in already_processed: already_processed.add('i_soma') - outfile.write(' iSoma=%s' % (quote_attrib(self.i_soma), )) + outfile.write(' iSoma=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.i_soma), input_name='iSoma')), )) if self.i_dend is not None and 'i_dend' not in already_processed: already_processed.add('i_dend') - outfile.write(' iDend=%s' % (quote_attrib(self.i_dend), )) + outfile.write(' iDend=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.i_dend), input_name='iDend')), )) if self.gc 
is not None and 'gc' not in already_processed: already_processed.add('gc') - outfile.write(' gc=%s' % (quote_attrib(self.gc), )) + outfile.write(' gc=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.gc), input_name='gc')), )) if self.g_ls is not None and 'g_ls' not in already_processed: already_processed.add('g_ls') - outfile.write(' gLs=%s' % (quote_attrib(self.g_ls), )) + outfile.write(' gLs=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.g_ls), input_name='gLs')), )) if self.g_ld is not None and 'g_ld' not in already_processed: already_processed.add('g_ld') - outfile.write(' gLd=%s' % (quote_attrib(self.g_ld), )) + outfile.write(' gLd=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.g_ld), input_name='gLd')), )) if self.g_na is not None and 'g_na' not in already_processed: already_processed.add('g_na') - outfile.write(' gNa=%s' % (quote_attrib(self.g_na), )) + outfile.write(' gNa=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.g_na), input_name='gNa')), )) if self.g_kdr is not None and 'g_kdr' not in already_processed: already_processed.add('g_kdr') - outfile.write(' gKdr=%s' % (quote_attrib(self.g_kdr), )) + outfile.write(' gKdr=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.g_kdr), input_name='gKdr')), )) if self.g_ca is not None and 'g_ca' not in already_processed: already_processed.add('g_ca') - outfile.write(' gCa=%s' % (quote_attrib(self.g_ca), )) + outfile.write(' gCa=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.g_ca), input_name='gCa')), )) if self.g_kahp is not None and 'g_kahp' not in already_processed: already_processed.add('g_kahp') - outfile.write(' gKahp=%s' % (quote_attrib(self.g_kahp), )) + outfile.write(' gKahp=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.g_kahp), input_name='gKahp')), )) if self.g_kc is not None and 'g_kc' not in already_processed: already_processed.add('g_kc') - outfile.write(' gKC=%s' % (quote_attrib(self.g_kc), )) + outfile.write(' gKC=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.g_kc), input_name='gKC')), )) if self.g_nmda is not None and 'g_nmda' not in already_processed: already_processed.add('g_nmda') - outfile.write(' gNmda=%s' % (quote_attrib(self.g_nmda), )) + outfile.write(' gNmda=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.g_nmda), input_name='gNmda')), )) if self.g_ampa is not None and 'g_ampa' not in already_processed: already_processed.add('g_ampa') - outfile.write(' gAmpa=%s' % (quote_attrib(self.g_ampa), )) + outfile.write(' gAmpa=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.g_ampa), input_name='gAmpa')), )) if self.e_na is not None and 'e_na' not in already_processed: already_processed.add('e_na') - outfile.write(' eNa=%s' % (quote_attrib(self.e_na), )) + outfile.write(' eNa=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.e_na), input_name='eNa')), )) if self.e_ca is not None and 'e_ca' not in already_processed: already_processed.add('e_ca') - outfile.write(' eCa=%s' % (quote_attrib(self.e_ca), )) + outfile.write(' eCa=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.e_ca), input_name='eCa')), )) if self.e_k is not None and 'e_k' not in already_processed: already_processed.add('e_k') - outfile.write(' eK=%s' % (quote_attrib(self.e_k), )) + outfile.write(' eK=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.e_k), input_name='eK')), )) if self.e_l is not None and 'e_l' not in already_processed: 
already_processed.add('e_l') - outfile.write(' eL=%s' % (quote_attrib(self.e_l), )) + outfile.write(' eL=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.e_l), input_name='eL')), )) if self.qd0 is not None and 'qd0' not in already_processed: already_processed.add('qd0') - outfile.write(' qd0=%s' % (quote_attrib(self.qd0), )) + outfile.write(' qd0=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.qd0), input_name='qd0')), )) if self.pp is not None and 'pp' not in already_processed: already_processed.add('pp') - outfile.write(' pp=%s' % (quote_attrib(self.pp), )) + outfile.write(' pp=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.pp), input_name='pp')), )) if self.alphac is not None and 'alphac' not in already_processed: already_processed.add('alphac') - outfile.write(' alphac=%s' % (quote_attrib(self.alphac), )) + outfile.write(' alphac=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.alphac), input_name='alphac')), )) if self.betac is not None and 'betac' not in already_processed: already_processed.add('betac') - outfile.write(' betac=%s' % (quote_attrib(self.betac), )) + outfile.write(' betac=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.betac), input_name='betac')), )) if self.cm is not None and 'cm' not in already_processed: already_processed.add('cm') - outfile.write(' cm=%s' % (quote_attrib(self.cm), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PinskyRinzelCA3Cell', fromsubclass_=False, pretty_print=True): - super(PinskyRinzelCA3Cell, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write(' cm=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.cm), input_name='cm')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PinskyRinzelCA3Cell', fromsubclass_=False, pretty_print=True): + super(PinskyRinzelCA3Cell, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('iSoma', node) if value is not None and 'iSoma' not in already_processed: already_processed.add('iSoma') @@ -18558,34 +25219,44 @@ def buildAttributes(self, node, attrs, already_processed): already_processed.add('cm') self.cm = value self.validate_Nml2Quantity_specificCapacitance(self.cm) # validate type Nml2Quantity_specificCapacitance - super(PinskyRinzelCA3Cell, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(PinskyRinzelCA3Cell, self).buildChildren(child_, node, nodeName_, True) + super(PinskyRinzelCA3Cell, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, 
fromsubclass_=False, gds_collector_=None): + super(PinskyRinzelCA3Cell, self)._buildChildren(child_, node, nodeName_, True) pass # end class PinskyRinzelCA3Cell class FitzHughNagumo1969Cell(BaseCell): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('a', 'Nml2Quantity_none', 0, 0, {'use': u'required'}), - MemberSpec_('b', 'Nml2Quantity_none', 0, 0, {'use': u'required'}), - MemberSpec_('I', 'Nml2Quantity_none', 0, 0, {'use': u'required'}), - MemberSpec_('phi', 'Nml2Quantity_none', 0, 0, {'use': u'required'}), - MemberSpec_('V0', 'Nml2Quantity_none', 0, 0, {'use': u'required'}), - MemberSpec_('W0', 'Nml2Quantity_none', 0, 0, {'use': u'required'}), + MemberSpec_('a', 'Nml2Quantity_none', 0, 0, {'use': 'required', 'name': 'a'}), + MemberSpec_('b', 'Nml2Quantity_none', 0, 0, {'use': 'required', 'name': 'b'}), + MemberSpec_('I', 'Nml2Quantity_none', 0, 0, {'use': 'required', 'name': 'I'}), + MemberSpec_('phi', 'Nml2Quantity_none', 0, 0, {'use': 'required', 'name': 'phi'}), + MemberSpec_('V0', 'Nml2Quantity_none', 0, 0, {'use': 'required', 'name': 'V0'}), + MemberSpec_('W0', 'Nml2Quantity_none', 0, 0, {'use': 'required', 'name': 'W0'}), ] subclass = None superclass = BaseCell - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, a=None, b=None, I=None, phi=None, V0=None, W0=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, a=None, b=None, I=None, phi=None, V0=None, W0=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(FitzHughNagumo1969Cell, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.ns_prefix_ = None + super(globals().get("FitzHughNagumo1969Cell"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) self.a = _cast(None, a) + self.a_nsprefix_ = None self.b = _cast(None, b) + self.b_nsprefix_ = None self.I = _cast(None, I) + self.I_nsprefix_ = None self.phi = _cast(None, phi) + self.phi_nsprefix_ = None self.V0 = _cast(None, V0) + self.V0_nsprefix_ = None self.W0 = _cast(None, W0) + self.W0_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -18597,16 +25268,48 @@ def factory(*args_, **kwargs_): else: return FitzHughNagumo1969Cell(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_a(self): + return self.a + def set_a(self, a): + self.a = a + def get_b(self): + return self.b + def set_b(self, b): + self.b = b + def get_I(self): + return self.I + def set_I(self, I): + self.I = I + def get_phi(self): + return self.phi + def set_phi(self, phi): + self.phi = phi + def get_V0(self): + return self.V0 + def set_V0(self, V0): + self.V0 = V0 + def get_W0(self): + return self.W0 + def set_W0(self, W0): + self.W0 = W0 def validate_Nml2Quantity_none(self, value): # Validate type Nml2Quantity_none, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_none_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_none_patterns_, )) - validate_Nml2Quantity_none_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_none_patterns_, )) + validate_Nml2Quantity_none_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?)$']] + def _hasContent(self): if ( - super(FitzHughNagumo1969Cell, self).hasContent_() + super(FitzHughNagumo1969Cell, self)._hasContent() ): return True else: @@ -18619,49 +25322,55 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='F eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'FitzHughNagumo1969Cell': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='FitzHughNagumo1969Cell') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='FitzHughNagumo1969Cell') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='FitzHughNagumo1969Cell', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='FitzHughNagumo1969Cell', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='FitzHughNagumo1969Cell'): - super(FitzHughNagumo1969Cell, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='FitzHughNagumo1969Cell') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='FitzHughNagumo1969Cell'): + super(FitzHughNagumo1969Cell, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='FitzHughNagumo1969Cell') if self.a is not None and 'a' not in already_processed: already_processed.add('a') - outfile.write(' a=%s' % (quote_attrib(self.a), )) + outfile.write(' a=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.a), input_name='a')), )) if self.b is not None and 'b' not in already_processed: already_processed.add('b') - outfile.write(' b=%s' % (quote_attrib(self.b), )) + outfile.write(' b=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.b), input_name='b')), )) if self.I is not None and 'I' not in already_processed: already_processed.add('I') - outfile.write(' I=%s' % (quote_attrib(self.I), )) + outfile.write(' I=%s' % 
(self.gds_encode(self.gds_format_string(quote_attrib(self.I), input_name='I')), )) if self.phi is not None and 'phi' not in already_processed: already_processed.add('phi') - outfile.write(' phi=%s' % (quote_attrib(self.phi), )) + outfile.write(' phi=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.phi), input_name='phi')), )) if self.V0 is not None and 'V0' not in already_processed: already_processed.add('V0') - outfile.write(' V0=%s' % (quote_attrib(self.V0), )) + outfile.write(' V0=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.V0), input_name='V0')), )) if self.W0 is not None and 'W0' not in already_processed: already_processed.add('W0') - outfile.write(' W0=%s' % (quote_attrib(self.W0), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='FitzHughNagumo1969Cell', fromsubclass_=False, pretty_print=True): - super(FitzHughNagumo1969Cell, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write(' W0=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.W0), input_name='W0')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='FitzHughNagumo1969Cell', fromsubclass_=False, pretty_print=True): + super(FitzHughNagumo1969Cell, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('a', node) if value is not None and 'a' not in already_processed: already_processed.add('a') @@ -18692,24 +25401,29 @@ def buildAttributes(self, node, attrs, already_processed): already_processed.add('W0') self.W0 = value self.validate_Nml2Quantity_none(self.W0) # validate type Nml2Quantity_none - super(FitzHughNagumo1969Cell, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(FitzHughNagumo1969Cell, self).buildChildren(child_, node, nodeName_, True) + super(FitzHughNagumo1969Cell, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(FitzHughNagumo1969Cell, self)._buildChildren(child_, node, nodeName_, True) pass # end class FitzHughNagumo1969Cell class FitzHughNagumoCell(BaseCell): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('I', 'Nml2Quantity_none', 0, 0, {'use': u'required'}), + MemberSpec_('I', 'Nml2Quantity_none', 0, 0, {'use': 'required', 'name': 'I'}), ] subclass = None superclass = BaseCell - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, I=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, I=None, 
gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(FitzHughNagumoCell, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.ns_prefix_ = None + super(globals().get("FitzHughNagumoCell"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) self.I = _cast(None, I) + self.I_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -18721,16 +25435,28 @@ def factory(*args_, **kwargs_): else: return FitzHughNagumoCell(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_I(self): + return self.I + def set_I(self, I): + self.I = I def validate_Nml2Quantity_none(self, value): # Validate type Nml2Quantity_none, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_none_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_none_patterns_, )) - validate_Nml2Quantity_none_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_none_patterns_, )) + validate_Nml2Quantity_none_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?)$']] + def _hasContent(self): if ( - super(FitzHughNagumoCell, self).hasContent_() + super(FitzHughNagumoCell, self)._hasContent() ): return True else: @@ -18743,59 +25469,71 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='F eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'FitzHughNagumoCell': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='FitzHughNagumoCell') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='FitzHughNagumoCell') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='FitzHughNagumoCell', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='FitzHughNagumoCell', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='FitzHughNagumoCell'): - super(FitzHughNagumoCell, 
self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='FitzHughNagumoCell') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='FitzHughNagumoCell'): + super(FitzHughNagumoCell, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='FitzHughNagumoCell') if self.I is not None and 'I' not in already_processed: already_processed.add('I') - outfile.write(' I=%s' % (quote_attrib(self.I), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='FitzHughNagumoCell', fromsubclass_=False, pretty_print=True): - super(FitzHughNagumoCell, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write(' I=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.I), input_name='I')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='FitzHughNagumoCell', fromsubclass_=False, pretty_print=True): + super(FitzHughNagumoCell, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('I', node) if value is not None and 'I' not in already_processed: already_processed.add('I') self.I = value self.validate_Nml2Quantity_none(self.I) # validate type Nml2Quantity_none - super(FitzHughNagumoCell, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(FitzHughNagumoCell, self).buildChildren(child_, node, nodeName_, True) + super(FitzHughNagumoCell, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(FitzHughNagumoCell, self)._buildChildren(child_, node, nodeName_, True) pass # end class FitzHughNagumoCell class BaseCellMembPotCap(BaseCell): - """This is to prevent it conflicting with attribute c (lowercase) e.g. - in izhikevichCell2007""" + """C -- This is to prevent it conflicting with attribute c (lowercase) e.g. 
in izhikevichCell2007 + + """ + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('C', 'Nml2Quantity_capacitance', 0, 0, {'use': u'required'}), + MemberSpec_('C', 'Nml2Quantity_capacitance', 0, 0, {'use': 'required', 'name': 'C'}), ] subclass = None superclass = BaseCell - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, C=None, extensiontype_=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, C=None, extensiontype_=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(BaseCellMembPotCap, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, extensiontype_, **kwargs_) + self.ns_prefix_ = None + super(globals().get("BaseCellMembPotCap"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, extensiontype_, **kwargs_) self.C = _cast(None, C) + self.C_nsprefix_ = None self.extensiontype_ = extensiontype_ def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: @@ -18808,16 +25546,30 @@ def factory(*args_, **kwargs_): else: return BaseCellMembPotCap(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_C(self): + return self.C + def set_C(self, C): + self.C = C + def get_extensiontype_(self): return self.extensiontype_ + def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ def validate_Nml2Quantity_capacitance(self, value): # Validate type Nml2Quantity_capacitance, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_capacitance_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_capacitance_patterns_, )) - validate_Nml2Quantity_capacitance_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(F|uF|nF|pF)$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_capacitance_patterns_, )) + validate_Nml2Quantity_capacitance_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(F|uF|nF|pF))$']] + def _hasContent(self): if ( - super(BaseCellMembPotCap, self).hasContent_() + super(BaseCellMembPotCap, self)._hasContent() ): return True else: @@ -18830,38 +25582,48 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='B eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'BaseCellMembPotCap': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseCellMembPotCap') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseCellMembPotCap') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BaseCellMembPotCap', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BaseCellMembPotCap', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BaseCellMembPotCap'): - super(BaseCellMembPotCap, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseCellMembPotCap') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BaseCellMembPotCap'): + super(BaseCellMembPotCap, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseCellMembPotCap') if self.C is not None and 'C' not in already_processed: already_processed.add('C') - outfile.write(' C=%s' % (quote_attrib(self.C), )) + outfile.write(' C=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.C), input_name='C')), )) if self.extensiontype_ is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseCellMembPotCap', fromsubclass_=False, pretty_print=True): - 
super(BaseCellMembPotCap, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get(self.extensiontype_, '') + outfile.write(' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_)) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseCellMembPotCap', fromsubclass_=False, pretty_print=True): + super(BaseCellMembPotCap, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('C', node) if value is not None and 'C' not in already_processed: already_processed.add('C') @@ -18871,34 +25633,44 @@ def buildAttributes(self, node, attrs, already_processed): if value is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') self.extensiontype_ = value - super(BaseCellMembPotCap, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(BaseCellMembPotCap, self).buildChildren(child_, node, nodeName_, True) + super(BaseCellMembPotCap, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(BaseCellMembPotCap, self)._buildChildren(child_, node, nodeName_, True) pass # end class BaseCellMembPotCap class IzhikevichCell(BaseCell): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('v0', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('thresh', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('a', 'Nml2Quantity_none', 0, 0, {'use': u'required'}), - MemberSpec_('b', 'Nml2Quantity_none', 0, 0, {'use': u'required'}), - MemberSpec_('c', 'Nml2Quantity_none', 0, 0, {'use': u'required'}), - MemberSpec_('d', 'Nml2Quantity_none', 0, 0, {'use': u'required'}), + MemberSpec_('v0', 'Nml2Quantity_voltage', 0, 0, {'use': 'required', 'name': 'v0'}), + MemberSpec_('thresh', 'Nml2Quantity_voltage', 0, 0, {'use': 'required', 'name': 'thresh'}), + MemberSpec_('a', 'Nml2Quantity_none', 0, 0, {'use': 'required', 'name': 'a'}), + MemberSpec_('b', 'Nml2Quantity_none', 0, 0, {'use': 'required', 'name': 'b'}), + MemberSpec_('c', 'Nml2Quantity_none', 0, 0, {'use': 'required', 'name': 'c'}), + MemberSpec_('d', 'Nml2Quantity_none', 0, 0, {'use': 'required', 'name': 'd'}), ] subclass = None superclass = BaseCell - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, v0=None, thresh=None, a=None, b=None, c=None, d=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, 
metaid=None, notes=None, properties=None, annotation=None, v0=None, thresh=None, a=None, b=None, c=None, d=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(IzhikevichCell, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.ns_prefix_ = None + super(globals().get("IzhikevichCell"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) self.v0 = _cast(None, v0) + self.v0_nsprefix_ = None self.thresh = _cast(None, thresh) + self.thresh_nsprefix_ = None self.a = _cast(None, a) + self.a_nsprefix_ = None self.b = _cast(None, b) + self.b_nsprefix_ = None self.c = _cast(None, c) + self.c_nsprefix_ = None self.d = _cast(None, d) + self.d_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -18910,23 +25682,59 @@ def factory(*args_, **kwargs_): else: return IzhikevichCell(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_v0(self): + return self.v0 + def set_v0(self, v0): + self.v0 = v0 + def get_thresh(self): + return self.thresh + def set_thresh(self, thresh): + self.thresh = thresh + def get_a(self): + return self.a + def set_a(self, a): + self.a = a + def get_b(self): + return self.b + def set_b(self, b): + self.b = b + def get_c(self): + return self.c + def set_c(self, c): + self.c = c + def get_d(self): + return self.d + def set_d(self, d): + self.d = d def validate_Nml2Quantity_voltage(self, value): # Validate type Nml2Quantity_voltage, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_voltage_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_voltage_patterns_, )) - validate_Nml2Quantity_voltage_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$']] + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_voltage_patterns_, )) + validate_Nml2Quantity_voltage_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV))$']] def validate_Nml2Quantity_none(self, value): # Validate type Nml2Quantity_none, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_none_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_none_patterns_, )) - validate_Nml2Quantity_none_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_none_patterns_, )) + validate_Nml2Quantity_none_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?)$']] + def _hasContent(self): if ( - super(IzhikevichCell, self).hasContent_() + super(IzhikevichCell, self)._hasContent() ): return True else: @@ -18939,49 +25747,55 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='I eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'IzhikevichCell': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IzhikevichCell') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IzhikevichCell') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='IzhikevichCell', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='IzhikevichCell', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='IzhikevichCell'): - super(IzhikevichCell, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IzhikevichCell') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='IzhikevichCell'): + super(IzhikevichCell, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IzhikevichCell') if self.v0 is not None and 'v0' not in already_processed: already_processed.add('v0') - outfile.write(' v0=%s' % (quote_attrib(self.v0), )) + outfile.write(' v0=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.v0), input_name='v0')), )) if self.thresh is not None and 'thresh' not in already_processed: already_processed.add('thresh') - outfile.write(' thresh=%s' % (quote_attrib(self.thresh), )) + outfile.write(' thresh=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.thresh), input_name='thresh')), )) if self.a is not None and 'a' not in already_processed: already_processed.add('a') - outfile.write(' a=%s' % (quote_attrib(self.a), )) + outfile.write(' a=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.a), 
input_name='a')), )) if self.b is not None and 'b' not in already_processed: already_processed.add('b') - outfile.write(' b=%s' % (quote_attrib(self.b), )) + outfile.write(' b=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.b), input_name='b')), )) if self.c is not None and 'c' not in already_processed: already_processed.add('c') - outfile.write(' c=%s' % (quote_attrib(self.c), )) + outfile.write(' c=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.c), input_name='c')), )) if self.d is not None and 'd' not in already_processed: already_processed.add('d') - outfile.write(' d=%s' % (quote_attrib(self.d), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IzhikevichCell', fromsubclass_=False, pretty_print=True): - super(IzhikevichCell, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write(' d=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.d), input_name='d')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IzhikevichCell', fromsubclass_=False, pretty_print=True): + super(IzhikevichCell, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('v0', node) if value is not None and 'v0' not in already_processed: already_processed.add('v0') @@ -19012,32 +25826,41 @@ def buildAttributes(self, node, attrs, already_processed): already_processed.add('d') self.d = value self.validate_Nml2Quantity_none(self.d) # validate type Nml2Quantity_none - super(IzhikevichCell, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(IzhikevichCell, self).buildChildren(child_, node, nodeName_, True) + super(IzhikevichCell, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(IzhikevichCell, self)._buildChildren(child_, node, nodeName_, True) pass # end class IzhikevichCell class IafCell(BaseCell): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('leak_reversal', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('thresh', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('reset', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('C', 'Nml2Quantity_capacitance', 0, 0, {'use': u'required'}), - MemberSpec_('leak_conductance', 'Nml2Quantity_conductance', 0, 0, {'use': u'required'}), + MemberSpec_('leak_reversal', 'Nml2Quantity_voltage', 0, 0, {'use': 'required', 'name': 'leak_reversal'}), + MemberSpec_('thresh', 'Nml2Quantity_voltage', 0, 0, {'use': 'required', 'name': 'thresh'}), + 
MemberSpec_('reset', 'Nml2Quantity_voltage', 0, 0, {'use': 'required', 'name': 'reset'}), + MemberSpec_('C', 'Nml2Quantity_capacitance', 0, 0, {'use': 'required', 'name': 'C'}), + MemberSpec_('leak_conductance', 'Nml2Quantity_conductance', 0, 0, {'use': 'required', 'name': 'leak_conductance'}), ] subclass = None superclass = BaseCell - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, leak_reversal=None, thresh=None, reset=None, C=None, leak_conductance=None, extensiontype_=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, leak_reversal=None, thresh=None, reset=None, C=None, leak_conductance=None, extensiontype_=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(IafCell, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, extensiontype_, **kwargs_) + self.ns_prefix_ = None + super(globals().get("IafCell"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, extensiontype_, **kwargs_) self.leak_reversal = _cast(None, leak_reversal) + self.leak_reversal_nsprefix_ = None self.thresh = _cast(None, thresh) + self.thresh_nsprefix_ = None self.reset = _cast(None, reset) + self.reset_nsprefix_ = None self.C = _cast(None, C) + self.C_nsprefix_ = None self.leak_conductance = _cast(None, leak_conductance) + self.leak_conductance_nsprefix_ = None self.extensiontype_ = extensiontype_ def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: @@ -19050,30 +25873,68 @@ def factory(*args_, **kwargs_): else: return IafCell(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_leakReversal(self): + return self.leak_reversal + def set_leakReversal(self, leak_reversal): + self.leak_reversal = leak_reversal + def get_thresh(self): + return self.thresh + def set_thresh(self, thresh): + self.thresh = thresh + def get_reset(self): + return self.reset + def set_reset(self, reset): + self.reset = reset + def get_C(self): + return self.C + def set_C(self, C): + self.C = C + def get_leakConductance(self): + return self.leak_conductance + def set_leakConductance(self, leak_conductance): + self.leak_conductance = leak_conductance + def get_extensiontype_(self): return self.extensiontype_ + def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ def validate_Nml2Quantity_voltage(self, value): # Validate type Nml2Quantity_voltage, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_voltage_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_voltage_patterns_, )) - validate_Nml2Quantity_voltage_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$']] + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_voltage_patterns_, )) + validate_Nml2Quantity_voltage_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV))$']] def validate_Nml2Quantity_capacitance(self, value): # Validate type Nml2Quantity_capacitance, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_capacitance_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_capacitance_patterns_, )) - validate_Nml2Quantity_capacitance_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(F|uF|nF|pF)$']] + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_capacitance_patterns_, )) + validate_Nml2Quantity_capacitance_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(F|uF|nF|pF))$']] def validate_Nml2Quantity_conductance(self, value): # Validate type Nml2Quantity_conductance, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_conductance_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_conductance_patterns_, )) - validate_Nml2Quantity_conductance_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S|mS|uS|nS|pS)$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_conductance_patterns_, )) + validate_Nml2Quantity_conductance_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S|mS|uS|nS|pS))$']] + def _hasContent(self): if ( - super(IafCell, self).hasContent_() + super(IafCell, self)._hasContent() ): return True else: @@ -19086,50 +25947,60 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='I eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'IafCell': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IafCell') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IafCell') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='IafCell', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='IafCell', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='IafCell'): - super(IafCell, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IafCell') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='IafCell'): + super(IafCell, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IafCell') if self.leak_reversal is not None and 'leak_reversal' not in already_processed: already_processed.add('leak_reversal') - outfile.write(' leakReversal=%s' % (quote_attrib(self.leak_reversal), )) + outfile.write(' leakReversal=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.leak_reversal), input_name='leakReversal')), )) if self.thresh is not None and 'thresh' not in already_processed: already_processed.add('thresh') - outfile.write(' thresh=%s' % (quote_attrib(self.thresh), )) + outfile.write(' thresh=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.thresh), input_name='thresh')), )) if self.reset is not None and 'reset' not in already_processed: already_processed.add('reset') - outfile.write(' reset=%s' % (quote_attrib(self.reset), )) + 
outfile.write(' reset=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.reset), input_name='reset')), )) if self.C is not None and 'C' not in already_processed: already_processed.add('C') - outfile.write(' C=%s' % (quote_attrib(self.C), )) + outfile.write(' C=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.C), input_name='C')), )) if self.leak_conductance is not None and 'leak_conductance' not in already_processed: already_processed.add('leak_conductance') - outfile.write(' leakConductance=%s' % (quote_attrib(self.leak_conductance), )) + outfile.write(' leakConductance=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.leak_conductance), input_name='leakConductance')), )) if self.extensiontype_ is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IafCell', fromsubclass_=False, pretty_print=True): - super(IafCell, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get(self.extensiontype_, '') + outfile.write(' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_)) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IafCell', fromsubclass_=False, pretty_print=True): + super(IafCell, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('leakReversal', node) if value is not None and 'leakReversal' not in already_processed: already_processed.add('leakReversal') @@ -19159,30 +26030,38 @@ def buildAttributes(self, node, attrs, already_processed): if value is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') self.extensiontype_ = value - super(IafCell, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(IafCell, self).buildChildren(child_, node, nodeName_, True) + super(IafCell, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(IafCell, self)._buildChildren(child_, node, nodeName_, True) pass # end class IafCell class IafTauCell(BaseCell): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('leak_reversal', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('thresh', 'Nml2Quantity_voltage', 0, 0, {'use': 
u'required'}), - MemberSpec_('reset', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('tau', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), + MemberSpec_('leak_reversal', 'Nml2Quantity_voltage', 0, 0, {'use': 'required', 'name': 'leak_reversal'}), + MemberSpec_('thresh', 'Nml2Quantity_voltage', 0, 0, {'use': 'required', 'name': 'thresh'}), + MemberSpec_('reset', 'Nml2Quantity_voltage', 0, 0, {'use': 'required', 'name': 'reset'}), + MemberSpec_('tau', 'Nml2Quantity_time', 0, 0, {'use': 'required', 'name': 'tau'}), ] subclass = None superclass = BaseCell - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, leak_reversal=None, thresh=None, reset=None, tau=None, extensiontype_=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, leak_reversal=None, thresh=None, reset=None, tau=None, extensiontype_=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(IafTauCell, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, extensiontype_, **kwargs_) + self.ns_prefix_ = None + super(globals().get("IafTauCell"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, extensiontype_, **kwargs_) self.leak_reversal = _cast(None, leak_reversal) + self.leak_reversal_nsprefix_ = None self.thresh = _cast(None, thresh) + self.thresh_nsprefix_ = None self.reset = _cast(None, reset) + self.reset_nsprefix_ = None self.tau = _cast(None, tau) + self.tau_nsprefix_ = None self.extensiontype_ = extensiontype_ def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: @@ -19195,23 +26074,53 @@ def factory(*args_, **kwargs_): else: return IafTauCell(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_leakReversal(self): + return self.leak_reversal + def set_leakReversal(self, leak_reversal): + self.leak_reversal = leak_reversal + def get_thresh(self): + return self.thresh + def set_thresh(self, thresh): + self.thresh = thresh + def get_reset(self): + return self.reset + def set_reset(self, reset): + self.reset = reset + def get_tau(self): + return self.tau + def set_tau(self, tau): + self.tau = tau + def get_extensiontype_(self): return self.extensiontype_ + def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ def validate_Nml2Quantity_voltage(self, value): # Validate type Nml2Quantity_voltage, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_voltage_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_voltage_patterns_, )) - validate_Nml2Quantity_voltage_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$']] + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_voltage_patterns_, )) + validate_Nml2Quantity_voltage_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV))$']] def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_time_patterns_, )) + validate_Nml2Quantity_time_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms))$']] + def _hasContent(self): if ( - super(IafTauCell, self).hasContent_() + super(IafTauCell, self)._hasContent() ): return True else: @@ -19224,47 +26133,57 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='I eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'IafTauCell': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IafTauCell') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IafTauCell') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='IafTauCell', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='IafTauCell', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='IafTauCell'): - super(IafTauCell, 
self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IafTauCell') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='IafTauCell'): + super(IafTauCell, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IafTauCell') if self.leak_reversal is not None and 'leak_reversal' not in already_processed: already_processed.add('leak_reversal') - outfile.write(' leakReversal=%s' % (quote_attrib(self.leak_reversal), )) + outfile.write(' leakReversal=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.leak_reversal), input_name='leakReversal')), )) if self.thresh is not None and 'thresh' not in already_processed: already_processed.add('thresh') - outfile.write(' thresh=%s' % (quote_attrib(self.thresh), )) + outfile.write(' thresh=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.thresh), input_name='thresh')), )) if self.reset is not None and 'reset' not in already_processed: already_processed.add('reset') - outfile.write(' reset=%s' % (quote_attrib(self.reset), )) + outfile.write(' reset=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.reset), input_name='reset')), )) if self.tau is not None and 'tau' not in already_processed: already_processed.add('tau') - outfile.write(' tau=%s' % (quote_attrib(self.tau), )) + outfile.write(' tau=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.tau), input_name='tau')), )) if self.extensiontype_ is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IafTauCell', fromsubclass_=False, pretty_print=True): - super(IafTauCell, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get(self.extensiontype_, '') + outfile.write(' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_)) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IafTauCell', fromsubclass_=False, pretty_print=True): + super(IafTauCell, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('leakReversal', node) if value is not None and 'leakReversal' not in already_processed: already_processed.add('leakReversal') @@ -19289,34 +26208,44 @@ def buildAttributes(self, node, attrs, already_processed): if value is not None and 'xsi:type' not in already_processed: 
already_processed.add('xsi:type') self.extensiontype_ = value - super(IafTauCell, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(IafTauCell, self).buildChildren(child_, node, nodeName_, True) + super(IafTauCell, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(IafTauCell, self)._buildChildren(child_, node, nodeName_, True) pass # end class IafTauCell class GradedSynapse(BaseSynapse): - """Based on synapse in Methods of - http://www.nature.com/neuro/journal/v7/n12/abs/nn1352.html.""" + """GradedSynapse -- Based on synapse in Methods of http://www.nature.com/neuro/journal/v7/n12/abs/nn1352.html. + + """ + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('conductance', 'Nml2Quantity_conductance', 0, 0, {'use': u'required'}), - MemberSpec_('delta', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('Vth', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('k', 'Nml2Quantity_pertime', 0, 0, {'use': u'required'}), - MemberSpec_('erev', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), + MemberSpec_('conductance', 'Nml2Quantity_conductance', 0, 0, {'use': 'required', 'name': 'conductance'}), + MemberSpec_('delta', 'Nml2Quantity_voltage', 0, 0, {'use': 'required', 'name': 'delta'}), + MemberSpec_('Vth', 'Nml2Quantity_voltage', 0, 0, {'use': 'required', 'name': 'Vth'}), + MemberSpec_('k', 'Nml2Quantity_pertime', 0, 0, {'use': 'required', 'name': 'k'}), + MemberSpec_('erev', 'Nml2Quantity_voltage', 0, 0, {'use': 'required', 'name': 'erev'}), ] subclass = None superclass = BaseSynapse - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, conductance=None, delta=None, Vth=None, k=None, erev=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, conductance=None, delta=None, Vth=None, k=None, erev=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(GradedSynapse, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.ns_prefix_ = None + super(globals().get("GradedSynapse"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) self.conductance = _cast(None, conductance) + self.conductance_nsprefix_ = None self.delta = _cast(None, delta) + self.delta_nsprefix_ = None self.Vth = _cast(None, Vth) + self.Vth_nsprefix_ = None self.k = _cast(None, k) + self.k_nsprefix_ = None self.erev = _cast(None, erev) + self.erev_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -19328,30 +26257,66 @@ def factory(*args_, **kwargs_): else: return GradedSynapse(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_conductance(self): + return self.conductance + def set_conductance(self, conductance): + self.conductance = conductance + def get_delta(self): + return self.delta + def set_delta(self, delta): + self.delta = delta + def get_Vth(self): + return self.Vth + def set_Vth(self, Vth): + self.Vth = Vth + def get_k(self): + return 
self.k + def set_k(self, k): + self.k = k + def get_erev(self): + return self.erev + def set_erev(self, erev): + self.erev = erev def validate_Nml2Quantity_conductance(self, value): # Validate type Nml2Quantity_conductance, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_conductance_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_conductance_patterns_, )) - validate_Nml2Quantity_conductance_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S|mS|uS|nS|pS)$']] + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_conductance_patterns_, )) + validate_Nml2Quantity_conductance_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S|mS|uS|nS|pS))$']] def validate_Nml2Quantity_voltage(self, value): # Validate type Nml2Quantity_voltage, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_voltage_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_voltage_patterns_, )) - validate_Nml2Quantity_voltage_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$']] + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_voltage_patterns_, )) + validate_Nml2Quantity_voltage_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV))$']] def validate_Nml2Quantity_pertime(self, value): # Validate type Nml2Quantity_pertime, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_pertime_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_pertime_patterns_, )) - validate_Nml2Quantity_pertime_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(per_s|per_ms|Hz)$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_pertime_patterns_, )) + validate_Nml2Quantity_pertime_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(per_s|per_ms|Hz))$']] + def _hasContent(self): if ( - super(GradedSynapse, self).hasContent_() + super(GradedSynapse, self)._hasContent() ): return True else: @@ -19364,46 +26329,52 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='G eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'GradedSynapse': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GradedSynapse') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GradedSynapse') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='GradedSynapse', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='GradedSynapse', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GradedSynapse'): - super(GradedSynapse, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GradedSynapse') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GradedSynapse'): + super(GradedSynapse, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GradedSynapse') if self.conductance is not None and 'conductance' not in already_processed: already_processed.add('conductance') - outfile.write(' conductance=%s' % (quote_attrib(self.conductance), )) + outfile.write(' conductance=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.conductance), input_name='conductance')), )) if self.delta is not None and 'delta' not in already_processed: already_processed.add('delta') - outfile.write(' delta=%s' % (quote_attrib(self.delta), )) + outfile.write(' delta=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.delta), input_name='delta')), )) if self.Vth is not None and 'Vth' not in already_processed: already_processed.add('Vth') - outfile.write(' Vth=%s' % 
(quote_attrib(self.Vth), )) + outfile.write(' Vth=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.Vth), input_name='Vth')), )) if self.k is not None and 'k' not in already_processed: already_processed.add('k') - outfile.write(' k=%s' % (quote_attrib(self.k), )) + outfile.write(' k=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.k), input_name='k')), )) if self.erev is not None and 'erev' not in already_processed: already_processed.add('erev') - outfile.write(' erev=%s' % (quote_attrib(self.erev), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GradedSynapse', fromsubclass_=False, pretty_print=True): - super(GradedSynapse, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write(' erev=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.erev), input_name='erev')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GradedSynapse', fromsubclass_=False, pretty_print=True): + super(GradedSynapse, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('conductance', node) if value is not None and 'conductance' not in already_processed: already_processed.add('conductance') @@ -19429,25 +26400,32 @@ def buildAttributes(self, node, attrs, already_processed): already_processed.add('erev') self.erev = value self.validate_Nml2Quantity_voltage(self.erev) # validate type Nml2Quantity_voltage - super(GradedSynapse, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(GradedSynapse, self).buildChildren(child_, node, nodeName_, True) + super(GradedSynapse, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(GradedSynapse, self)._buildChildren(child_, node, nodeName_, True) pass # end class GradedSynapse class LinearGradedSynapse(BaseSynapse): - """Behaves just like a one way gap junction.""" + """LinearGradedSynapse -- Behaves just like a one way gap junction. 
+ + """ + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('conductance', 'Nml2Quantity_conductance', 0, 0, {'use': u'required'}), + MemberSpec_('conductance', 'Nml2Quantity_conductance', 0, 0, {'use': 'required', 'name': 'conductance'}), ] subclass = None superclass = BaseSynapse - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, conductance=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, conductance=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(LinearGradedSynapse, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.ns_prefix_ = None + super(globals().get("LinearGradedSynapse"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) self.conductance = _cast(None, conductance) + self.conductance_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -19459,16 +26437,28 @@ def factory(*args_, **kwargs_): else: return LinearGradedSynapse(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_conductance(self): + return self.conductance + def set_conductance(self, conductance): + self.conductance = conductance def validate_Nml2Quantity_conductance(self, value): # Validate type Nml2Quantity_conductance, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_conductance_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_conductance_patterns_, )) - validate_Nml2Quantity_conductance_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S|mS|uS|nS|pS)$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_conductance_patterns_, )) + validate_Nml2Quantity_conductance_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S|mS|uS|nS|pS))$']] + def _hasContent(self): if ( - super(LinearGradedSynapse, self).hasContent_() + super(LinearGradedSynapse, self)._hasContent() ): return True else: @@ -19481,57 +26471,68 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='L eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'LinearGradedSynapse': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, 
name_='LinearGradedSynapse') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='LinearGradedSynapse') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='LinearGradedSynapse', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='LinearGradedSynapse', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='LinearGradedSynapse'): - super(LinearGradedSynapse, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='LinearGradedSynapse') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='LinearGradedSynapse'): + super(LinearGradedSynapse, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='LinearGradedSynapse') if self.conductance is not None and 'conductance' not in already_processed: already_processed.add('conductance') - outfile.write(' conductance=%s' % (quote_attrib(self.conductance), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='LinearGradedSynapse', fromsubclass_=False, pretty_print=True): - super(LinearGradedSynapse, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write(' conductance=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.conductance), input_name='conductance')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='LinearGradedSynapse', fromsubclass_=False, pretty_print=True): + super(LinearGradedSynapse, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('conductance', node) if value is not None and 'conductance' not in already_processed: already_processed.add('conductance') self.conductance = value self.validate_Nml2Quantity_conductance(self.conductance) # validate type Nml2Quantity_conductance - super(LinearGradedSynapse, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(LinearGradedSynapse, self).buildChildren(child_, node, nodeName_, True) + super(LinearGradedSynapse, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(LinearGradedSynapse, self)._buildChildren(child_, node, nodeName_, True) pass # end class LinearGradedSynapse class 
SilentSynapse(BaseSynapse): - """Dummy synapse which emits no current. Used as presynaptic endpoint - for analog synaptic connection (continuousConnection).""" + """SilentSynapse -- Dummy synapse which emits no current. Used as presynaptic endpoint for analog synaptic connection (continuousConnection). + + """ + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ ] subclass = None superclass = BaseSynapse - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(SilentSynapse, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.ns_prefix_ = None + super(globals().get("SilentSynapse"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -19543,9 +26544,13 @@ def factory(*args_, **kwargs_): else: return SilentSynapse(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def _hasContent(self): if ( - super(SilentSynapse, self).hasContent_() + super(SilentSynapse, self)._hasContent() ): return True else: @@ -19558,50 +26563,63 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='S eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'SilentSynapse': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SilentSynapse') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SilentSynapse') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SilentSynapse', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SilentSynapse', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SilentSynapse'): - super(SilentSynapse, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SilentSynapse') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SilentSynapse', fromsubclass_=False, pretty_print=True): - super(SilentSynapse, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SilentSynapse'): + super(SilentSynapse, 
self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SilentSynapse') + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SilentSynapse', fromsubclass_=False, pretty_print=True): + super(SilentSynapse, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - super(SilentSynapse, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(SilentSynapse, self).buildChildren(child_, node, nodeName_, True) + def _buildAttributes(self, node, attrs, already_processed): + super(SilentSynapse, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(SilentSynapse, self)._buildChildren(child_, node, nodeName_, True) pass # end class SilentSynapse class GapJunction(BaseSynapse): - """Gap junction/single electrical connection""" + """GapJunction -- Gap junction/single electrical connection + + """ + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('conductance', 'Nml2Quantity_conductance', 0, 0, {'use': u'required'}), + MemberSpec_('conductance', 'Nml2Quantity_conductance', 0, 0, {'use': 'required', 'name': 'conductance'}), ] subclass = None superclass = BaseSynapse - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, conductance=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, conductance=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(GapJunction, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.ns_prefix_ = None + super(globals().get("GapJunction"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) self.conductance = _cast(None, conductance) + self.conductance_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -19613,16 +26631,28 @@ def factory(*args_, **kwargs_): else: return GapJunction(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_conductance(self): + return self.conductance + def set_conductance(self, conductance): + self.conductance = conductance def validate_Nml2Quantity_conductance(self, value): # Validate type Nml2Quantity_conductance, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_conductance_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_conductance_patterns_, )) - validate_Nml2Quantity_conductance_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S|mS|uS|nS|pS)$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_conductance_patterns_, )) + validate_Nml2Quantity_conductance_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S|mS|uS|nS|pS))$']] + def _hasContent(self): if ( - super(GapJunction, self).hasContent_() + super(GapJunction, self)._hasContent() ): return True else: @@ -19635,55 +26665,65 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='G eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'GapJunction': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GapJunction') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GapJunction') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='GapJunction', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='GapJunction', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GapJunction'): - super(GapJunction, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GapJunction') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GapJunction'): + super(GapJunction, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GapJunction') if self.conductance is not None and 'conductance' not in already_processed: already_processed.add('conductance') - outfile.write(' conductance=%s' % (quote_attrib(self.conductance), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GapJunction', fromsubclass_=False, pretty_print=True): - super(GapJunction, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write(' conductance=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.conductance), input_name='conductance')), )) + def _exportChildren(self, 
outfile, level, namespaceprefix_='', namespacedef_='', name_='GapJunction', fromsubclass_=False, pretty_print=True): + super(GapJunction, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('conductance', node) if value is not None and 'conductance' not in already_processed: already_processed.add('conductance') self.conductance = value self.validate_Nml2Quantity_conductance(self.conductance) # validate type Nml2Quantity_conductance - super(GapJunction, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(GapJunction, self).buildChildren(child_, node, nodeName_, True) + super(GapJunction, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(GapJunction, self)._buildChildren(child_, node, nodeName_, True) pass # end class GapJunction class BaseCurrentBasedSynapse(BaseSynapse): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ ] subclass = None superclass = BaseSynapse - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, extensiontype_=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, extensiontype_=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(BaseCurrentBasedSynapse, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, extensiontype_, **kwargs_) + self.ns_prefix_ = None + super(globals().get("BaseCurrentBasedSynapse"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, extensiontype_, **kwargs_) self.extensiontype_ = extensiontype_ def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: @@ -19696,9 +26736,15 @@ def factory(*args_, **kwargs_): else: return BaseCurrentBasedSynapse(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_extensiontype_(self): return self.extensiontype_ + def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ + def _hasContent(self): if ( - super(BaseCurrentBasedSynapse, self).hasContent_() + super(BaseCurrentBasedSynapse, self)._hasContent() ): return True else: @@ -19711,55 +26757,69 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='B eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'BaseCurrentBasedSynapse': name_ = self.original_tagname_ + if UseCapturedNS_ and 
self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseCurrentBasedSynapse') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseCurrentBasedSynapse') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BaseCurrentBasedSynapse', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BaseCurrentBasedSynapse', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BaseCurrentBasedSynapse'): - super(BaseCurrentBasedSynapse, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseCurrentBasedSynapse') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BaseCurrentBasedSynapse'): + super(BaseCurrentBasedSynapse, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseCurrentBasedSynapse') if self.extensiontype_ is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseCurrentBasedSynapse', fromsubclass_=False, pretty_print=True): - super(BaseCurrentBasedSynapse, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get(self.extensiontype_, '') + outfile.write(' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_)) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseCurrentBasedSynapse', fromsubclass_=False, pretty_print=True): + super(BaseCurrentBasedSynapse, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('xsi:type', node) if value is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') self.extensiontype_ = value - super(BaseCurrentBasedSynapse, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, 
child_, node, nodeName_, fromsubclass_=False): - super(BaseCurrentBasedSynapse, self).buildChildren(child_, node, nodeName_, True) + super(BaseCurrentBasedSynapse, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(BaseCurrentBasedSynapse, self)._buildChildren(child_, node, nodeName_, True) pass # end class BaseCurrentBasedSynapse class BaseVoltageDepSynapse(BaseSynapse): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ ] subclass = None superclass = BaseSynapse - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, extensiontype_=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, extensiontype_=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(BaseVoltageDepSynapse, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, extensiontype_, **kwargs_) + self.ns_prefix_ = None + super(globals().get("BaseVoltageDepSynapse"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, extensiontype_, **kwargs_) self.extensiontype_ = extensiontype_ def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: @@ -19772,9 +26832,15 @@ def factory(*args_, **kwargs_): else: return BaseVoltageDepSynapse(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_extensiontype_(self): return self.extensiontype_ + def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ + def _hasContent(self): if ( - super(BaseVoltageDepSynapse, self).hasContent_() + super(BaseVoltageDepSynapse, self)._hasContent() ): return True else: @@ -19787,105 +26853,130 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='B eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'BaseVoltageDepSynapse': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseVoltageDepSynapse') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseVoltageDepSynapse') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BaseVoltageDepSynapse', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BaseVoltageDepSynapse', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BaseVoltageDepSynapse'): - super(BaseVoltageDepSynapse, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseVoltageDepSynapse') + def 
_exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BaseVoltageDepSynapse'): + super(BaseVoltageDepSynapse, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseVoltageDepSynapse') if self.extensiontype_ is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseVoltageDepSynapse', fromsubclass_=False, pretty_print=True): - super(BaseVoltageDepSynapse, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get(self.extensiontype_, '') + outfile.write(' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_)) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseVoltageDepSynapse', fromsubclass_=False, pretty_print=True): + super(BaseVoltageDepSynapse, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('xsi:type', node) if value is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') self.extensiontype_ = value - super(BaseVoltageDepSynapse, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(BaseVoltageDepSynapse, self).buildChildren(child_, node, nodeName_, True) + super(BaseVoltageDepSynapse, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(BaseVoltageDepSynapse, self)._buildChildren(child_, node, nodeName_, True) pass # end class BaseVoltageDepSynapse class IonChannel(IonChannelScalable): - """Note ionChannel and ionChannelHH are currently functionally - identical. This is needed since many existing examples use - ionChannel, some use ionChannelHH. One of these should be - removed, probably ionChannelHH.""" + """IonChannel -- Note ionChannel and ionChannelHH are currently functionally identical. This is needed since many existing examples use ionChannel, some use ionChannelHH. + One of these should be removed, probably ionChannelHH. 
+ + """ + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('species', 'NmlId', 0, 1, {'use': u'optional'}), - MemberSpec_('type', 'channelTypes', 0, 1, {'use': u'optional'}), - MemberSpec_('conductance', 'Nml2Quantity_conductance', 0, 1, {'use': u'optional'}), - MemberSpec_('gates', 'GateHHUndetermined', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'GateHHUndetermined', u'name': u'gate', u'minOccurs': u'0'}, 1), - MemberSpec_('gate_hh_rates', 'GateHHRates', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'GateHHRates', u'name': u'gateHHrates', u'minOccurs': u'0'}, 1), - MemberSpec_('gate_h_hrates_taus', 'GateHHRatesTau', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'GateHHRatesTau', u'name': u'gateHHratesTau', u'minOccurs': u'0'}, 1), - MemberSpec_('gate_hh_tau_infs', 'GateHHTauInf', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'GateHHTauInf', u'name': u'gateHHtauInf', u'minOccurs': u'0'}, 1), - MemberSpec_('gate_h_hrates_infs', 'GateHHRatesInf', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'GateHHRatesInf', u'name': u'gateHHratesInf', u'minOccurs': u'0'}, 1), - MemberSpec_('gate_h_hrates_tau_infs', 'GateHHRatesTauInf', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'GateHHRatesTauInf', u'name': u'gateHHratesTauInf', u'minOccurs': u'0'}, 1), - MemberSpec_('gate_hh_instantaneouses', 'GateHHInstantaneous', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'GateHHInstantaneous', u'name': u'gateHHInstantaneous', u'minOccurs': u'0'}, 1), - MemberSpec_('gate_fractionals', 'GateFractional', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'GateFractional', u'name': u'gateFractional', u'minOccurs': u'0'}, 1), + MemberSpec_('species', 'NmlId', 0, 1, {'use': 'optional', 'name': 'species'}), + MemberSpec_('type', 'channelTypes', 0, 1, {'use': 'optional', 'name': 'type'}), + MemberSpec_('conductance', 'Nml2Quantity_conductance', 0, 1, {'use': 'optional', 'name': 'conductance'}), + MemberSpec_('gates', 'GateHHUndetermined', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'gate', 'type': 'GateHHUndetermined'}, 1), + MemberSpec_('gate_hh_rates', 'GateHHRates', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'gateHHrates', 'type': 'GateHHRates'}, 1), + MemberSpec_('gate_h_hrates_taus', 'GateHHRatesTau', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'gateHHratesTau', 'type': 'GateHHRatesTau'}, 1), + MemberSpec_('gate_hh_tau_infs', 'GateHHTauInf', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'gateHHtauInf', 'type': 'GateHHTauInf'}, 1), + MemberSpec_('gate_h_hrates_infs', 'GateHHRatesInf', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'gateHHratesInf', 'type': 'GateHHRatesInf'}, 1), + MemberSpec_('gate_h_hrates_tau_infs', 'GateHHRatesTauInf', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'gateHHratesTauInf', 'type': 'GateHHRatesTauInf'}, 1), + MemberSpec_('gate_hh_instantaneouses', 'GateHHInstantaneous', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'gateHHInstantaneous', 'type': 'GateHHInstantaneous'}, 1), + MemberSpec_('gate_fractionals', 'GateFractional', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'gateFractional', 'type': 'GateFractional'}, 1), ] subclass = None superclass = IonChannelScalable - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, q10_conductance_scalings=None, species=None, type=None, conductance=None, gates=None, gate_hh_rates=None, gate_h_hrates_taus=None, gate_hh_tau_infs=None, gate_h_hrates_infs=None, 
gate_h_hrates_tau_infs=None, gate_hh_instantaneouses=None, gate_fractionals=None, extensiontype_=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, q10_conductance_scalings=None, species=None, type=None, conductance=None, gates=None, gate_hh_rates=None, gate_h_hrates_taus=None, gate_hh_tau_infs=None, gate_h_hrates_infs=None, gate_h_hrates_tau_infs=None, gate_hh_instantaneouses=None, gate_fractionals=None, extensiontype_=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(IonChannel, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, q10_conductance_scalings, extensiontype_, **kwargs_) + self.ns_prefix_ = None + super(globals().get("IonChannel"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, q10_conductance_scalings, extensiontype_, **kwargs_) self.species = _cast(None, species) + self.species_nsprefix_ = None self.type = _cast(None, type) + self.type_nsprefix_ = None self.conductance = _cast(None, conductance) + self.conductance_nsprefix_ = None if gates is None: self.gates = [] else: self.gates = gates + self.gates_nsprefix_ = None if gate_hh_rates is None: self.gate_hh_rates = [] else: self.gate_hh_rates = gate_hh_rates + self.gate_hh_rates_nsprefix_ = None if gate_h_hrates_taus is None: self.gate_h_hrates_taus = [] else: self.gate_h_hrates_taus = gate_h_hrates_taus + self.gate_h_hrates_taus_nsprefix_ = None if gate_hh_tau_infs is None: self.gate_hh_tau_infs = [] else: self.gate_hh_tau_infs = gate_hh_tau_infs + self.gate_hh_tau_infs_nsprefix_ = None if gate_h_hrates_infs is None: self.gate_h_hrates_infs = [] else: self.gate_h_hrates_infs = gate_h_hrates_infs + self.gate_h_hrates_infs_nsprefix_ = None if gate_h_hrates_tau_infs is None: self.gate_h_hrates_tau_infs = [] else: self.gate_h_hrates_tau_infs = gate_h_hrates_tau_infs + self.gate_h_hrates_tau_infs_nsprefix_ = None if gate_hh_instantaneouses is None: self.gate_hh_instantaneouses = [] else: self.gate_hh_instantaneouses = gate_hh_instantaneouses + self.gate_hh_instantaneouses_nsprefix_ = None if gate_fractionals is None: self.gate_fractionals = [] else: self.gate_fractionals = gate_fractionals + self.gate_fractionals_nsprefix_ = None self.extensiontype_ = extensiontype_ def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: @@ -19898,33 +26989,140 @@ def factory(*args_, **kwargs_): else: return IonChannel(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_gate(self): + return self.gates + def set_gate(self, gates): + self.gates = gates + def add_gate(self, value): + self.gates.append(value) + def insert_gate_at(self, index, value): + self.gates.insert(index, value) + def replace_gate_at(self, index, value): + self.gates[index] = value + def get_gateHHrates(self): + return self.gate_hh_rates + def set_gateHHrates(self, gate_hh_rates): + self.gate_hh_rates = gate_hh_rates + def add_gateHHrates(self, value): + self.gate_hh_rates.append(value) + def insert_gateHHrates_at(self, index, value): + self.gate_hh_rates.insert(index, value) + def replace_gateHHrates_at(self, index, value): + self.gate_hh_rates[index] = value + def get_gateHHratesTau(self): + return self.gate_h_hrates_taus + def set_gateHHratesTau(self, 
gate_h_hrates_taus): + self.gate_h_hrates_taus = gate_h_hrates_taus + def add_gateHHratesTau(self, value): + self.gate_h_hrates_taus.append(value) + def insert_gateHHratesTau_at(self, index, value): + self.gate_h_hrates_taus.insert(index, value) + def replace_gateHHratesTau_at(self, index, value): + self.gate_h_hrates_taus[index] = value + def get_gateHHtauInf(self): + return self.gate_hh_tau_infs + def set_gateHHtauInf(self, gate_hh_tau_infs): + self.gate_hh_tau_infs = gate_hh_tau_infs + def add_gateHHtauInf(self, value): + self.gate_hh_tau_infs.append(value) + def insert_gateHHtauInf_at(self, index, value): + self.gate_hh_tau_infs.insert(index, value) + def replace_gateHHtauInf_at(self, index, value): + self.gate_hh_tau_infs[index] = value + def get_gateHHratesInf(self): + return self.gate_h_hrates_infs + def set_gateHHratesInf(self, gate_h_hrates_infs): + self.gate_h_hrates_infs = gate_h_hrates_infs + def add_gateHHratesInf(self, value): + self.gate_h_hrates_infs.append(value) + def insert_gateHHratesInf_at(self, index, value): + self.gate_h_hrates_infs.insert(index, value) + def replace_gateHHratesInf_at(self, index, value): + self.gate_h_hrates_infs[index] = value + def get_gateHHratesTauInf(self): + return self.gate_h_hrates_tau_infs + def set_gateHHratesTauInf(self, gate_h_hrates_tau_infs): + self.gate_h_hrates_tau_infs = gate_h_hrates_tau_infs + def add_gateHHratesTauInf(self, value): + self.gate_h_hrates_tau_infs.append(value) + def insert_gateHHratesTauInf_at(self, index, value): + self.gate_h_hrates_tau_infs.insert(index, value) + def replace_gateHHratesTauInf_at(self, index, value): + self.gate_h_hrates_tau_infs[index] = value + def get_gateHHInstantaneous(self): + return self.gate_hh_instantaneouses + def set_gateHHInstantaneous(self, gate_hh_instantaneouses): + self.gate_hh_instantaneouses = gate_hh_instantaneouses + def add_gateHHInstantaneous(self, value): + self.gate_hh_instantaneouses.append(value) + def insert_gateHHInstantaneous_at(self, index, value): + self.gate_hh_instantaneouses.insert(index, value) + def replace_gateHHInstantaneous_at(self, index, value): + self.gate_hh_instantaneouses[index] = value + def get_gateFractional(self): + return self.gate_fractionals + def set_gateFractional(self, gate_fractionals): + self.gate_fractionals = gate_fractionals + def add_gateFractional(self, value): + self.gate_fractionals.append(value) + def insert_gateFractional_at(self, index, value): + self.gate_fractionals.insert(index, value) + def replace_gateFractional_at(self, index, value): + self.gate_fractionals[index] = value + def get_species(self): + return self.species + def set_species(self, species): + self.species = species + def get_type(self): + return self.type + def set_type(self, type): + self.type = type + def get_conductance(self): + return self.conductance + def set_conductance(self, conductance): + self.conductance = conductance + def get_extensiontype_(self): return self.extensiontype_ + def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_NmlId_patterns_, )) + validate_NmlId_patterns_ = [['^([a-zA-Z_][a-zA-Z0-9_]*)$']] def validate_channelTypes(self, value): # Validate type channelTypes, a restriction on xs:string. - if value is not None and Validate_simpletypes_: - value = str(value) + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False + value = value enumerations = ['ionChannelPassive', 'ionChannelHH'] - enumeration_respectee = False - for enum in enumerations: - if value == enum: - enumeration_respectee = True - break - if not enumeration_respectee: - warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on channelTypes' % {"value" : value.encode("utf-8")} ) + if value not in enumerations: + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on channelTypes' % {"value" : encode_str_2_3(value), "lineno": lineno} ) + result = False def validate_Nml2Quantity_conductance(self, value): # Validate type Nml2Quantity_conductance, a restriction on xs:string. 
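With this change the simple-type validators no longer emit Python warnings; they append messages to the gds_collector_ supplied at construction or parse time, and they only run when such a collector is present. A sketch, assuming the regenerated module also ships generateDS's usual GdsCollector_ helper (its exact name and location in nml.py are not shown in this patch):

# Sketch: collecting validation messages instead of warnings.
from neuroml.nml.nml import IonChannel, GdsCollector_   # import path is an assumption

collector = GdsCollector_()
chan = IonChannel(gds_collector_=collector)
chan.validate_channelTypes("notAChannelType")  # outside the two-member enumeration above
chan.validate_NmlId("1badId")                  # fails the ^[a-zA-Z_][a-zA-Z0-9_]*$ pattern
for msg in collector.get_messages():           # messages are gathered, nothing is raised
    print(msg)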
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_conductance_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_conductance_patterns_, )) - validate_Nml2Quantity_conductance_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S|mS|uS|nS|pS)$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_conductance_patterns_, )) + validate_Nml2Quantity_conductance_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S|mS|uS|nS|pS))$']] + def _hasContent(self): if ( self.gates or self.gate_hh_rates or @@ -19934,12 +27132,12 @@ def hasContent_(self): self.gate_h_hrates_tau_infs or self.gate_hh_instantaneouses or self.gate_fractionals or - super(IonChannel, self).hasContent_() + super(IonChannel, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IonChannel', pretty_print=True): + def export(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='IonChannel', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('IonChannel') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ @@ -19947,64 +27145,82 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='I eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'IonChannel': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IonChannel') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IonChannel') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='IonChannel', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='IonChannel', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='IonChannel'): - super(IonChannel, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IonChannel') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='IonChannel'): + super(IonChannel, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IonChannel') if self.species is not None and 'species' not in already_processed: already_processed.add('species') - outfile.write(' species=%s' % 
(quote_attrib(self.species), )) + outfile.write(' species=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.species), input_name='species')), )) if self.type is not None and 'type' not in already_processed: already_processed.add('type') - outfile.write(' type=%s' % (quote_attrib(self.type), )) + outfile.write(' type=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.type), input_name='type')), )) if self.conductance is not None and 'conductance' not in already_processed: already_processed.add('conductance') - outfile.write(' conductance=%s' % (quote_attrib(self.conductance), )) + outfile.write(' conductance=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.conductance), input_name='conductance')), )) if self.extensiontype_ is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IonChannel', fromsubclass_=False, pretty_print=True): - super(IonChannel, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get(self.extensiontype_, '') + outfile.write(' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_)) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='IonChannel', fromsubclass_=False, pretty_print=True): + super(IonChannel, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) if pretty_print: eol_ = '\n' else: eol_ = '' for gate_ in self.gates: + namespaceprefix_ = self.gates_nsprefix_ + ':' if (UseCapturedNS_ and self.gates_nsprefix_) else '' gate_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='gate', pretty_print=pretty_print) for gateHHrates_ in self.gate_hh_rates: + namespaceprefix_ = self.gate_hh_rates_nsprefix_ + ':' if (UseCapturedNS_ and self.gate_hh_rates_nsprefix_) else '' gateHHrates_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='gateHHrates', pretty_print=pretty_print) for gateHHratesTau_ in self.gate_h_hrates_taus: + namespaceprefix_ = self.gate_h_hrates_taus_nsprefix_ + ':' if (UseCapturedNS_ and self.gate_h_hrates_taus_nsprefix_) else '' gateHHratesTau_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='gateHHratesTau', pretty_print=pretty_print) for gateHHtauInf_ in self.gate_hh_tau_infs: + namespaceprefix_ = self.gate_hh_tau_infs_nsprefix_ + ':' if (UseCapturedNS_ and self.gate_hh_tau_infs_nsprefix_) else '' gateHHtauInf_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='gateHHtauInf', pretty_print=pretty_print) for gateHHratesInf_ in self.gate_h_hrates_infs: + namespaceprefix_ = self.gate_h_hrates_infs_nsprefix_ + ':' if (UseCapturedNS_ and self.gate_h_hrates_infs_nsprefix_) else '' gateHHratesInf_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='gateHHratesInf', pretty_print=pretty_print) for gateHHratesTauInf_ in self.gate_h_hrates_tau_infs: + namespaceprefix_ = self.gate_h_hrates_tau_infs_nsprefix_ + ':' if (UseCapturedNS_ and self.gate_h_hrates_tau_infs_nsprefix_) else '' gateHHratesTauInf_.export(outfile, level, namespaceprefix_, 
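From the caller's point of view the export path is unchanged: export() remains the public entry point, while the attribute and child serialisation hooks move to the underscore-prefixed _exportAttributes/_exportChildren names. A minimal sketch, with illustrative values and the namespace definition suppressed to keep the output short; it assumes the top-level neuroml package re-exports the generated class:

# Sketch: serialising a bare channel to stdout.
import sys
from neuroml import IonChannel

chan = IonChannel(id="k_chan", species="k", conductance="10pS")
chan.export(sys.stdout, 0, name_="ionChannel", namespacedef_="", pretty_print=True)
# prints roughly: <ionChannel id="k_chan" species="k" conductance="10pS"/>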
namespacedef_='', name_='gateHHratesTauInf', pretty_print=pretty_print) for gateHHInstantaneous_ in self.gate_hh_instantaneouses: + namespaceprefix_ = self.gate_hh_instantaneouses_nsprefix_ + ':' if (UseCapturedNS_ and self.gate_hh_instantaneouses_nsprefix_) else '' gateHHInstantaneous_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='gateHHInstantaneous', pretty_print=pretty_print) for gateFractional_ in self.gate_fractionals: + namespaceprefix_ = self.gate_fractionals_nsprefix_ + ':' if (UseCapturedNS_ and self.gate_fractionals_nsprefix_) else '' gateFractional_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='gateFractional', pretty_print=pretty_print) - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('species', node) if value is not None and 'species' not in already_processed: already_processed.add('species') @@ -20024,61 +27240,65 @@ def buildAttributes(self, node, attrs, already_processed): if value is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') self.extensiontype_ = value - super(IonChannel, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + super(IonChannel, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'gate': obj_ = GateHHUndetermined.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.gates.append(obj_) obj_.original_tagname_ = 'gate' elif nodeName_ == 'gateHHrates': obj_ = GateHHRates.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.gate_hh_rates.append(obj_) obj_.original_tagname_ = 'gateHHrates' elif nodeName_ == 'gateHHratesTau': obj_ = GateHHRatesTau.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.gate_h_hrates_taus.append(obj_) obj_.original_tagname_ = 'gateHHratesTau' elif nodeName_ == 'gateHHtauInf': obj_ = GateHHTauInf.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.gate_hh_tau_infs.append(obj_) obj_.original_tagname_ = 'gateHHtauInf' elif nodeName_ == 'gateHHratesInf': obj_ = GateHHRatesInf.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.gate_h_hrates_infs.append(obj_) obj_.original_tagname_ = 'gateHHratesInf' elif nodeName_ == 'gateHHratesTauInf': obj_ = GateHHRatesTauInf.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.gate_h_hrates_tau_infs.append(obj_) obj_.original_tagname_ = 'gateHHratesTauInf' elif nodeName_ == 'gateHHInstantaneous': obj_ = GateHHInstantaneous.factory(parent_object_=self) - 
obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.gate_hh_instantaneouses.append(obj_) obj_.original_tagname_ = 'gateHHInstantaneous' elif nodeName_ == 'gateFractional': obj_ = GateFractional.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.gate_fractionals.append(obj_) obj_.original_tagname_ = 'gateFractional' - super(IonChannel, self).buildChildren(child_, node, nodeName_, True) + super(IonChannel, self)._buildChildren(child_, node, nodeName_, True) # end class IonChannel class AlphaCurrSynapse(BasePynnSynapse): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ ] subclass = None superclass = BasePynnSynapse - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, tau_syn=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, tau_syn=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(AlphaCurrSynapse, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, tau_syn, **kwargs_) + self.ns_prefix_ = None + super(globals().get("AlphaCurrSynapse"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, tau_syn, **kwargs_) def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -20090,9 +27310,13 @@ def factory(*args_, **kwargs_): else: return AlphaCurrSynapse(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def _hasContent(self): if ( - super(AlphaCurrSynapse, self).hasContent_() + super(AlphaCurrSynapse, self)._hasContent() ): return True else: @@ -20105,47 +27329,57 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='A eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'AlphaCurrSynapse': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='AlphaCurrSynapse') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='AlphaCurrSynapse') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='AlphaCurrSynapse', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='AlphaCurrSynapse', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='AlphaCurrSynapse'): - super(AlphaCurrSynapse, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='AlphaCurrSynapse') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='AlphaCurrSynapse', fromsubclass_=False, 
pretty_print=True): - super(AlphaCurrSynapse, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='AlphaCurrSynapse'): + super(AlphaCurrSynapse, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='AlphaCurrSynapse') + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='AlphaCurrSynapse', fromsubclass_=False, pretty_print=True): + super(AlphaCurrSynapse, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - super(AlphaCurrSynapse, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(AlphaCurrSynapse, self).buildChildren(child_, node, nodeName_, True) + def _buildAttributes(self, node, attrs, already_processed): + super(AlphaCurrSynapse, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(AlphaCurrSynapse, self)._buildChildren(child_, node, nodeName_, True) pass # end class AlphaCurrSynapse class ExpCurrSynapse(BasePynnSynapse): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ ] subclass = None superclass = BasePynnSynapse - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, tau_syn=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, tau_syn=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(ExpCurrSynapse, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, tau_syn, **kwargs_) + self.ns_prefix_ = None + super(globals().get("ExpCurrSynapse"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, tau_syn, **kwargs_) def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -20157,9 +27391,13 @@ def factory(*args_, **kwargs_): else: return ExpCurrSynapse(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def _hasContent(self): if ( - super(ExpCurrSynapse, self).hasContent_() + super(ExpCurrSynapse, self)._hasContent() ): return True else: @@ -20172,49 +27410,60 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='E eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'ExpCurrSynapse': name_ = 
self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExpCurrSynapse') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExpCurrSynapse') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ExpCurrSynapse', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ExpCurrSynapse', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ExpCurrSynapse'): - super(ExpCurrSynapse, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExpCurrSynapse') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExpCurrSynapse', fromsubclass_=False, pretty_print=True): - super(ExpCurrSynapse, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ExpCurrSynapse'): + super(ExpCurrSynapse, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExpCurrSynapse') + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExpCurrSynapse', fromsubclass_=False, pretty_print=True): + super(ExpCurrSynapse, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - super(ExpCurrSynapse, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(ExpCurrSynapse, self).buildChildren(child_, node, nodeName_, True) + def _buildAttributes(self, node, attrs, already_processed): + super(ExpCurrSynapse, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(ExpCurrSynapse, self)._buildChildren(child_, node, nodeName_, True) pass # end class ExpCurrSynapse class AlphaCondSynapse(BasePynnSynapse): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('e_rev', 'xs:float', 0, 0, {'use': u'required'}), + MemberSpec_('e_rev', 'xs:float', 0, 0, {'use': 'required', 'name': 'e_rev'}), ] subclass = None superclass = BasePynnSynapse - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, 
properties=None, annotation=None, tau_syn=None, e_rev=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, tau_syn=None, e_rev=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(AlphaCondSynapse, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, tau_syn, **kwargs_) + self.ns_prefix_ = None + super(globals().get("AlphaCondSynapse"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, tau_syn, **kwargs_) self.e_rev = _cast(float, e_rev) + self.e_rev_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -20226,9 +27475,17 @@ def factory(*args_, **kwargs_): else: return AlphaCondSynapse(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_e_rev(self): + return self.e_rev + def set_e_rev(self, e_rev): + self.e_rev = e_rev + def _hasContent(self): if ( - super(AlphaCondSynapse, self).hasContent_() + super(AlphaCondSynapse, self)._hasContent() ): return True else: @@ -20241,59 +27498,68 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='A eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'AlphaCondSynapse': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='AlphaCondSynapse') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='AlphaCondSynapse') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='AlphaCondSynapse', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='AlphaCondSynapse', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='AlphaCondSynapse'): - super(AlphaCondSynapse, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='AlphaCondSynapse') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='AlphaCondSynapse'): + super(AlphaCondSynapse, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='AlphaCondSynapse') if self.e_rev is not None and 'e_rev' not in already_processed: already_processed.add('e_rev') outfile.write(' e_rev="%s"' % self.gds_format_float(self.e_rev, input_name='e_rev')) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='AlphaCondSynapse', fromsubclass_=False, pretty_print=True): - super(AlphaCondSynapse, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = 
set() - self.buildAttributes(node, node.attrib, already_processed) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='AlphaCondSynapse', fromsubclass_=False, pretty_print=True): + super(AlphaCondSynapse, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('e_rev', node) if value is not None and 'e_rev' not in already_processed: already_processed.add('e_rev') - try: - self.e_rev = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (e_rev): %s' % exp) - super(AlphaCondSynapse, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(AlphaCondSynapse, self).buildChildren(child_, node, nodeName_, True) + value = self.gds_parse_float(value, node, 'e_rev') + self.e_rev = value + super(AlphaCondSynapse, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(AlphaCondSynapse, self)._buildChildren(child_, node, nodeName_, True) pass # end class AlphaCondSynapse class ExpCondSynapse(BasePynnSynapse): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('e_rev', 'xs:float', 0, 0, {'use': u'required'}), + MemberSpec_('e_rev', 'xs:float', 0, 0, {'use': 'required', 'name': 'e_rev'}), ] subclass = None superclass = BasePynnSynapse - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, tau_syn=None, e_rev=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, tau_syn=None, e_rev=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(ExpCondSynapse, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, tau_syn, **kwargs_) + self.ns_prefix_ = None + super(globals().get("ExpCondSynapse"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, tau_syn, **kwargs_) self.e_rev = _cast(float, e_rev) + self.e_rev_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -20305,9 +27571,17 @@ def factory(*args_, **kwargs_): else: return ExpCondSynapse(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_e_rev(self): + return self.e_rev + def set_e_rev(self, e_rev): + self.e_rev = e_rev + def _hasContent(self): if ( - super(ExpCondSynapse, self).hasContent_() + super(ExpCondSynapse, self)._hasContent() ): return True else: @@ -20320,75 +27594,92 @@ 
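Attribute parsing follows the same pattern: build() now records the source node and its prefix, threads the collector into _buildChildren, and numeric attributes go through gds_parse_float instead of a local try/except ValueError. A sketch of pushing one element through the new build signature; the lxml usage and the XML snippet are illustrative assumptions:

# Sketch: parsing an element with a collector attached.
from lxml import etree
from neuroml.nml.nml import AlphaCondSynapse, GdsCollector_  # import path is an assumption

node = etree.fromstring('<alphaCondSynapse id="syn0" tau_syn="0.3" e_rev="0.0"/>')
collector = GdsCollector_()
syn = AlphaCondSynapse()
syn.build(node, gds_collector_=collector)
print(syn.get_e_rev())           # 0.0, parsed via gds_parse_float
print(collector.get_messages())  # any pattern/enumeration problems end up here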
def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='E eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'ExpCondSynapse': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExpCondSynapse') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExpCondSynapse') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ExpCondSynapse', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ExpCondSynapse', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ExpCondSynapse'): - super(ExpCondSynapse, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExpCondSynapse') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ExpCondSynapse'): + super(ExpCondSynapse, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExpCondSynapse') if self.e_rev is not None and 'e_rev' not in already_processed: already_processed.add('e_rev') outfile.write(' e_rev="%s"' % self.gds_format_float(self.e_rev, input_name='e_rev')) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExpCondSynapse', fromsubclass_=False, pretty_print=True): - super(ExpCondSynapse, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExpCondSynapse', fromsubclass_=False, pretty_print=True): + super(ExpCondSynapse, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('e_rev', node) if value is not None and 'e_rev' not in already_processed: already_processed.add('e_rev') - try: - self.e_rev = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (e_rev): %s' % exp) - super(ExpCondSynapse, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(ExpCondSynapse, self).buildChildren(child_, 
node, nodeName_, True) + value = self.gds_parse_float(value, node, 'e_rev') + self.e_rev = value + super(ExpCondSynapse, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(ExpCondSynapse, self)._buildChildren(child_, node, nodeName_, True) pass # end class ExpCondSynapse class HH_cond_exp(basePyNNCell): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('v_offset', 'xs:float', 0, 0, {'use': u'required'}), - MemberSpec_('e_rev_E', 'xs:float', 0, 0, {'use': u'required'}), - MemberSpec_('e_rev_I', 'xs:float', 0, 0, {'use': u'required'}), - MemberSpec_('e_rev_K', 'xs:float', 0, 0, {'use': u'required'}), - MemberSpec_('e_rev_Na', 'xs:float', 0, 0, {'use': u'required'}), - MemberSpec_('e_rev_leak', 'xs:float', 0, 0, {'use': u'required'}), - MemberSpec_('g_leak', 'xs:float', 0, 0, {'use': u'required'}), - MemberSpec_('gbar_K', 'xs:float', 0, 0, {'use': u'required'}), - MemberSpec_('gbar_Na', 'xs:float', 0, 0, {'use': u'required'}), + MemberSpec_('v_offset', 'xs:float', 0, 0, {'use': 'required', 'name': 'v_offset'}), + MemberSpec_('e_rev_E', 'xs:float', 0, 0, {'use': 'required', 'name': 'e_rev_E'}), + MemberSpec_('e_rev_I', 'xs:float', 0, 0, {'use': 'required', 'name': 'e_rev_I'}), + MemberSpec_('e_rev_K', 'xs:float', 0, 0, {'use': 'required', 'name': 'e_rev_K'}), + MemberSpec_('e_rev_Na', 'xs:float', 0, 0, {'use': 'required', 'name': 'e_rev_Na'}), + MemberSpec_('e_rev_leak', 'xs:float', 0, 0, {'use': 'required', 'name': 'e_rev_leak'}), + MemberSpec_('g_leak', 'xs:float', 0, 0, {'use': 'required', 'name': 'g_leak'}), + MemberSpec_('gbar_K', 'xs:float', 0, 0, {'use': 'required', 'name': 'gbar_K'}), + MemberSpec_('gbar_Na', 'xs:float', 0, 0, {'use': 'required', 'name': 'gbar_Na'}), ] subclass = None superclass = basePyNNCell - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, cm=None, i_offset=None, tau_syn_E=None, tau_syn_I=None, v_init=None, v_offset=None, e_rev_E=None, e_rev_I=None, e_rev_K=None, e_rev_Na=None, e_rev_leak=None, g_leak=None, gbar_K=None, gbar_Na=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, cm=None, i_offset=None, tau_syn_E=None, tau_syn_I=None, v_init=None, v_offset=None, e_rev_E=None, e_rev_I=None, e_rev_K=None, e_rev_Na=None, e_rev_leak=None, g_leak=None, gbar_K=None, gbar_Na=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(HH_cond_exp, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, cm, i_offset, tau_syn_E, tau_syn_I, v_init, **kwargs_) + self.ns_prefix_ = None + super(globals().get("HH_cond_exp"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, cm, i_offset, tau_syn_E, tau_syn_I, v_init, **kwargs_) self.v_offset = _cast(float, v_offset) + self.v_offset_nsprefix_ = None self.e_rev_E = _cast(float, e_rev_E) + self.e_rev_E_nsprefix_ = None self.e_rev_I = _cast(float, e_rev_I) + self.e_rev_I_nsprefix_ = None self.e_rev_K = _cast(float, e_rev_K) + self.e_rev_K_nsprefix_ = None self.e_rev_Na = _cast(float, e_rev_Na) + self.e_rev_Na_nsprefix_ = None self.e_rev_leak = _cast(float, e_rev_leak) + self.e_rev_leak_nsprefix_ = None self.g_leak = _cast(float, g_leak) + self.g_leak_nsprefix_ = None self.gbar_K = _cast(float, 
gbar_K) + self.gbar_K_nsprefix_ = None self.gbar_Na = _cast(float, gbar_Na) + self.gbar_Na_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -20400,9 +27691,49 @@ def factory(*args_, **kwargs_): else: return HH_cond_exp(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_v_offset(self): + return self.v_offset + def set_v_offset(self, v_offset): + self.v_offset = v_offset + def get_e_rev_E(self): + return self.e_rev_E + def set_e_rev_E(self, e_rev_E): + self.e_rev_E = e_rev_E + def get_e_rev_I(self): + return self.e_rev_I + def set_e_rev_I(self, e_rev_I): + self.e_rev_I = e_rev_I + def get_e_rev_K(self): + return self.e_rev_K + def set_e_rev_K(self, e_rev_K): + self.e_rev_K = e_rev_K + def get_e_rev_Na(self): + return self.e_rev_Na + def set_e_rev_Na(self, e_rev_Na): + self.e_rev_Na = e_rev_Na + def get_e_rev_leak(self): + return self.e_rev_leak + def set_e_rev_leak(self, e_rev_leak): + self.e_rev_leak = e_rev_leak + def get_g_leak(self): + return self.g_leak + def set_g_leak(self, g_leak): + self.g_leak = g_leak + def get_gbar_K(self): + return self.gbar_K + def set_gbar_K(self, gbar_K): + self.gbar_K = gbar_K + def get_gbar_Na(self): + return self.gbar_Na + def set_gbar_Na(self, gbar_Na): + self.gbar_Na = gbar_Na + def _hasContent(self): if ( - super(HH_cond_exp, self).hasContent_() + super(HH_cond_exp, self)._hasContent() ): return True else: @@ -20415,21 +27746,23 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='H eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'HH_cond_exp': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='HH_cond_exp') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='HH_cond_exp') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='HH_cond_exp', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='HH_cond_exp', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='HH_cond_exp'): - super(HH_cond_exp, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='HH_cond_exp') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='HH_cond_exp'): + super(HH_cond_exp, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='HH_cond_exp') if self.v_offset is not None and 'v_offset' not in already_processed: already_processed.add('v_offset') outfile.write(' v_offset="%s"' % self.gds_format_float(self.v_offset, input_name='v_offset')) @@ -20457,105 +27790,100 @@ def exportAttributes(self, outfile, level, already_processed, namespaceprefix_=' if 
self.gbar_Na is not None and 'gbar_Na' not in already_processed: already_processed.add('gbar_Na') outfile.write(' gbar_Na="%s"' % self.gds_format_float(self.gbar_Na, input_name='gbar_Na')) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='HH_cond_exp', fromsubclass_=False, pretty_print=True): - super(HH_cond_exp, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='HH_cond_exp', fromsubclass_=False, pretty_print=True): + super(HH_cond_exp, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('v_offset', node) if value is not None and 'v_offset' not in already_processed: already_processed.add('v_offset') - try: - self.v_offset = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (v_offset): %s' % exp) + value = self.gds_parse_float(value, node, 'v_offset') + self.v_offset = value value = find_attr_value_('e_rev_E', node) if value is not None and 'e_rev_E' not in already_processed: already_processed.add('e_rev_E') - try: - self.e_rev_E = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (e_rev_E): %s' % exp) + value = self.gds_parse_float(value, node, 'e_rev_E') + self.e_rev_E = value value = find_attr_value_('e_rev_I', node) if value is not None and 'e_rev_I' not in already_processed: already_processed.add('e_rev_I') - try: - self.e_rev_I = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (e_rev_I): %s' % exp) + value = self.gds_parse_float(value, node, 'e_rev_I') + self.e_rev_I = value value = find_attr_value_('e_rev_K', node) if value is not None and 'e_rev_K' not in already_processed: already_processed.add('e_rev_K') - try: - self.e_rev_K = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (e_rev_K): %s' % exp) + value = self.gds_parse_float(value, node, 'e_rev_K') + self.e_rev_K = value value = find_attr_value_('e_rev_Na', node) if value is not None and 'e_rev_Na' not in already_processed: already_processed.add('e_rev_Na') - try: - self.e_rev_Na = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (e_rev_Na): %s' % exp) + value = self.gds_parse_float(value, node, 'e_rev_Na') + self.e_rev_Na = value value = find_attr_value_('e_rev_leak', node) if value is not None and 'e_rev_leak' not in already_processed: already_processed.add('e_rev_leak') - try: - self.e_rev_leak = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (e_rev_leak): %s' % exp) + value = self.gds_parse_float(value, node, 'e_rev_leak') + self.e_rev_leak 
= value value = find_attr_value_('g_leak', node) if value is not None and 'g_leak' not in already_processed: already_processed.add('g_leak') - try: - self.g_leak = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (g_leak): %s' % exp) + value = self.gds_parse_float(value, node, 'g_leak') + self.g_leak = value value = find_attr_value_('gbar_K', node) if value is not None and 'gbar_K' not in already_processed: already_processed.add('gbar_K') - try: - self.gbar_K = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (gbar_K): %s' % exp) + value = self.gds_parse_float(value, node, 'gbar_K') + self.gbar_K = value value = find_attr_value_('gbar_Na', node) if value is not None and 'gbar_Na' not in already_processed: already_processed.add('gbar_Na') - try: - self.gbar_Na = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (gbar_Na): %s' % exp) - super(HH_cond_exp, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(HH_cond_exp, self).buildChildren(child_, node, nodeName_, True) + value = self.gds_parse_float(value, node, 'gbar_Na') + self.gbar_Na = value + super(HH_cond_exp, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(HH_cond_exp, self)._buildChildren(child_, node, nodeName_, True) pass # end class HH_cond_exp class basePyNNIaFCell(basePyNNCell): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('tau_m', 'xs:float', 0, 0, {'use': u'required'}), - MemberSpec_('tau_refrac', 'xs:float', 0, 0, {'use': u'required'}), - MemberSpec_('v_reset', 'xs:float', 0, 0, {'use': u'required'}), - MemberSpec_('v_rest', 'xs:float', 0, 0, {'use': u'required'}), - MemberSpec_('v_thresh', 'xs:float', 0, 0, {'use': u'required'}), + MemberSpec_('tau_m', 'xs:float', 0, 0, {'use': 'required', 'name': 'tau_m'}), + MemberSpec_('tau_refrac', 'xs:float', 0, 0, {'use': 'required', 'name': 'tau_refrac'}), + MemberSpec_('v_reset', 'xs:float', 0, 0, {'use': 'required', 'name': 'v_reset'}), + MemberSpec_('v_rest', 'xs:float', 0, 0, {'use': 'required', 'name': 'v_rest'}), + MemberSpec_('v_thresh', 'xs:float', 0, 0, {'use': 'required', 'name': 'v_thresh'}), ] subclass = None superclass = basePyNNCell - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, cm=None, i_offset=None, tau_syn_E=None, tau_syn_I=None, v_init=None, tau_m=None, tau_refrac=None, v_reset=None, v_rest=None, v_thresh=None, extensiontype_=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, cm=None, i_offset=None, tau_syn_E=None, tau_syn_I=None, v_init=None, tau_m=None, tau_refrac=None, v_reset=None, v_rest=None, v_thresh=None, extensiontype_=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(basePyNNIaFCell, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, cm, i_offset, tau_syn_E, tau_syn_I, v_init, extensiontype_, **kwargs_) + self.ns_prefix_ = None + super(globals().get("basePyNNIaFCell"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, cm, i_offset, tau_syn_E, tau_syn_I, v_init, extensiontype_, **kwargs_) self.tau_m 
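HH_cond_exp stays a flat container for the PyNN parameter set: the nine conductance and reversal attributes listed in member_data_items_ above are plain xs:float fields with matching get_/set_ accessors. A short sketch with illustrative parameter values:

# Sketch: constructing the PyNN HH cell; values are examples only.
from neuroml import HH_cond_exp

cell = HH_cond_exp(id="hh0", cm=0.2, i_offset=0.0, tau_syn_E=0.2, tau_syn_I=2.0,
                   v_init=-65.0, v_offset=-63.0, e_rev_E=0.0, e_rev_I=-80.0,
                   e_rev_K=-90.0, e_rev_Na=50.0, e_rev_leak=-65.0,
                   g_leak=0.01, gbar_K=6.0, gbar_Na=20.0)
print(cell.get_gbar_Na())   # -> 20.0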
= _cast(float, tau_m) + self.tau_m_nsprefix_ = None self.tau_refrac = _cast(float, tau_refrac) + self.tau_refrac_nsprefix_ = None self.v_reset = _cast(float, v_reset) + self.v_reset_nsprefix_ = None self.v_rest = _cast(float, v_rest) + self.v_rest_nsprefix_ = None self.v_thresh = _cast(float, v_thresh) + self.v_thresh_nsprefix_ = None self.extensiontype_ = extensiontype_ def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: @@ -20568,9 +27896,35 @@ def factory(*args_, **kwargs_): else: return basePyNNIaFCell(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_tau_m(self): + return self.tau_m + def set_tau_m(self, tau_m): + self.tau_m = tau_m + def get_tau_refrac(self): + return self.tau_refrac + def set_tau_refrac(self, tau_refrac): + self.tau_refrac = tau_refrac + def get_v_reset(self): + return self.v_reset + def set_v_reset(self, v_reset): + self.v_reset = v_reset + def get_v_rest(self): + return self.v_rest + def set_v_rest(self, v_rest): + self.v_rest = v_rest + def get_v_thresh(self): + return self.v_thresh + def set_v_thresh(self, v_thresh): + self.v_thresh = v_thresh + def get_extensiontype_(self): return self.extensiontype_ + def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ + def _hasContent(self): if ( - super(basePyNNIaFCell, self).hasContent_() + super(basePyNNIaFCell, self)._hasContent() ): return True else: @@ -20583,21 +27937,23 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='b eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'basePyNNIaFCell': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='basePyNNIaFCell') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='basePyNNIaFCell') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='basePyNNIaFCell', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='basePyNNIaFCell', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='basePyNNIaFCell'): - super(basePyNNIaFCell, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='basePyNNIaFCell') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='basePyNNIaFCell'): + super(basePyNNIaFCell, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='basePyNNIaFCell') if self.tau_m is not None and 'tau_m' not in already_processed: already_processed.add('tau_m') outfile.write(' tau_m="%s"' % self.gds_format_float(self.tau_m, input_name='tau_m')) @@ -20616,77 +27972,83 @@ def exportAttributes(self, outfile, level, already_processed, namespaceprefix_=' if self.extensiontype_ is not None 
and 'xsi:type' not in already_processed: already_processed.add('xsi:type') outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='basePyNNIaFCell', fromsubclass_=False, pretty_print=True): - super(basePyNNIaFCell, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get(self.extensiontype_, '') + outfile.write(' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_)) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='basePyNNIaFCell', fromsubclass_=False, pretty_print=True): + super(basePyNNIaFCell, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('tau_m', node) if value is not None and 'tau_m' not in already_processed: already_processed.add('tau_m') - try: - self.tau_m = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (tau_m): %s' % exp) + value = self.gds_parse_float(value, node, 'tau_m') + self.tau_m = value value = find_attr_value_('tau_refrac', node) if value is not None and 'tau_refrac' not in already_processed: already_processed.add('tau_refrac') - try: - self.tau_refrac = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (tau_refrac): %s' % exp) + value = self.gds_parse_float(value, node, 'tau_refrac') + self.tau_refrac = value value = find_attr_value_('v_reset', node) if value is not None and 'v_reset' not in already_processed: already_processed.add('v_reset') - try: - self.v_reset = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (v_reset): %s' % exp) + value = self.gds_parse_float(value, node, 'v_reset') + self.v_reset = value value = find_attr_value_('v_rest', node) if value is not None and 'v_rest' not in already_processed: already_processed.add('v_rest') - try: - self.v_rest = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (v_rest): %s' % exp) + value = self.gds_parse_float(value, node, 'v_rest') + self.v_rest = value value = find_attr_value_('v_thresh', node) if value is not None and 'v_thresh' not in already_processed: already_processed.add('v_thresh') - try: - self.v_thresh = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (v_thresh): %s' % exp) + value = self.gds_parse_float(value, node, 'v_thresh') + self.v_thresh = value value = find_attr_value_('xsi:type', node) if value is not 
None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') self.extensiontype_ = value - super(basePyNNIaFCell, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(basePyNNIaFCell, self).buildChildren(child_, node, nodeName_, True) + super(basePyNNIaFCell, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(basePyNNIaFCell, self)._buildChildren(child_, node, nodeName_, True) pass # end class basePyNNIaFCell class ContinuousConnection(BaseConnectionNewFormat): - """Individual continuous/analog synaptic connection""" + """ContinuousConnection -- Individual continuous/analog synaptic connection + + """ + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('pre_component', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('post_component', 'NmlId', 0, 0, {'use': u'required'}), + MemberSpec_('pre_component', 'NmlId', 0, 0, {'use': 'required', 'name': 'pre_component'}), + MemberSpec_('post_component', 'NmlId', 0, 0, {'use': 'required', 'name': 'post_component'}), ] subclass = None superclass = BaseConnectionNewFormat - def __init__(self, neuro_lex_id=None, id=None, pre_cell=None, pre_segment='0', pre_fraction_along='0.5', post_cell=None, post_segment='0', post_fraction_along='0.5', pre_component=None, post_component=None, extensiontype_=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, pre_cell=None, pre_segment='0', pre_fraction_along='0.5', post_cell=None, post_segment='0', post_fraction_along='0.5', pre_component=None, post_component=None, extensiontype_=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(ContinuousConnection, self).__init__(neuro_lex_id, id, pre_cell, pre_segment, pre_fraction_along, post_cell, post_segment, post_fraction_along, extensiontype_, **kwargs_) + self.ns_prefix_ = None + super(globals().get("ContinuousConnection"), self).__init__(neuro_lex_id, id, pre_cell, pre_segment, pre_fraction_along, post_cell, post_segment, post_fraction_along, extensiontype_, **kwargs_) self.pre_component = _cast(None, pre_component) + self.pre_component_nsprefix_ = None self.post_component = _cast(None, post_component) + self.post_component_nsprefix_ = None self.extensiontype_ = extensiontype_ def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: @@ -20699,16 +28061,34 @@ def factory(*args_, **kwargs_): else: return ContinuousConnection(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_preComponent(self): + return self.pre_component + def set_preComponent(self, pre_component): + self.pre_component = pre_component + def get_postComponent(self): + return self.post_component + def set_postComponent(self, post_component): + self.post_component = post_component + def get_extensiontype_(self): return self.extensiontype_ + def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. 
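ContinuousConnection keeps the usual new-format connection fields and adds the preComponent/postComponent pair; note that the Python attributes are snake_case (pre_component, post_component) while the exported XML attributes stay camelCase. A sketch, with illustrative ids and paths:

# Sketch: snake_case fields serialise to camelCase XML attributes.
import sys
from neuroml import ContinuousConnection

conn = ContinuousConnection(id="0", pre_cell="../pop0/0/cell", post_cell="../pop1/0/cell",
                            pre_component="silentSyn1", post_component="gradedSyn1")
conn.export(sys.stdout, 0, name_="continuousConnection", namespacedef_="")
# the output carries preComponent="silentSyn1" and postComponent="gradedSyn1",
# alongside the pre/post cell attributes written by the base connection class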
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_NmlId_patterns_, )) + validate_NmlId_patterns_ = [['^([a-zA-Z_][a-zA-Z0-9_]*)$']] + def _hasContent(self): if ( - super(ContinuousConnection, self).hasContent_() + super(ContinuousConnection, self)._hasContent() ): return True else: @@ -20721,41 +28101,51 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='C eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'ContinuousConnection': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ContinuousConnection') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ContinuousConnection') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ContinuousConnection', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ContinuousConnection', pretty_print=pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ContinuousConnection'): - super(ContinuousConnection, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ContinuousConnection') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ContinuousConnection'): + super(ContinuousConnection, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ContinuousConnection') if self.pre_component is not None and 'pre_component' not in already_processed: already_processed.add('pre_component') - outfile.write(' preComponent=%s' % (quote_attrib(self.pre_component), )) + outfile.write(' preComponent=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.pre_component), input_name='preComponent')), )) if self.post_component is not None and 'post_component' not in already_processed: already_processed.add('post_component') - outfile.write(' postComponent=%s' % (quote_attrib(self.post_component), )) + outfile.write(' postComponent=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.post_component), input_name='postComponent')), )) if self.extensiontype_ is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') outfile.write(' 
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ContinuousConnection', fromsubclass_=False, pretty_print=True): - super(ContinuousConnection, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get(self.extensiontype_, '') + outfile.write(' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_)) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ContinuousConnection', fromsubclass_=False, pretty_print=True): + super(ContinuousConnection, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) pass - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('preComponent', node) if value is not None and 'preComponent' not in already_processed: already_processed.add('preComponent') @@ -20770,9 +28160,9 @@ def buildAttributes(self, node, attrs, already_processed): if value is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') self.extensiontype_ = value - super(ContinuousConnection, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(ContinuousConnection, self).buildChildren(child_, node, nodeName_, True) + super(ContinuousConnection, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(ContinuousConnection, self)._buildChildren(child_, node, nodeName_, True) pass def _get_cell_id(self, id_string): @@ -20846,17 +28236,24 @@ def __str__(self): class ElectricalConnection(BaseConnectionNewFormat): - """Individual electrical synaptic connection""" + """ElectricalConnection -- Individual electrical synaptic connection + + """ + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('synapse', 'NmlId', 0, 0, {'use': u'required'}), + MemberSpec_('synapse', 'NmlId', 0, 0, {'use': 'required', 'name': 'synapse'}), ] subclass = None superclass = BaseConnectionNewFormat - def __init__(self, neuro_lex_id=None, id=None, pre_cell=None, pre_segment='0', pre_fraction_along='0.5', post_cell=None, post_segment='0', post_fraction_along='0.5', synapse=None, extensiontype_=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, pre_cell=None, pre_segment='0', pre_fraction_along='0.5', post_cell=None, post_segment='0', post_fraction_along='0.5', synapse=None, extensiontype_=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + 
self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(ElectricalConnection, self).__init__(neuro_lex_id, id, pre_cell, pre_segment, pre_fraction_along, post_cell, post_segment, post_fraction_along, extensiontype_, **kwargs_) + self.ns_prefix_ = None + super(globals().get("ElectricalConnection"), self).__init__(neuro_lex_id, id, pre_cell, pre_segment, pre_fraction_along, post_cell, post_segment, post_fraction_along, extensiontype_, **kwargs_) self.synapse = _cast(None, synapse) + self.synapse_nsprefix_ = None self.extensiontype_ = extensiontype_ def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: @@ -20869,16 +28266,30 @@ def factory(*args_, **kwargs_): else: return ElectricalConnection(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_synapse(self): + return self.synapse + def set_synapse(self, synapse): + self.synapse = synapse + def get_extensiontype_(self): return self.extensiontype_ + def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_NmlId_patterns_, )) + validate_NmlId_patterns_ = [['^([a-zA-Z_][a-zA-Z0-9_]*)$']] + def _hasContent(self): if ( - super(ElectricalConnection, self).hasContent_() + super(ElectricalConnection, self)._hasContent() ): return True else: @@ -20891,38 +28302,48 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='E eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'ElectricalConnection': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ElectricalConnection') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ElectricalConnection') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ElectricalConnection', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ElectricalConnection', pretty_print=pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, 
outfile, level, already_processed, namespaceprefix_='', name_='ElectricalConnection'): - super(ElectricalConnection, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ElectricalConnection') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ElectricalConnection'): + super(ElectricalConnection, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ElectricalConnection') if self.synapse is not None and 'synapse' not in already_processed: already_processed.add('synapse') - outfile.write(' synapse=%s' % (quote_attrib(self.synapse), )) + outfile.write(' synapse=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.synapse), input_name='synapse')), )) if self.extensiontype_ is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ElectricalConnection', fromsubclass_=False, pretty_print=True): - super(ElectricalConnection, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get(self.extensiontype_, '') + outfile.write(' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_)) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ElectricalConnection', fromsubclass_=False, pretty_print=True): + super(ElectricalConnection, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) pass - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('synapse', node) if value is not None and 'synapse' not in already_processed: already_processed.add('synapse') @@ -20932,9 +28353,9 @@ def buildAttributes(self, node, attrs, already_processed): if value is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') self.extensiontype_ = value - super(ElectricalConnection, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(ElectricalConnection, self).buildChildren(child_, node, nodeName_, True) + super(ElectricalConnection, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(ElectricalConnection, self)._buildChildren(child_, node, nodeName_, True) pass def _get_cell_id(self, id_string): @@ -21007,19 +28428,27 @@ def __str__(self): class ConnectionWD(BaseConnectionOldFormat): - """Individual synaptic 
connection with weight and delay""" + """ConnectionWD -- Individual synaptic connection with weight and delay + + """ + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('weight', 'xs:float', 0, 0, {'use': u'required'}), - MemberSpec_('delay', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), + MemberSpec_('weight', 'xs:float', 0, 0, {'use': 'required', 'name': 'weight'}), + MemberSpec_('delay', 'Nml2Quantity_time', 0, 0, {'use': 'required', 'name': 'delay'}), ] subclass = None superclass = BaseConnectionOldFormat - def __init__(self, neuro_lex_id=None, id=None, pre_cell_id=None, pre_segment_id='0', pre_fraction_along='0.5', post_cell_id=None, post_segment_id='0', post_fraction_along='0.5', weight=None, delay=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, pre_cell_id=None, pre_segment_id='0', pre_fraction_along='0.5', post_cell_id=None, post_segment_id='0', post_fraction_along='0.5', weight=None, delay=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(ConnectionWD, self).__init__(neuro_lex_id, id, pre_cell_id, pre_segment_id, pre_fraction_along, post_cell_id, post_segment_id, post_fraction_along, **kwargs_) + self.ns_prefix_ = None + super(globals().get("ConnectionWD"), self).__init__(neuro_lex_id, id, pre_cell_id, pre_segment_id, pre_fraction_along, post_cell_id, post_segment_id, post_fraction_along, **kwargs_) self.weight = _cast(float, weight) + self.weight_nsprefix_ = None self.delay = _cast(None, delay) + self.delay_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -21031,16 +28460,32 @@ def factory(*args_, **kwargs_): else: return ConnectionWD(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_weight(self): + return self.weight + def set_weight(self, weight): + self.weight = weight + def get_delay(self): + return self.delay + def set_delay(self, delay): + self.delay = delay def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_time_patterns_, )) + validate_Nml2Quantity_time_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms))$']] + def _hasContent(self): if ( - super(ConnectionWD, self).hasContent_() + super(ConnectionWD, self)._hasContent() ): return True else: @@ -21053,52 +28498,56 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='C eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'ConnectionWD': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ConnectionWD') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ConnectionWD') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ConnectionWD', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ConnectionWD', pretty_print=pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ConnectionWD'): - super(ConnectionWD, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ConnectionWD') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ConnectionWD'): + super(ConnectionWD, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ConnectionWD') if self.weight is not None and 'weight' not in already_processed: already_processed.add('weight') outfile.write(' weight="%s"' % self.gds_format_float(self.weight, input_name='weight')) if self.delay is not None and 'delay' not in already_processed: already_processed.add('delay') - outfile.write(' delay=%s' % (quote_attrib(self.delay), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ConnectionWD', fromsubclass_=False, pretty_print=True): - super(ConnectionWD, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write(' delay=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.delay), input_name='delay')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', 
name_='ConnectionWD', fromsubclass_=False, pretty_print=True): + super(ConnectionWD, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) pass - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('weight', node) if value is not None and 'weight' not in already_processed: already_processed.add('weight') - try: - self.weight = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (weight): %s' % exp) + value = self.gds_parse_float(value, node, 'weight') + self.weight = value value = find_attr_value_('delay', node) if value is not None and 'delay' not in already_processed: already_processed.add('delay') self.delay = value self.validate_Nml2Quantity_time(self.delay) # validate type Nml2Quantity_time - super(ConnectionWD, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(ConnectionWD, self).buildChildren(child_, node, nodeName_, True) + super(ConnectionWD, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(ConnectionWD, self)._buildChildren(child_, node, nodeName_, True) pass def _get_cell_id(self, id_string): @@ -21188,16 +28637,21 @@ def get_delay_in_ms(self): class Connection(BaseConnectionOldFormat): - """Individual chemical (event based) synaptic connection, weight==1 and - no delay""" + """Connection -- Individual chemical (event based) synaptic connection, weight==1 and no delay + + """ + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ ] subclass = None superclass = BaseConnectionOldFormat - def __init__(self, neuro_lex_id=None, id=None, pre_cell_id=None, pre_segment_id='0', pre_fraction_along='0.5', post_cell_id=None, post_segment_id='0', post_fraction_along='0.5', **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, pre_cell_id=None, pre_segment_id='0', pre_fraction_along='0.5', post_cell_id=None, post_segment_id='0', post_fraction_along='0.5', gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(Connection, self).__init__(neuro_lex_id, id, pre_cell_id, pre_segment_id, pre_fraction_along, post_cell_id, post_segment_id, post_fraction_along, **kwargs_) + self.ns_prefix_ = None + super(globals().get("Connection"), self).__init__(neuro_lex_id, id, pre_cell_id, pre_segment_id, pre_fraction_along, post_cell_id, post_segment_id, post_fraction_along, **kwargs_) def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -21209,9 +28663,13 @@ def factory(*args_, **kwargs_): else: return Connection(*args_, **kwargs_) factory = staticmethod(factory) - def 
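Apart from the underscore-prefixed renames, the regenerated ConnectionWD keeps the same public constructor, so a typical instantiation still looks like the sketch below (the population paths are made-up placeholders, not values from this changeset):

import neuroml

# weight is parsed as a plain float; delay must match the Nml2Quantity_time pattern, e.g. "5 ms" or "0.01s"
conn = neuroml.ConnectionWD(
    id=0,
    pre_cell_id="../popA/0/someCell",   # placeholder population path
    post_cell_id="../popB/0/someCell",  # placeholder population path
    weight=1.0,
    delay="5 ms",
)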
hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def _hasContent(self): if ( - super(Connection, self).hasContent_() + super(Connection, self)._hasContent() ): return True else: @@ -21224,34 +28682,40 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='C eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'Connection': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Connection') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Connection') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Connection', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Connection', pretty_print=pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Connection'): - super(Connection, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Connection') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Connection', fromsubclass_=False, pretty_print=True): - super(Connection, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Connection'): + super(Connection, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Connection') + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Connection', fromsubclass_=False, pretty_print=True): + super(Connection, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) pass - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - super(Connection, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(Connection, self).buildChildren(child_, node, nodeName_, True) + def _buildAttributes(self, node, attrs, already_processed): + super(Connection, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(Connection, self)._buildChildren(child_, node, nodeName_, True) pass def 
_get_cell_id(self, id_string): @@ -21325,16 +28789,21 @@ def __str__(self): class Cell2CaPools(Cell): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('biophysical_properties2_ca_pools', 'BiophysicalProperties2CaPools', 0, 1, {u'type': u'BiophysicalProperties2CaPools', u'name': u'biophysicalProperties2CaPools', u'minOccurs': u'0'}, None), + MemberSpec_('biophysical_properties2_ca_pools', 'BiophysicalProperties2CaPools', 0, 1, {'minOccurs': '0', 'name': 'biophysicalProperties2CaPools', 'type': 'BiophysicalProperties2CaPools'}, None), ] subclass = None superclass = Cell - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, morphology_attr=None, biophysical_properties_attr=None, morphology=None, biophysical_properties=None, biophysical_properties2_ca_pools=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, morphology_attr=None, biophysical_properties_attr=None, morphology=None, biophysical_properties=None, biophysical_properties2_ca_pools=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(Cell2CaPools, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, morphology_attr, biophysical_properties_attr, morphology, biophysical_properties, **kwargs_) + self.ns_prefix_ = None + super(globals().get("Cell2CaPools"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, morphology_attr, biophysical_properties_attr, morphology, biophysical_properties, **kwargs_) self.biophysical_properties2_ca_pools = biophysical_properties2_ca_pools + self.biophysical_properties2_ca_pools_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -21346,15 +28815,23 @@ def factory(*args_, **kwargs_): else: return Cell2CaPools(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_biophysicalProperties2CaPools(self): + return self.biophysical_properties2_ca_pools + def set_biophysicalProperties2CaPools(self, biophysical_properties2_ca_pools): + self.biophysical_properties2_ca_pools = biophysical_properties2_ca_pools + def _hasContent(self): if ( self.biophysical_properties2_ca_pools is not None or - super(Cell2CaPools, self).hasContent_() + super(Cell2CaPools, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Cell2CaPools', pretty_print=True): + def export(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='Cell2CaPools', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('Cell2CaPools') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ @@ -21362,77 +28839,98 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='C eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'Cell2CaPools': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, 
namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Cell2CaPools') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Cell2CaPools') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Cell2CaPools', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Cell2CaPools', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Cell2CaPools'): - super(Cell2CaPools, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Cell2CaPools') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Cell2CaPools', fromsubclass_=False, pretty_print=True): - super(Cell2CaPools, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Cell2CaPools'): + super(Cell2CaPools, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Cell2CaPools') + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='Cell2CaPools', fromsubclass_=False, pretty_print=True): + super(Cell2CaPools, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) if pretty_print: eol_ = '\n' else: eol_ = '' if self.biophysical_properties2_ca_pools is not None: + namespaceprefix_ = self.biophysical_properties2_ca_pools_nsprefix_ + ':' if (UseCapturedNS_ and self.biophysical_properties2_ca_pools_nsprefix_) else '' self.biophysical_properties2_ca_pools.export(outfile, level, namespaceprefix_, namespacedef_='', name_='biophysicalProperties2CaPools', pretty_print=pretty_print) - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - super(Cell2CaPools, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + def _buildAttributes(self, node, attrs, already_processed): + super(Cell2CaPools, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'biophysicalProperties2CaPools': obj_ = BiophysicalProperties2CaPools.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.biophysical_properties2_ca_pools = obj_ obj_.original_tagname_ = 'biophysicalProperties2CaPools' - super(Cell2CaPools, self).buildChildren(child_, 
node, nodeName_, True) + super(Cell2CaPools, self)._buildChildren(child_, node, nodeName_, True) # end class Cell2CaPools class AdExIaFCell(BaseCellMembPotCap): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('g_l', 'Nml2Quantity_conductance', 0, 0, {'use': u'required'}), - MemberSpec_('EL', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('reset', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('VT', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('thresh', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('del_t', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('tauw', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('refract', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('a', 'Nml2Quantity_conductance', 0, 0, {'use': u'required'}), - MemberSpec_('b', 'Nml2Quantity_current', 0, 0, {'use': u'required'}), + MemberSpec_('g_l', 'Nml2Quantity_conductance', 0, 0, {'use': 'required', 'name': 'g_l'}), + MemberSpec_('EL', 'Nml2Quantity_voltage', 0, 0, {'use': 'required', 'name': 'EL'}), + MemberSpec_('reset', 'Nml2Quantity_voltage', 0, 0, {'use': 'required', 'name': 'reset'}), + MemberSpec_('VT', 'Nml2Quantity_voltage', 0, 0, {'use': 'required', 'name': 'VT'}), + MemberSpec_('thresh', 'Nml2Quantity_voltage', 0, 0, {'use': 'required', 'name': 'thresh'}), + MemberSpec_('del_t', 'Nml2Quantity_voltage', 0, 0, {'use': 'required', 'name': 'del_t'}), + MemberSpec_('tauw', 'Nml2Quantity_time', 0, 0, {'use': 'required', 'name': 'tauw'}), + MemberSpec_('refract', 'Nml2Quantity_time', 0, 0, {'use': 'required', 'name': 'refract'}), + MemberSpec_('a', 'Nml2Quantity_conductance', 0, 0, {'use': 'required', 'name': 'a'}), + MemberSpec_('b', 'Nml2Quantity_current', 0, 0, {'use': 'required', 'name': 'b'}), ] subclass = None superclass = BaseCellMembPotCap - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, C=None, g_l=None, EL=None, reset=None, VT=None, thresh=None, del_t=None, tauw=None, refract=None, a=None, b=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, C=None, g_l=None, EL=None, reset=None, VT=None, thresh=None, del_t=None, tauw=None, refract=None, a=None, b=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(AdExIaFCell, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, C, **kwargs_) + self.ns_prefix_ = None + super(globals().get("AdExIaFCell"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, C, **kwargs_) self.g_l = _cast(None, g_l) + self.g_l_nsprefix_ = None self.EL = _cast(None, EL) + self.EL_nsprefix_ = None self.reset = _cast(None, reset) + self.reset_nsprefix_ = None self.VT = _cast(None, VT) + self.VT_nsprefix_ = None self.thresh = _cast(None, thresh) + self.thresh_nsprefix_ = None self.del_t = _cast(None, del_t) + self.del_t_nsprefix_ = None self.tauw = _cast(None, tauw) + self.tauw_nsprefix_ = None self.refract = _cast(None, refract) + self.refract_nsprefix_ = None self.a = _cast(None, a) + self.a_nsprefix_ = None self.b = _cast(None, b) + self.b_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -21444,37 +28942,97 @@ def factory(*args_, 
**kwargs_): else: return AdExIaFCell(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_gL(self): + return self.g_l + def set_gL(self, g_l): + self.g_l = g_l + def get_EL(self): + return self.EL + def set_EL(self, EL): + self.EL = EL + def get_reset(self): + return self.reset + def set_reset(self, reset): + self.reset = reset + def get_VT(self): + return self.VT + def set_VT(self, VT): + self.VT = VT + def get_thresh(self): + return self.thresh + def set_thresh(self, thresh): + self.thresh = thresh + def get_delT(self): + return self.del_t + def set_delT(self, del_t): + self.del_t = del_t + def get_tauw(self): + return self.tauw + def set_tauw(self, tauw): + self.tauw = tauw + def get_refract(self): + return self.refract + def set_refract(self, refract): + self.refract = refract + def get_a(self): + return self.a + def set_a(self, a): + self.a = a + def get_b(self): + return self.b + def set_b(self, b): + self.b = b def validate_Nml2Quantity_conductance(self, value): # Validate type Nml2Quantity_conductance, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_conductance_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_conductance_patterns_, )) - validate_Nml2Quantity_conductance_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S|mS|uS|nS|pS)$']] + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_conductance_patterns_, )) + validate_Nml2Quantity_conductance_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S|mS|uS|nS|pS))$']] def validate_Nml2Quantity_voltage(self, value): # Validate type Nml2Quantity_voltage, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_voltage_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_voltage_patterns_, )) - validate_Nml2Quantity_voltage_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$']] + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_voltage_patterns_, )) + validate_Nml2Quantity_voltage_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV))$']] def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_time_patterns_, )) + validate_Nml2Quantity_time_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms))$']] def validate_Nml2Quantity_current(self, value): # Validate type Nml2Quantity_current, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_current_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_current_patterns_, )) - validate_Nml2Quantity_current_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(A|uA|nA|pA)$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_current_patterns_, )) + validate_Nml2Quantity_current_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(A|uA|nA|pA))$']] + def _hasContent(self): if ( - super(AdExIaFCell, self).hasContent_() + super(AdExIaFCell, self)._hasContent() ): return True else: @@ -21487,61 +29045,67 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='A eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'AdExIaFCell': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='AdExIaFCell') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='AdExIaFCell') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='AdExIaFCell', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='AdExIaFCell', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='AdExIaFCell'): - 
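The validate_* helpers above now record problems on self.gds_collector_ instead of emitting warnings, so a collector has to be attached before any messages show up. A rough sketch, assuming the collector class emitted by generateDS into nml.py is named GdsCollector_ with a get_messages() accessor:

import neuroml
from neuroml.nml.nml import GdsCollector_  # assumed: collector class from the generateDS runtime

cell = neuroml.AdExIaFCell(id="adex_check")
cell.gds_collector_ = GdsCollector_()
cell.validate_Nml2Quantity_voltage("-60 millivolt")  # bad unit: a message is collected rather than warned
for message in cell.gds_collector_.get_messages():
    print(message)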
super(AdExIaFCell, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='AdExIaFCell') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='AdExIaFCell'): + super(AdExIaFCell, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='AdExIaFCell') if self.g_l is not None and 'g_l' not in already_processed: already_processed.add('g_l') - outfile.write(' gL=%s' % (quote_attrib(self.g_l), )) + outfile.write(' gL=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.g_l), input_name='gL')), )) if self.EL is not None and 'EL' not in already_processed: already_processed.add('EL') - outfile.write(' EL=%s' % (quote_attrib(self.EL), )) + outfile.write(' EL=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.EL), input_name='EL')), )) if self.reset is not None and 'reset' not in already_processed: already_processed.add('reset') - outfile.write(' reset=%s' % (quote_attrib(self.reset), )) + outfile.write(' reset=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.reset), input_name='reset')), )) if self.VT is not None and 'VT' not in already_processed: already_processed.add('VT') - outfile.write(' VT=%s' % (quote_attrib(self.VT), )) + outfile.write(' VT=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.VT), input_name='VT')), )) if self.thresh is not None and 'thresh' not in already_processed: already_processed.add('thresh') - outfile.write(' thresh=%s' % (quote_attrib(self.thresh), )) + outfile.write(' thresh=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.thresh), input_name='thresh')), )) if self.del_t is not None and 'del_t' not in already_processed: already_processed.add('del_t') - outfile.write(' delT=%s' % (quote_attrib(self.del_t), )) + outfile.write(' delT=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.del_t), input_name='delT')), )) if self.tauw is not None and 'tauw' not in already_processed: already_processed.add('tauw') - outfile.write(' tauw=%s' % (quote_attrib(self.tauw), )) + outfile.write(' tauw=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.tauw), input_name='tauw')), )) if self.refract is not None and 'refract' not in already_processed: already_processed.add('refract') - outfile.write(' refract=%s' % (quote_attrib(self.refract), )) + outfile.write(' refract=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.refract), input_name='refract')), )) if self.a is not None and 'a' not in already_processed: already_processed.add('a') - outfile.write(' a=%s' % (quote_attrib(self.a), )) + outfile.write(' a=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.a), input_name='a')), )) if self.b is not None and 'b' not in already_processed: already_processed.add('b') - outfile.write(' b=%s' % (quote_attrib(self.b), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='AdExIaFCell', fromsubclass_=False, pretty_print=True): - super(AdExIaFCell, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write(' b=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.b), input_name='b')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='AdExIaFCell', fromsubclass_=False, pretty_print=True): + super(AdExIaFCell, 
self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('gL', node) if value is not None and 'gL' not in already_processed: already_processed.add('gL') @@ -21592,40 +29156,53 @@ def buildAttributes(self, node, attrs, already_processed): already_processed.add('b') self.b = value self.validate_Nml2Quantity_current(self.b) # validate type Nml2Quantity_current - super(AdExIaFCell, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(AdExIaFCell, self).buildChildren(child_, node, nodeName_, True) + super(AdExIaFCell, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(AdExIaFCell, self)._buildChildren(child_, node, nodeName_, True) pass # end class AdExIaFCell class Izhikevich2007Cell(BaseCellMembPotCap): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('v0', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('k', 'Nml2Quantity_conductancePerVoltage', 0, 0, {'use': u'required'}), - MemberSpec_('vr', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('vt', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('vpeak', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('a', 'Nml2Quantity_pertime', 0, 0, {'use': u'required'}), - MemberSpec_('b', 'Nml2Quantity_conductance', 0, 0, {'use': u'required'}), - MemberSpec_('c', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('d', 'Nml2Quantity_current', 0, 0, {'use': u'required'}), + MemberSpec_('v0', 'Nml2Quantity_voltage', 0, 0, {'use': 'required', 'name': 'v0'}), + MemberSpec_('k', 'Nml2Quantity_conductancePerVoltage', 0, 0, {'use': 'required', 'name': 'k'}), + MemberSpec_('vr', 'Nml2Quantity_voltage', 0, 0, {'use': 'required', 'name': 'vr'}), + MemberSpec_('vt', 'Nml2Quantity_voltage', 0, 0, {'use': 'required', 'name': 'vt'}), + MemberSpec_('vpeak', 'Nml2Quantity_voltage', 0, 0, {'use': 'required', 'name': 'vpeak'}), + MemberSpec_('a', 'Nml2Quantity_pertime', 0, 0, {'use': 'required', 'name': 'a'}), + MemberSpec_('b', 'Nml2Quantity_conductance', 0, 0, {'use': 'required', 'name': 'b'}), + MemberSpec_('c', 'Nml2Quantity_voltage', 0, 0, {'use': 'required', 'name': 'c'}), + MemberSpec_('d', 'Nml2Quantity_current', 0, 0, {'use': 'required', 'name': 'd'}), ] subclass = None superclass = BaseCellMembPotCap - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, C=None, v0=None, k=None, vr=None, vt=None, vpeak=None, a=None, b=None, c=None, d=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, C=None, v0=None, k=None, vr=None, vt=None, vpeak=None, a=None, b=None, c=None, d=None, 
gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(Izhikevich2007Cell, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, C, **kwargs_) + self.ns_prefix_ = None + super(globals().get("Izhikevich2007Cell"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, C, **kwargs_) self.v0 = _cast(None, v0) + self.v0_nsprefix_ = None self.k = _cast(None, k) + self.k_nsprefix_ = None self.vr = _cast(None, vr) + self.vr_nsprefix_ = None self.vt = _cast(None, vt) + self.vt_nsprefix_ = None self.vpeak = _cast(None, vpeak) + self.vpeak_nsprefix_ = None self.a = _cast(None, a) + self.a_nsprefix_ = None self.b = _cast(None, b) + self.b_nsprefix_ = None self.c = _cast(None, c) + self.c_nsprefix_ = None self.d = _cast(None, d) + self.d_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -21637,44 +29214,104 @@ def factory(*args_, **kwargs_): else: return Izhikevich2007Cell(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_v0(self): + return self.v0 + def set_v0(self, v0): + self.v0 = v0 + def get_k(self): + return self.k + def set_k(self, k): + self.k = k + def get_vr(self): + return self.vr + def set_vr(self, vr): + self.vr = vr + def get_vt(self): + return self.vt + def set_vt(self, vt): + self.vt = vt + def get_vpeak(self): + return self.vpeak + def set_vpeak(self, vpeak): + self.vpeak = vpeak + def get_a(self): + return self.a + def set_a(self, a): + self.a = a + def get_b(self): + return self.b + def set_b(self, b): + self.b = b + def get_c(self): + return self.c + def set_c(self, c): + self.c = c + def get_d(self): + return self.d + def set_d(self, d): + self.d = d def validate_Nml2Quantity_voltage(self, value): # Validate type Nml2Quantity_voltage, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_voltage_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_voltage_patterns_, )) - validate_Nml2Quantity_voltage_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$']] + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_voltage_patterns_, )) + validate_Nml2Quantity_voltage_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV))$']] def validate_Nml2Quantity_conductancePerVoltage(self, value): # Validate type Nml2Quantity_conductancePerVoltage, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_conductancePerVoltage_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_conductancePerVoltage_patterns_, )) - validate_Nml2Quantity_conductancePerVoltage_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S_per_V|nS_per_mV)$']] + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_conductancePerVoltage_patterns_, )) + validate_Nml2Quantity_conductancePerVoltage_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S_per_V|nS_per_mV))$']] def validate_Nml2Quantity_pertime(self, value): # Validate type Nml2Quantity_pertime, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_pertime_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_pertime_patterns_, )) - validate_Nml2Quantity_pertime_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(per_s|per_ms|Hz)$']] + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_pertime_patterns_, )) + validate_Nml2Quantity_pertime_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(per_s|per_ms|Hz))$']] def validate_Nml2Quantity_conductance(self, value): # Validate type Nml2Quantity_conductance, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_conductance_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_conductance_patterns_, )) - validate_Nml2Quantity_conductance_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S|mS|uS|nS|pS)$']] + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_conductance_patterns_, )) + validate_Nml2Quantity_conductance_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S|mS|uS|nS|pS))$']] def validate_Nml2Quantity_current(self, value): # Validate type Nml2Quantity_current, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_current_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_current_patterns_, )) - validate_Nml2Quantity_current_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(A|uA|nA|pA)$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_current_patterns_, )) + validate_Nml2Quantity_current_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(A|uA|nA|pA))$']] + def _hasContent(self): if ( - super(Izhikevich2007Cell, self).hasContent_() + super(Izhikevich2007Cell, self)._hasContent() ): return True else: @@ -21687,58 +29324,64 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='I eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'Izhikevich2007Cell': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Izhikevich2007Cell') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Izhikevich2007Cell') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Izhikevich2007Cell', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Izhikevich2007Cell', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Izhikevich2007Cell'): - super(Izhikevich2007Cell, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Izhikevich2007Cell') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Izhikevich2007Cell'): + super(Izhikevich2007Cell, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Izhikevich2007Cell') if self.v0 is not None and 'v0' not in already_processed: already_processed.add('v0') - outfile.write(' v0=%s' % (quote_attrib(self.v0), )) + outfile.write(' v0=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.v0), input_name='v0')), )) if self.k is not None and 'k' not in already_processed: already_processed.add('k') - outfile.write(' k=%s' % (quote_attrib(self.k), )) + outfile.write(' k=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.k), input_name='k')), )) if self.vr is not None and 'vr' not in already_processed: already_processed.add('vr') - outfile.write(' vr=%s' % (quote_attrib(self.vr), )) + outfile.write(' vr=%s' % 
(self.gds_encode(self.gds_format_string(quote_attrib(self.vr), input_name='vr')), )) if self.vt is not None and 'vt' not in already_processed: already_processed.add('vt') - outfile.write(' vt=%s' % (quote_attrib(self.vt), )) + outfile.write(' vt=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.vt), input_name='vt')), )) if self.vpeak is not None and 'vpeak' not in already_processed: already_processed.add('vpeak') - outfile.write(' vpeak=%s' % (quote_attrib(self.vpeak), )) + outfile.write(' vpeak=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.vpeak), input_name='vpeak')), )) if self.a is not None and 'a' not in already_processed: already_processed.add('a') - outfile.write(' a=%s' % (quote_attrib(self.a), )) + outfile.write(' a=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.a), input_name='a')), )) if self.b is not None and 'b' not in already_processed: already_processed.add('b') - outfile.write(' b=%s' % (quote_attrib(self.b), )) + outfile.write(' b=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.b), input_name='b')), )) if self.c is not None and 'c' not in already_processed: already_processed.add('c') - outfile.write(' c=%s' % (quote_attrib(self.c), )) + outfile.write(' c=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.c), input_name='c')), )) if self.d is not None and 'd' not in already_processed: already_processed.add('d') - outfile.write(' d=%s' % (quote_attrib(self.d), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Izhikevich2007Cell', fromsubclass_=False, pretty_print=True): - super(Izhikevich2007Cell, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write(' d=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.d), input_name='d')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Izhikevich2007Cell', fromsubclass_=False, pretty_print=True): + super(Izhikevich2007Cell, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('v0', node) if value is not None and 'v0' not in already_processed: already_processed.add('v0') @@ -21784,24 +29427,29 @@ def buildAttributes(self, node, attrs, already_processed): already_processed.add('d') self.d = value self.validate_Nml2Quantity_current(self.d) # validate type Nml2Quantity_current - super(Izhikevich2007Cell, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(Izhikevich2007Cell, self).buildChildren(child_, node, nodeName_, True) + super(Izhikevich2007Cell, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, 
nodeName_, fromsubclass_=False, gds_collector_=None): + super(Izhikevich2007Cell, self)._buildChildren(child_, node, nodeName_, True) pass # end class Izhikevich2007Cell class IafRefCell(IafCell): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('refract', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), + MemberSpec_('refract', 'Nml2Quantity_time', 0, 0, {'use': 'required', 'name': 'refract'}), ] subclass = None superclass = IafCell - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, leak_reversal=None, thresh=None, reset=None, C=None, leak_conductance=None, refract=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, leak_reversal=None, thresh=None, reset=None, C=None, leak_conductance=None, refract=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(IafRefCell, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, leak_reversal, thresh, reset, C, leak_conductance, **kwargs_) + self.ns_prefix_ = None + super(globals().get("IafRefCell"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, leak_reversal, thresh, reset, C, leak_conductance, **kwargs_) self.refract = _cast(None, refract) + self.refract_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -21813,16 +29461,28 @@ def factory(*args_, **kwargs_): else: return IafRefCell(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_refract(self): + return self.refract + def set_refract(self, refract): + self.refract = refract def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. 
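build() also changes signature: it now accepts the collector, remembers the source element when SaveElementTreeNode is set, and captures the node's namespace prefix. A sketch of parsing one element directly, assuming lxml is available and that the XML attribute names follow the usual NeuroML camel case (leakReversal, leakConductance); the MessageCollector stand-in from the earlier sketch is reused:

    from lxml import etree
    import neuroml.nml.nml as nml

    xml = b'<iafRefCell id="iafRef" leakReversal="-60mV" thresh="-55mV" reset="-70mV" ' \
          b'C="3.2pF" leakConductance="30nS" refract="5ms"/>'
    node = etree.fromstring(xml)

    collector = MessageCollector()          # anything with an add_message() method will do
    cell = nml.IafRefCell()
    cell.build(node, gds_collector_=collector)

    print(cell.refract)                     # "5ms", validated against (s|ms) during build
    print(collector.messages)               # empty if every attribute matched its pattern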
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_time_patterns_, )) + validate_Nml2Quantity_time_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms))$']] + def _hasContent(self): if ( - super(IafRefCell, self).hasContent_() + super(IafRefCell, self)._hasContent() ): return True else: @@ -21835,57 +29495,68 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='I eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'IafRefCell': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IafRefCell') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IafRefCell') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='IafRefCell', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='IafRefCell', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='IafRefCell'): - super(IafRefCell, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IafRefCell') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='IafRefCell'): + super(IafRefCell, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IafRefCell') if self.refract is not None and 'refract' not in already_processed: already_processed.add('refract') - outfile.write(' refract=%s' % (quote_attrib(self.refract), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IafRefCell', fromsubclass_=False, pretty_print=True): - super(IafRefCell, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write(' refract=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.refract), input_name='refract')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IafRefCell', fromsubclass_=False, 
pretty_print=True): + super(IafRefCell, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('refract', node) if value is not None and 'refract' not in already_processed: already_processed.add('refract') self.refract = value self.validate_Nml2Quantity_time(self.refract) # validate type Nml2Quantity_time - super(IafRefCell, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(IafRefCell, self).buildChildren(child_, node, nodeName_, True) + super(IafRefCell, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(IafRefCell, self)._buildChildren(child_, node, nodeName_, True) pass # end class IafRefCell class IafTauRefCell(IafTauCell): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('refract', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), + MemberSpec_('refract', 'Nml2Quantity_time', 0, 0, {'use': 'required', 'name': 'refract'}), ] subclass = None superclass = IafTauCell - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, leak_reversal=None, thresh=None, reset=None, tau=None, refract=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, leak_reversal=None, thresh=None, reset=None, tau=None, refract=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(IafTauRefCell, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, leak_reversal, thresh, reset, tau, **kwargs_) + self.ns_prefix_ = None + super(globals().get("IafTauRefCell"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, leak_reversal, thresh, reset, tau, **kwargs_) self.refract = _cast(None, refract) + self.refract_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -21897,16 +29568,28 @@ def factory(*args_, **kwargs_): else: return IafTauRefCell(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_refract(self): + return self.refract + def set_refract(self, refract): + self.refract = refract def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. 
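On the writing side the public export() entry point keeps its signature; only the helpers it delegates to are renamed to underscore-prefixed forms (_hasContent, _exportAttributes, _exportChildren) and attribute values are now passed through gds_encode(gds_format_string(quote_attrib(...))). A small sketch serialising a cell built in Python, with the lower-camel-case tag name passed explicitly as an illustrative choice:

    import sys
    from neuroml import IafRefCell

    cell = IafRefCell(id="iafRef", leak_reversal="-60mV", thresh="-55mV",
                      reset="-70mV", C="3.2pF", leak_conductance="30nS", refract="5ms")
    cell.export(sys.stdout, 0, name_="iafRefCell")
    # writes a single <iafRefCell .../> element carrying the attributes above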
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_time_patterns_, )) + validate_Nml2Quantity_time_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms))$']] + def _hasContent(self): if ( - super(IafTauRefCell, self).hasContent_() + super(IafTauRefCell, self)._hasContent() ): return True else: @@ -21919,63 +29602,77 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='I eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'IafTauRefCell': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IafTauRefCell') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IafTauRefCell') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='IafTauRefCell', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='IafTauRefCell', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='IafTauRefCell'): - super(IafTauRefCell, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IafTauRefCell') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='IafTauRefCell'): + super(IafTauRefCell, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IafTauRefCell') if self.refract is not None and 'refract' not in already_processed: already_processed.add('refract') - outfile.write(' refract=%s' % (quote_attrib(self.refract), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IafTauRefCell', fromsubclass_=False, pretty_print=True): - super(IafTauRefCell, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write(' refract=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.refract), input_name='refract')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', 
name_='IafTauRefCell', fromsubclass_=False, pretty_print=True): + super(IafTauRefCell, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('refract', node) if value is not None and 'refract' not in already_processed: already_processed.add('refract') self.refract = value self.validate_Nml2Quantity_time(self.refract) # validate type Nml2Quantity_time - super(IafTauRefCell, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(IafTauRefCell, self).buildChildren(child_, node, nodeName_, True) + super(IafTauRefCell, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(IafTauRefCell, self)._buildChildren(child_, node, nodeName_, True) pass # end class IafTauRefCell class DoubleSynapse(BaseVoltageDepSynapse): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('synapse1', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('synapse2', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('synapse1_path', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('synapse2_path', 'xs:string', 0, 0, {'use': u'required'}), + MemberSpec_('synapse1', 'NmlId', 0, 0, {'use': 'required', 'name': 'synapse1'}), + MemberSpec_('synapse2', 'NmlId', 0, 0, {'use': 'required', 'name': 'synapse2'}), + MemberSpec_('synapse1_path', 'xs:string', 0, 0, {'use': 'required', 'name': 'synapse1_path'}), + MemberSpec_('synapse2_path', 'xs:string', 0, 0, {'use': 'required', 'name': 'synapse2_path'}), ] subclass = None superclass = BaseVoltageDepSynapse - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, synapse1=None, synapse2=None, synapse1_path=None, synapse2_path=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, synapse1=None, synapse2=None, synapse1_path=None, synapse2_path=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(DoubleSynapse, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.ns_prefix_ = None + super(globals().get("DoubleSynapse"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) self.synapse1 = _cast(None, synapse1) + self.synapse1_nsprefix_ = None self.synapse2 = _cast(None, synapse2) + self.synapse2_nsprefix_ = None self.synapse1_path = _cast(None, synapse1_path) + self.synapse1_path_nsprefix_ = None self.synapse2_path = _cast(None, synapse2_path) + self.synapse2_path_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = 
getSubclassFromModule_( @@ -21987,16 +29684,40 @@ def factory(*args_, **kwargs_): else: return DoubleSynapse(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_synapse1(self): + return self.synapse1 + def set_synapse1(self, synapse1): + self.synapse1 = synapse1 + def get_synapse2(self): + return self.synapse2 + def set_synapse2(self, synapse2): + self.synapse2 = synapse2 + def get_synapse1Path(self): + return self.synapse1_path + def set_synapse1Path(self, synapse1_path): + self.synapse1_path = synapse1_path + def get_synapse2Path(self): + return self.synapse2_path + def set_synapse2Path(self, synapse2_path): + self.synapse2_path = synapse2_path def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_NmlId_patterns_, )) + validate_NmlId_patterns_ = [['^([a-zA-Z_][a-zA-Z0-9_]*)$']] + def _hasContent(self): if ( - super(DoubleSynapse, self).hasContent_() + super(DoubleSynapse, self)._hasContent() ): return True else: @@ -22009,43 +29730,49 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='D eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'DoubleSynapse': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='DoubleSynapse') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='DoubleSynapse') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='DoubleSynapse', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='DoubleSynapse', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='DoubleSynapse'): - super(DoubleSynapse, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='DoubleSynapse') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='DoubleSynapse'): + super(DoubleSynapse, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='DoubleSynapse') 
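Each attribute also gains an explicit accessor pair, and the accessor is named after the XML attribute rather than the Python member, so synapse1_path is read back with get_synapse1Path(). A short sketch with placeholder synapse ids and paths:

    from neuroml import DoubleSynapse

    ds = DoubleSynapse(id="ds", synapse1="ampaSyn", synapse2="nmdaSyn",
                       synapse1_path="./ampaSyn", synapse2_path="./nmdaSyn")
    ds.set_synapse1Path("../ampaSyn")
    print(ds.get_synapse1(), ds.get_synapse1Path())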
if self.synapse1 is not None and 'synapse1' not in already_processed: already_processed.add('synapse1') - outfile.write(' synapse1=%s' % (quote_attrib(self.synapse1), )) + outfile.write(' synapse1=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.synapse1), input_name='synapse1')), )) if self.synapse2 is not None and 'synapse2' not in already_processed: already_processed.add('synapse2') - outfile.write(' synapse2=%s' % (quote_attrib(self.synapse2), )) + outfile.write(' synapse2=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.synapse2), input_name='synapse2')), )) if self.synapse1_path is not None and 'synapse1_path' not in already_processed: already_processed.add('synapse1_path') outfile.write(' synapse1Path=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.synapse1_path), input_name='synapse1Path')), )) if self.synapse2_path is not None and 'synapse2_path' not in already_processed: already_processed.add('synapse2_path') outfile.write(' synapse2Path=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.synapse2_path), input_name='synapse2Path')), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='DoubleSynapse', fromsubclass_=False, pretty_print=True): - super(DoubleSynapse, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='DoubleSynapse', fromsubclass_=False, pretty_print=True): + super(DoubleSynapse, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('synapse1', node) if value is not None and 'synapse1' not in already_processed: already_processed.add('synapse1') @@ -22064,26 +29791,32 @@ def buildAttributes(self, node, attrs, already_processed): if value is not None and 'synapse2Path' not in already_processed: already_processed.add('synapse2Path') self.synapse2_path = value - super(DoubleSynapse, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(DoubleSynapse, self).buildChildren(child_, node, nodeName_, True) + super(DoubleSynapse, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(DoubleSynapse, self)._buildChildren(child_, node, nodeName_, True) pass # end class DoubleSynapse class AlphaCurrentSynapse(BaseCurrentBasedSynapse): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('tau', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('ibase', 'Nml2Quantity_current', 0, 0, {'use': u'required'}), + MemberSpec_('tau', 'Nml2Quantity_time', 0, 0, {'use': 
'required', 'name': 'tau'}), + MemberSpec_('ibase', 'Nml2Quantity_current', 0, 0, {'use': 'required', 'name': 'ibase'}), ] subclass = None superclass = BaseCurrentBasedSynapse - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, tau=None, ibase=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, tau=None, ibase=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(AlphaCurrentSynapse, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.ns_prefix_ = None + super(globals().get("AlphaCurrentSynapse"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) self.tau = _cast(None, tau) + self.tau_nsprefix_ = None self.ibase = _cast(None, ibase) + self.ibase_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -22095,23 +29828,43 @@ def factory(*args_, **kwargs_): else: return AlphaCurrentSynapse(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_tau(self): + return self.tau + def set_tau(self, tau): + self.tau = tau + def get_ibase(self): + return self.ibase + def set_ibase(self, ibase): + self.ibase = ibase def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_time_patterns_, )) + validate_Nml2Quantity_time_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms))$']] def validate_Nml2Quantity_current(self, value): # Validate type Nml2Quantity_current, a restriction on xs:string. 
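For a concrete synapse such as AlphaCurrentSynapse the switch from bare quote_attrib() to the gds_encode()/gds_format_string() pipeline only changes how the attribute text is escaped; well-formed quantity strings serialise as before. A quick sketch, with alphaCurrentSynapse as an illustrative tag name:

    import sys
    from neuroml import AlphaCurrentSynapse

    syn = AlphaCurrentSynapse(id="alphaCurr", tau="2ms", ibase="0.2nA")
    syn.export(sys.stdout, 0, name_="alphaCurrentSynapse")
    # expected output along the lines of:
    # <alphaCurrentSynapse id="alphaCurr" tau="2ms" ibase="0.2nA"/>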
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_current_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_current_patterns_, )) - validate_Nml2Quantity_current_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(A|uA|nA|pA)$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_current_patterns_, )) + validate_Nml2Quantity_current_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(A|uA|nA|pA))$']] + def _hasContent(self): if ( - super(AlphaCurrentSynapse, self).hasContent_() + super(AlphaCurrentSynapse, self)._hasContent() ): return True else: @@ -22124,37 +29877,43 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='A eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'AlphaCurrentSynapse': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='AlphaCurrentSynapse') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='AlphaCurrentSynapse') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='AlphaCurrentSynapse', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='AlphaCurrentSynapse', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='AlphaCurrentSynapse'): - super(AlphaCurrentSynapse, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='AlphaCurrentSynapse') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='AlphaCurrentSynapse'): + super(AlphaCurrentSynapse, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='AlphaCurrentSynapse') if self.tau is not None and 'tau' not in already_processed: already_processed.add('tau') - outfile.write(' tau=%s' % (quote_attrib(self.tau), )) + outfile.write(' tau=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.tau), input_name='tau')), )) if self.ibase is not None and 'ibase' not in already_processed: already_processed.add('ibase') - outfile.write(' ibase=%s' % (quote_attrib(self.ibase), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='AlphaCurrentSynapse', fromsubclass_=False, pretty_print=True): - super(AlphaCurrentSynapse, self).exportChildren(outfile, level, namespaceprefix_, 
name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write(' ibase=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.ibase), input_name='ibase')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='AlphaCurrentSynapse', fromsubclass_=False, pretty_print=True): + super(AlphaCurrentSynapse, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('tau', node) if value is not None and 'tau' not in already_processed: already_processed.add('tau') @@ -22165,28 +29924,35 @@ def buildAttributes(self, node, attrs, already_processed): already_processed.add('ibase') self.ibase = value self.validate_Nml2Quantity_current(self.ibase) # validate type Nml2Quantity_current - super(AlphaCurrentSynapse, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(AlphaCurrentSynapse, self).buildChildren(child_, node, nodeName_, True) + super(AlphaCurrentSynapse, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(AlphaCurrentSynapse, self)._buildChildren(child_, node, nodeName_, True) pass # end class AlphaCurrentSynapse class BaseConductanceBasedSynapseTwo(BaseVoltageDepSynapse): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('gbase1', 'Nml2Quantity_conductance', 0, 0, {'use': u'required'}), - MemberSpec_('gbase2', 'Nml2Quantity_conductance', 0, 0, {'use': u'required'}), - MemberSpec_('erev', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), + MemberSpec_('gbase1', 'Nml2Quantity_conductance', 0, 0, {'use': 'required', 'name': 'gbase1'}), + MemberSpec_('gbase2', 'Nml2Quantity_conductance', 0, 0, {'use': 'required', 'name': 'gbase2'}), + MemberSpec_('erev', 'Nml2Quantity_voltage', 0, 0, {'use': 'required', 'name': 'erev'}), ] subclass = None superclass = BaseVoltageDepSynapse - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, gbase1=None, gbase2=None, erev=None, extensiontype_=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, gbase1=None, gbase2=None, erev=None, extensiontype_=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(BaseConductanceBasedSynapseTwo, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, extensiontype_, **kwargs_) + self.ns_prefix_ = None + super(globals().get("BaseConductanceBasedSynapseTwo"), self).__init__(neuro_lex_id, id, metaid, notes, 
properties, annotation, extensiontype_, **kwargs_) self.gbase1 = _cast(None, gbase1) + self.gbase1_nsprefix_ = None self.gbase2 = _cast(None, gbase2) + self.gbase2_nsprefix_ = None self.erev = _cast(None, erev) + self.erev_nsprefix_ = None self.extensiontype_ = extensiontype_ def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: @@ -22199,23 +29965,49 @@ def factory(*args_, **kwargs_): else: return BaseConductanceBasedSynapseTwo(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_gbase1(self): + return self.gbase1 + def set_gbase1(self, gbase1): + self.gbase1 = gbase1 + def get_gbase2(self): + return self.gbase2 + def set_gbase2(self, gbase2): + self.gbase2 = gbase2 + def get_erev(self): + return self.erev + def set_erev(self, erev): + self.erev = erev + def get_extensiontype_(self): return self.extensiontype_ + def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ def validate_Nml2Quantity_conductance(self, value): # Validate type Nml2Quantity_conductance, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_conductance_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_conductance_patterns_, )) - validate_Nml2Quantity_conductance_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S|mS|uS|nS|pS)$']] + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_conductance_patterns_, )) + validate_Nml2Quantity_conductance_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S|mS|uS|nS|pS))$']] def validate_Nml2Quantity_voltage(self, value): # Validate type Nml2Quantity_voltage, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_voltage_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_voltage_patterns_, )) - validate_Nml2Quantity_voltage_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_voltage_patterns_, )) + validate_Nml2Quantity_voltage_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV))$']] + def _hasContent(self): if ( - super(BaseConductanceBasedSynapseTwo, self).hasContent_() + super(BaseConductanceBasedSynapseTwo, self)._hasContent() ): return True else: @@ -22228,44 +30020,54 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='B eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'BaseConductanceBasedSynapseTwo': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseConductanceBasedSynapseTwo') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseConductanceBasedSynapseTwo') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BaseConductanceBasedSynapseTwo', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BaseConductanceBasedSynapseTwo', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BaseConductanceBasedSynapseTwo'): - super(BaseConductanceBasedSynapseTwo, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseConductanceBasedSynapseTwo') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BaseConductanceBasedSynapseTwo'): + super(BaseConductanceBasedSynapseTwo, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseConductanceBasedSynapseTwo') if self.gbase1 is not None and 'gbase1' not in already_processed: already_processed.add('gbase1') - outfile.write(' gbase1=%s' % (quote_attrib(self.gbase1), )) + outfile.write(' gbase1=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.gbase1), input_name='gbase1')), )) if self.gbase2 is not None and 'gbase2' not in already_processed: already_processed.add('gbase2') - outfile.write(' gbase2=%s' % (quote_attrib(self.gbase2), )) + outfile.write(' gbase2=%s' % 
(self.gds_encode(self.gds_format_string(quote_attrib(self.gbase2), input_name='gbase2')), )) if self.erev is not None and 'erev' not in already_processed: already_processed.add('erev') - outfile.write(' erev=%s' % (quote_attrib(self.erev), )) + outfile.write(' erev=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.erev), input_name='erev')), )) if self.extensiontype_ is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseConductanceBasedSynapseTwo', fromsubclass_=False, pretty_print=True): - super(BaseConductanceBasedSynapseTwo, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get(self.extensiontype_, '') + outfile.write(' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_)) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseConductanceBasedSynapseTwo', fromsubclass_=False, pretty_print=True): + super(BaseConductanceBasedSynapseTwo, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('gbase1', node) if value is not None and 'gbase1' not in already_processed: already_processed.add('gbase1') @@ -22285,26 +30087,32 @@ def buildAttributes(self, node, attrs, already_processed): if value is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') self.extensiontype_ = value - super(BaseConductanceBasedSynapseTwo, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(BaseConductanceBasedSynapseTwo, self).buildChildren(child_, node, nodeName_, True) + super(BaseConductanceBasedSynapseTwo, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(BaseConductanceBasedSynapseTwo, self)._buildChildren(child_, node, nodeName_, True) pass # end class BaseConductanceBasedSynapseTwo class BaseConductanceBasedSynapse(BaseVoltageDepSynapse): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('gbase', 'Nml2Quantity_conductance', 0, 0, {'use': u'required'}), - MemberSpec_('erev', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), + MemberSpec_('gbase', 'Nml2Quantity_conductance', 0, 0, {'use': 'required', 'name': 'gbase'}), + MemberSpec_('erev', 'Nml2Quantity_voltage', 0, 
0, {'use': 'required', 'name': 'erev'}), ] subclass = None superclass = BaseVoltageDepSynapse - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, gbase=None, erev=None, extensiontype_=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, gbase=None, erev=None, extensiontype_=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(BaseConductanceBasedSynapse, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, extensiontype_, **kwargs_) + self.ns_prefix_ = None + super(globals().get("BaseConductanceBasedSynapse"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, extensiontype_, **kwargs_) self.gbase = _cast(None, gbase) + self.gbase_nsprefix_ = None self.erev = _cast(None, erev) + self.erev_nsprefix_ = None self.extensiontype_ = extensiontype_ def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: @@ -22317,23 +30125,45 @@ def factory(*args_, **kwargs_): else: return BaseConductanceBasedSynapse(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_gbase(self): + return self.gbase + def set_gbase(self, gbase): + self.gbase = gbase + def get_erev(self): + return self.erev + def set_erev(self, erev): + self.erev = erev + def get_extensiontype_(self): return self.extensiontype_ + def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ def validate_Nml2Quantity_conductance(self, value): # Validate type Nml2Quantity_conductance, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_conductance_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_conductance_patterns_, )) - validate_Nml2Quantity_conductance_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S|mS|uS|nS|pS)$']] + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_conductance_patterns_, )) + validate_Nml2Quantity_conductance_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S|mS|uS|nS|pS))$']] def validate_Nml2Quantity_voltage(self, value): # Validate type Nml2Quantity_voltage, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_voltage_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_voltage_patterns_, )) - validate_Nml2Quantity_voltage_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_voltage_patterns_, )) + validate_Nml2Quantity_voltage_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV))$']] + def _hasContent(self): if ( - super(BaseConductanceBasedSynapse, self).hasContent_() + super(BaseConductanceBasedSynapse, self)._hasContent() ): return True else: @@ -22346,41 +30176,51 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='B eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'BaseConductanceBasedSynapse': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseConductanceBasedSynapse') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseConductanceBasedSynapse') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BaseConductanceBasedSynapse', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BaseConductanceBasedSynapse', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BaseConductanceBasedSynapse'): - super(BaseConductanceBasedSynapse, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseConductanceBasedSynapse') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BaseConductanceBasedSynapse'): + super(BaseConductanceBasedSynapse, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseConductanceBasedSynapse') if self.gbase is not None and 'gbase' not in already_processed: already_processed.add('gbase') - outfile.write(' gbase=%s' % (quote_attrib(self.gbase), )) + outfile.write(' gbase=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.gbase), input_name='gbase')), )) if self.erev is not None and 'erev' not in already_processed: already_processed.add('erev') - outfile.write(' erev=%s' % (quote_attrib(self.erev), )) + outfile.write(' erev=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.erev), input_name='erev')), )) if 
self.extensiontype_ is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseConductanceBasedSynapse', fromsubclass_=False, pretty_print=True): - super(BaseConductanceBasedSynapse, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get(self.extensiontype_, '') + outfile.write(' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_)) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseConductanceBasedSynapse', fromsubclass_=False, pretty_print=True): + super(BaseConductanceBasedSynapse, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('gbase', node) if value is not None and 'gbase' not in already_processed: already_processed.add('gbase') @@ -22395,27 +30235,32 @@ def buildAttributes(self, node, attrs, already_processed): if value is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') self.extensiontype_ = value - super(BaseConductanceBasedSynapse, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(BaseConductanceBasedSynapse, self).buildChildren(child_, node, nodeName_, True) + super(BaseConductanceBasedSynapse, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(BaseConductanceBasedSynapse, self)._buildChildren(child_, node, nodeName_, True) pass # end class BaseConductanceBasedSynapse class IonChannelVShift(IonChannel): - """Same as ionChannel, but with a vShift parameter to change voltage - activation of gates. The exact usage of vShift in expressions - for rates is determined by the individual gates.""" + """IonChannelVShift -- Same as ionChannel, but with a vShift parameter to change voltage activation of gates. The exact usage of vShift in expressions for rates is determined by the individual gates. 
+ + """ + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('v_shift', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), + MemberSpec_('v_shift', 'Nml2Quantity_voltage', 0, 0, {'use': 'required', 'name': 'v_shift'}), ] subclass = None superclass = IonChannel - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, q10_conductance_scalings=None, species=None, type=None, conductance=None, gates=None, gate_hh_rates=None, gate_h_hrates_taus=None, gate_hh_tau_infs=None, gate_h_hrates_infs=None, gate_h_hrates_tau_infs=None, gate_hh_instantaneouses=None, gate_fractionals=None, v_shift=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, q10_conductance_scalings=None, species=None, type=None, conductance=None, gates=None, gate_hh_rates=None, gate_h_hrates_taus=None, gate_hh_tau_infs=None, gate_h_hrates_infs=None, gate_h_hrates_tau_infs=None, gate_hh_instantaneouses=None, gate_fractionals=None, v_shift=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(IonChannelVShift, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, q10_conductance_scalings, species, type, conductance, gates, gate_hh_rates, gate_h_hrates_taus, gate_hh_tau_infs, gate_h_hrates_infs, gate_h_hrates_tau_infs, gate_hh_instantaneouses, gate_fractionals, **kwargs_) + self.ns_prefix_ = None + super(globals().get("IonChannelVShift"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, q10_conductance_scalings, species, type, conductance, gates, gate_hh_rates, gate_h_hrates_taus, gate_hh_tau_infs, gate_h_hrates_infs, gate_h_hrates_tau_infs, gate_hh_instantaneouses, gate_fractionals, **kwargs_) self.v_shift = _cast(None, v_shift) + self.v_shift_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -22427,16 +30272,28 @@ def factory(*args_, **kwargs_): else: return IonChannelVShift(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_vShift(self): + return self.v_shift + def set_vShift(self, v_shift): + self.v_shift = v_shift def validate_Nml2Quantity_voltage(self, value): # Validate type Nml2Quantity_voltage, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_voltage_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_voltage_patterns_, )) - validate_Nml2Quantity_voltage_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_voltage_patterns_, )) + validate_Nml2Quantity_voltage_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV))$']] + def _hasContent(self): if ( - super(IonChannelVShift, self).hasContent_() + super(IonChannelVShift, self)._hasContent() ): return True else: @@ -22449,59 +30306,69 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='I eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'IonChannelVShift': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IonChannelVShift') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IonChannelVShift') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='IonChannelVShift', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='IonChannelVShift', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='IonChannelVShift'): - super(IonChannelVShift, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IonChannelVShift') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='IonChannelVShift'): + super(IonChannelVShift, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IonChannelVShift') if self.v_shift is not None and 'v_shift' not in already_processed: already_processed.add('v_shift') - outfile.write(' vShift=%s' % (quote_attrib(self.v_shift), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IonChannelVShift', fromsubclass_=False, pretty_print=True): - super(IonChannelVShift, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write(' vShift=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.v_shift), input_name='vShift')), )) + def _exportChildren(self, 
outfile, level, namespaceprefix_='', namespacedef_='', name_='IonChannelVShift', fromsubclass_=False, pretty_print=True): + super(IonChannelVShift, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('vShift', node) if value is not None and 'vShift' not in already_processed: already_processed.add('vShift') self.v_shift = value self.validate_Nml2Quantity_voltage(self.v_shift) # validate type Nml2Quantity_voltage - super(IonChannelVShift, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(IonChannelVShift, self).buildChildren(child_, node, nodeName_, True) + super(IonChannelVShift, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(IonChannelVShift, self)._buildChildren(child_, node, nodeName_, True) pass # end class IonChannelVShift class IonChannelHH(IonChannel): - """Note ionChannel and ionChannelHH are currently functionally - identical. This is needed since many existing examples use - ionChannel, some use ionChannelHH. One of these should be - removed, probably ionChannelHH.""" + """IonChannelHH -- Note ionChannel and ionChannelHH are currently functionally identical. This is needed since many existing examples use ionChannel, some use ionChannelHH. + One of these should be removed, probably ionChannelHH. 
+ + """ + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ ] subclass = None superclass = IonChannel - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, q10_conductance_scalings=None, species=None, type=None, conductance=None, gates=None, gate_hh_rates=None, gate_h_hrates_taus=None, gate_hh_tau_infs=None, gate_h_hrates_infs=None, gate_h_hrates_tau_infs=None, gate_hh_instantaneouses=None, gate_fractionals=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, q10_conductance_scalings=None, species=None, type=None, conductance=None, gates=None, gate_hh_rates=None, gate_h_hrates_taus=None, gate_hh_tau_infs=None, gate_h_hrates_infs=None, gate_h_hrates_tau_infs=None, gate_hh_instantaneouses=None, gate_fractionals=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(IonChannelHH, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, q10_conductance_scalings, species, type, conductance, gates, gate_hh_rates, gate_h_hrates_taus, gate_hh_tau_infs, gate_h_hrates_infs, gate_h_hrates_tau_infs, gate_hh_instantaneouses, gate_fractionals, **kwargs_) + self.ns_prefix_ = None + super(globals().get("IonChannelHH"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, q10_conductance_scalings, species, type, conductance, gates, gate_hh_rates, gate_h_hrates_taus, gate_hh_tau_infs, gate_h_hrates_infs, gate_h_hrates_tau_infs, gate_hh_instantaneouses, gate_fractionals, **kwargs_) def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -22513,9 +30380,13 @@ def factory(*args_, **kwargs_): else: return IonChannelHH(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def _hasContent(self): if ( - super(IonChannelHH, self).hasContent_() + super(IonChannelHH, self)._hasContent() ): return True else: @@ -22528,47 +30399,57 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='I eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'IonChannelHH': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IonChannelHH') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IonChannelHH') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='IonChannelHH', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='IonChannelHH', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='IonChannelHH'): - super(IonChannelHH, 
self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IonChannelHH') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IonChannelHH', fromsubclass_=False, pretty_print=True): - super(IonChannelHH, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='IonChannelHH'): + super(IonChannelHH, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IonChannelHH') + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IonChannelHH', fromsubclass_=False, pretty_print=True): + super(IonChannelHH, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - super(IonChannelHH, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(IonChannelHH, self).buildChildren(child_, node, nodeName_, True) + def _buildAttributes(self, node, attrs, already_processed): + super(IonChannelHH, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(IonChannelHH, self)._buildChildren(child_, node, nodeName_, True) pass # end class IonChannelHH class IF_curr_exp(basePyNNIaFCell): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ ] subclass = None superclass = basePyNNIaFCell - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, cm=None, i_offset=None, tau_syn_E=None, tau_syn_I=None, v_init=None, tau_m=None, tau_refrac=None, v_reset=None, v_rest=None, v_thresh=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, cm=None, i_offset=None, tau_syn_E=None, tau_syn_I=None, v_init=None, tau_m=None, tau_refrac=None, v_reset=None, v_rest=None, v_thresh=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(IF_curr_exp, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, cm, i_offset, tau_syn_E, tau_syn_I, v_init, tau_m, tau_refrac, v_reset, v_rest, v_thresh, **kwargs_) + self.ns_prefix_ = None + super(globals().get("IF_curr_exp"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, cm, i_offset, tau_syn_E, tau_syn_I, v_init, tau_m, tau_refrac, v_reset, v_rest, v_thresh, **kwargs_) def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -22580,9 +30461,13 @@ def factory(*args_, **kwargs_): else: return IF_curr_exp(*args_, 
**kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def _hasContent(self): if ( - super(IF_curr_exp, self).hasContent_() + super(IF_curr_exp, self)._hasContent() ): return True else: @@ -22595,47 +30480,57 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='I eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'IF_curr_exp': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IF_curr_exp') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IF_curr_exp') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='IF_curr_exp', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='IF_curr_exp', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='IF_curr_exp'): - super(IF_curr_exp, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IF_curr_exp') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IF_curr_exp', fromsubclass_=False, pretty_print=True): - super(IF_curr_exp, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='IF_curr_exp'): + super(IF_curr_exp, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IF_curr_exp') + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IF_curr_exp', fromsubclass_=False, pretty_print=True): + super(IF_curr_exp, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - super(IF_curr_exp, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(IF_curr_exp, self).buildChildren(child_, node, nodeName_, True) + def _buildAttributes(self, node, attrs, already_processed): + super(IF_curr_exp, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, 
nodeName_, fromsubclass_=False, gds_collector_=None): + super(IF_curr_exp, self)._buildChildren(child_, node, nodeName_, True) pass # end class IF_curr_exp class IF_curr_alpha(basePyNNIaFCell): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ ] subclass = None superclass = basePyNNIaFCell - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, cm=None, i_offset=None, tau_syn_E=None, tau_syn_I=None, v_init=None, tau_m=None, tau_refrac=None, v_reset=None, v_rest=None, v_thresh=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, cm=None, i_offset=None, tau_syn_E=None, tau_syn_I=None, v_init=None, tau_m=None, tau_refrac=None, v_reset=None, v_rest=None, v_thresh=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(IF_curr_alpha, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, cm, i_offset, tau_syn_E, tau_syn_I, v_init, tau_m, tau_refrac, v_reset, v_rest, v_thresh, **kwargs_) + self.ns_prefix_ = None + super(globals().get("IF_curr_alpha"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, cm, i_offset, tau_syn_E, tau_syn_I, v_init, tau_m, tau_refrac, v_reset, v_rest, v_thresh, **kwargs_) def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -22647,9 +30542,13 @@ def factory(*args_, **kwargs_): else: return IF_curr_alpha(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def _hasContent(self): if ( - super(IF_curr_alpha, self).hasContent_() + super(IF_curr_alpha, self)._hasContent() ): return True else: @@ -22662,51 +30561,63 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='I eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'IF_curr_alpha': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IF_curr_alpha') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IF_curr_alpha') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='IF_curr_alpha', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='IF_curr_alpha', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='IF_curr_alpha'): - super(IF_curr_alpha, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IF_curr_alpha') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IF_curr_alpha', fromsubclass_=False, pretty_print=True): - super(IF_curr_alpha, 
self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='IF_curr_alpha'): + super(IF_curr_alpha, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IF_curr_alpha') + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IF_curr_alpha', fromsubclass_=False, pretty_print=True): + super(IF_curr_alpha, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - super(IF_curr_alpha, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(IF_curr_alpha, self).buildChildren(child_, node, nodeName_, True) + def _buildAttributes(self, node, attrs, already_processed): + super(IF_curr_alpha, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(IF_curr_alpha, self)._buildChildren(child_, node, nodeName_, True) pass # end class IF_curr_alpha class basePyNNIaFCondCell(basePyNNIaFCell): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('e_rev_E', 'xs:float', 0, 0, {'use': u'required'}), - MemberSpec_('e_rev_I', 'xs:float', 0, 0, {'use': u'required'}), + MemberSpec_('e_rev_E', 'xs:float', 0, 0, {'use': 'required', 'name': 'e_rev_E'}), + MemberSpec_('e_rev_I', 'xs:float', 0, 0, {'use': 'required', 'name': 'e_rev_I'}), ] subclass = None superclass = basePyNNIaFCell - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, cm=None, i_offset=None, tau_syn_E=None, tau_syn_I=None, v_init=None, tau_m=None, tau_refrac=None, v_reset=None, v_rest=None, v_thresh=None, e_rev_E=None, e_rev_I=None, extensiontype_=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, cm=None, i_offset=None, tau_syn_E=None, tau_syn_I=None, v_init=None, tau_m=None, tau_refrac=None, v_reset=None, v_rest=None, v_thresh=None, e_rev_E=None, e_rev_I=None, extensiontype_=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(basePyNNIaFCondCell, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, cm, i_offset, tau_syn_E, tau_syn_I, v_init, tau_m, tau_refrac, v_reset, v_rest, v_thresh, extensiontype_, **kwargs_) + self.ns_prefix_ = None + super(globals().get("basePyNNIaFCondCell"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, cm, i_offset, tau_syn_E, tau_syn_I, v_init, tau_m, tau_refrac, v_reset, v_rest, v_thresh, extensiontype_, **kwargs_) 
self.e_rev_E = _cast(float, e_rev_E) + self.e_rev_E_nsprefix_ = None self.e_rev_I = _cast(float, e_rev_I) + self.e_rev_I_nsprefix_ = None self.extensiontype_ = extensiontype_ def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: @@ -22719,9 +30630,23 @@ def factory(*args_, **kwargs_): else: return basePyNNIaFCondCell(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_e_rev_E(self): + return self.e_rev_E + def set_e_rev_E(self, e_rev_E): + self.e_rev_E = e_rev_E + def get_e_rev_I(self): + return self.e_rev_I + def set_e_rev_I(self, e_rev_I): + self.e_rev_I = e_rev_I + def get_extensiontype_(self): return self.extensiontype_ + def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ + def _hasContent(self): if ( - super(basePyNNIaFCondCell, self).hasContent_() + super(basePyNNIaFCondCell, self)._hasContent() ): return True else: @@ -22734,21 +30659,23 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='b eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'basePyNNIaFCondCell': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='basePyNNIaFCondCell') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='basePyNNIaFCondCell') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='basePyNNIaFCondCell', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='basePyNNIaFCondCell', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='basePyNNIaFCondCell'): - super(basePyNNIaFCondCell, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='basePyNNIaFCondCell') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='basePyNNIaFCondCell'): + super(basePyNNIaFCondCell, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='basePyNNIaFCondCell') if self.e_rev_E is not None and 'e_rev_E' not in already_processed: already_processed.add('e_rev_E') outfile.write(' e_rev_E="%s"' % self.gds_format_float(self.e_rev_E, input_name='e_rev_E')) @@ -22758,52 +30685,62 @@ def exportAttributes(self, outfile, level, already_processed, namespaceprefix_=' if self.extensiontype_ is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='basePyNNIaFCondCell', fromsubclass_=False, pretty_print=True): - super(basePyNNIaFCondCell, self).exportChildren(outfile, level, namespaceprefix_, name_, True, 
pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get(self.extensiontype_, '') + outfile.write(' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_)) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='basePyNNIaFCondCell', fromsubclass_=False, pretty_print=True): + super(basePyNNIaFCondCell, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('e_rev_E', node) if value is not None and 'e_rev_E' not in already_processed: already_processed.add('e_rev_E') - try: - self.e_rev_E = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (e_rev_E): %s' % exp) + value = self.gds_parse_float(value, node, 'e_rev_E') + self.e_rev_E = value value = find_attr_value_('e_rev_I', node) if value is not None and 'e_rev_I' not in already_processed: already_processed.add('e_rev_I') - try: - self.e_rev_I = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (e_rev_I): %s' % exp) + value = self.gds_parse_float(value, node, 'e_rev_I') + self.e_rev_I = value value = find_attr_value_('xsi:type', node) if value is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') self.extensiontype_ = value - super(basePyNNIaFCondCell, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(basePyNNIaFCondCell, self).buildChildren(child_, node, nodeName_, True) + super(basePyNNIaFCondCell, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(basePyNNIaFCondCell, self)._buildChildren(child_, node, nodeName_, True) pass # end class basePyNNIaFCondCell class ContinuousConnectionInstance(ContinuousConnection): - """Individual continuous/analog synaptic connection - instance based""" + """ContinuousConnectionInstance -- Individual continuous/analog synaptic connection - instance based + + """ + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ ] subclass = None superclass = ContinuousConnection - def __init__(self, neuro_lex_id=None, id=None, pre_cell=None, pre_segment='0', pre_fraction_along='0.5', post_cell=None, post_segment='0', post_fraction_along='0.5', pre_component=None, post_component=None, extensiontype_=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, pre_cell=None, pre_segment='0', pre_fraction_along='0.5', post_cell=None, post_segment='0', post_fraction_along='0.5', pre_component=None, post_component=None, extensiontype_=None, 
gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(ContinuousConnectionInstance, self).__init__(neuro_lex_id, id, pre_cell, pre_segment, pre_fraction_along, post_cell, post_segment, post_fraction_along, pre_component, post_component, extensiontype_, **kwargs_) + self.ns_prefix_ = None + super(globals().get("ContinuousConnectionInstance"), self).__init__(neuro_lex_id, id, pre_cell, pre_segment, pre_fraction_along, post_cell, post_segment, post_fraction_along, pre_component, post_component, extensiontype_, **kwargs_) self.extensiontype_ = extensiontype_ def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: @@ -22816,9 +30753,15 @@ def factory(*args_, **kwargs_): else: return ContinuousConnectionInstance(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_extensiontype_(self): return self.extensiontype_ + def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ + def _hasContent(self): if ( - super(ContinuousConnectionInstance, self).hasContent_() + super(ContinuousConnectionInstance, self)._hasContent() ): return True else: @@ -22831,42 +30774,52 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='C eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'ContinuousConnectionInstance': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ContinuousConnectionInstance') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ContinuousConnectionInstance') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ContinuousConnectionInstance', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ContinuousConnectionInstance', pretty_print=pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ContinuousConnectionInstance'): - super(ContinuousConnectionInstance, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ContinuousConnectionInstance') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ContinuousConnectionInstance'): + super(ContinuousConnectionInstance, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ContinuousConnectionInstance') if self.extensiontype_ is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ContinuousConnectionInstance', fromsubclass_=False, 
pretty_print=True): - super(ContinuousConnectionInstance, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get(self.extensiontype_, '') + outfile.write(' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_)) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ContinuousConnectionInstance', fromsubclass_=False, pretty_print=True): + super(ContinuousConnectionInstance, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) pass - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('xsi:type', node) if value is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') self.extensiontype_ = value - super(ContinuousConnectionInstance, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(ContinuousConnectionInstance, self).buildChildren(child_, node, nodeName_, True) + super(ContinuousConnectionInstance, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(ContinuousConnectionInstance, self)._buildChildren(child_, node, nodeName_, True) pass def _get_cell_id(self, id_string): @@ -22885,16 +30838,21 @@ def __str__(self): class ElectricalConnectionInstance(ElectricalConnection): - """Projection between two populations consisting of analog connections - (e.g. graded synapses)""" + """ElectricalConnectionInstance -- Projection between two populations consisting of analog connections (e.g. 
graded synapses) + + """ + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ ] subclass = None superclass = ElectricalConnection - def __init__(self, neuro_lex_id=None, id=None, pre_cell=None, pre_segment='0', pre_fraction_along='0.5', post_cell=None, post_segment='0', post_fraction_along='0.5', synapse=None, extensiontype_=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, pre_cell=None, pre_segment='0', pre_fraction_along='0.5', post_cell=None, post_segment='0', post_fraction_along='0.5', synapse=None, extensiontype_=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(ElectricalConnectionInstance, self).__init__(neuro_lex_id, id, pre_cell, pre_segment, pre_fraction_along, post_cell, post_segment, post_fraction_along, synapse, extensiontype_, **kwargs_) + self.ns_prefix_ = None + super(globals().get("ElectricalConnectionInstance"), self).__init__(neuro_lex_id, id, pre_cell, pre_segment, pre_fraction_along, post_cell, post_segment, post_fraction_along, synapse, extensiontype_, **kwargs_) self.extensiontype_ = extensiontype_ def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: @@ -22907,9 +30865,15 @@ def factory(*args_, **kwargs_): else: return ElectricalConnectionInstance(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_extensiontype_(self): return self.extensiontype_ + def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ + def _hasContent(self): if ( - super(ElectricalConnectionInstance, self).hasContent_() + super(ElectricalConnectionInstance, self)._hasContent() ): return True else: @@ -22922,42 +30886,52 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='E eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'ElectricalConnectionInstance': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ElectricalConnectionInstance') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ElectricalConnectionInstance') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ElectricalConnectionInstance', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ElectricalConnectionInstance', pretty_print=pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ElectricalConnectionInstance'): - super(ElectricalConnectionInstance, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ElectricalConnectionInstance') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ElectricalConnectionInstance'): + 
super(ElectricalConnectionInstance, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ElectricalConnectionInstance') if self.extensiontype_ is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ElectricalConnectionInstance', fromsubclass_=False, pretty_print=True): - super(ElectricalConnectionInstance, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get(self.extensiontype_, '') + outfile.write(' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_)) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ElectricalConnectionInstance', fromsubclass_=False, pretty_print=True): + super(ElectricalConnectionInstance, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) pass - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('xsi:type', node) if value is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') self.extensiontype_ = value - super(ElectricalConnectionInstance, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(ElectricalConnectionInstance, self).buildChildren(child_, node, nodeName_, True) + super(ElectricalConnectionInstance, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(ElectricalConnectionInstance, self)._buildChildren(child_, node, nodeName_, True) pass def _get_cell_id(self, id_string): @@ -22975,20 +30949,27 @@ def __str__(self): class ExpThreeSynapse(BaseConductanceBasedSynapseTwo): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('tau_decay1', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('tau_decay2', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('tau_rise', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), + MemberSpec_('tau_decay1', 'Nml2Quantity_time', 0, 0, {'use': 'required', 'name': 'tau_decay1'}), + MemberSpec_('tau_decay2', 'Nml2Quantity_time', 0, 0, {'use': 'required', 'name': 'tau_decay2'}), + MemberSpec_('tau_rise', 'Nml2Quantity_time', 0, 0, {'use': 'required', 'name': 'tau_rise'}), ] subclass = None superclass = BaseConductanceBasedSynapseTwo - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, 
annotation=None, gbase1=None, gbase2=None, erev=None, tau_decay1=None, tau_decay2=None, tau_rise=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, gbase1=None, gbase2=None, erev=None, tau_decay1=None, tau_decay2=None, tau_rise=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(ExpThreeSynapse, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, gbase1, gbase2, erev, **kwargs_) + self.ns_prefix_ = None + super(globals().get("ExpThreeSynapse"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, gbase1, gbase2, erev, **kwargs_) self.tau_decay1 = _cast(None, tau_decay1) + self.tau_decay1_nsprefix_ = None self.tau_decay2 = _cast(None, tau_decay2) + self.tau_decay2_nsprefix_ = None self.tau_rise = _cast(None, tau_rise) + self.tau_rise_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -23000,16 +30981,36 @@ def factory(*args_, **kwargs_): else: return ExpThreeSynapse(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_tauDecay1(self): + return self.tau_decay1 + def set_tauDecay1(self, tau_decay1): + self.tau_decay1 = tau_decay1 + def get_tauDecay2(self): + return self.tau_decay2 + def set_tauDecay2(self, tau_decay2): + self.tau_decay2 = tau_decay2 + def get_tauRise(self): + return self.tau_rise + def set_tauRise(self, tau_rise): + self.tau_rise = tau_rise def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_time_patterns_, )) + validate_Nml2Quantity_time_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms))$']] + def _hasContent(self): if ( - super(ExpThreeSynapse, self).hasContent_() + super(ExpThreeSynapse, self)._hasContent() ): return True else: @@ -23022,40 +31023,46 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='E eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'ExpThreeSynapse': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExpThreeSynapse') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExpThreeSynapse') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ExpThreeSynapse', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ExpThreeSynapse', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ExpThreeSynapse'): - super(ExpThreeSynapse, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExpThreeSynapse') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ExpThreeSynapse'): + super(ExpThreeSynapse, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExpThreeSynapse') if self.tau_decay1 is not None and 'tau_decay1' not in already_processed: already_processed.add('tau_decay1') - outfile.write(' tauDecay1=%s' % (quote_attrib(self.tau_decay1), )) + outfile.write(' tauDecay1=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.tau_decay1), input_name='tauDecay1')), )) if self.tau_decay2 is not None and 'tau_decay2' not in already_processed: already_processed.add('tau_decay2') - outfile.write(' tauDecay2=%s' % (quote_attrib(self.tau_decay2), )) + outfile.write(' tauDecay2=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.tau_decay2), input_name='tauDecay2')), )) if self.tau_rise is not None and 'tau_rise' not in already_processed: already_processed.add('tau_rise') - 
outfile.write(' tauRise=%s' % (quote_attrib(self.tau_rise), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExpThreeSynapse', fromsubclass_=False, pretty_print=True): - super(ExpThreeSynapse, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write(' tauRise=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.tau_rise), input_name='tauRise')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExpThreeSynapse', fromsubclass_=False, pretty_print=True): + super(ExpThreeSynapse, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('tauDecay1', node) if value is not None and 'tauDecay1' not in already_processed: already_processed.add('tauDecay1') @@ -23071,26 +31078,32 @@ def buildAttributes(self, node, attrs, already_processed): already_processed.add('tauRise') self.tau_rise = value self.validate_Nml2Quantity_time(self.tau_rise) # validate type Nml2Quantity_time - super(ExpThreeSynapse, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(ExpThreeSynapse, self).buildChildren(child_, node, nodeName_, True) + super(ExpThreeSynapse, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(ExpThreeSynapse, self)._buildChildren(child_, node, nodeName_, True) pass # end class ExpThreeSynapse class ExpTwoSynapse(BaseConductanceBasedSynapse): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('tau_decay', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('tau_rise', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), + MemberSpec_('tau_decay', 'Nml2Quantity_time', 0, 0, {'use': 'required', 'name': 'tau_decay'}), + MemberSpec_('tau_rise', 'Nml2Quantity_time', 0, 0, {'use': 'required', 'name': 'tau_rise'}), ] subclass = None superclass = BaseConductanceBasedSynapse - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, gbase=None, erev=None, tau_decay=None, tau_rise=None, extensiontype_=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, gbase=None, erev=None, tau_decay=None, tau_rise=None, extensiontype_=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(ExpTwoSynapse, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, gbase, erev, extensiontype_, **kwargs_) + 
self.ns_prefix_ = None + super(globals().get("ExpTwoSynapse"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, gbase, erev, extensiontype_, **kwargs_) self.tau_decay = _cast(None, tau_decay) + self.tau_decay_nsprefix_ = None self.tau_rise = _cast(None, tau_rise) + self.tau_rise_nsprefix_ = None self.extensiontype_ = extensiontype_ def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: @@ -23103,16 +31116,34 @@ def factory(*args_, **kwargs_): else: return ExpTwoSynapse(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_tauDecay(self): + return self.tau_decay + def set_tauDecay(self, tau_decay): + self.tau_decay = tau_decay + def get_tauRise(self): + return self.tau_rise + def set_tauRise(self, tau_rise): + self.tau_rise = tau_rise + def get_extensiontype_(self): return self.extensiontype_ + def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_time_patterns_, )) + validate_Nml2Quantity_time_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms))$']] + def _hasContent(self): if ( - super(ExpTwoSynapse, self).hasContent_() + super(ExpTwoSynapse, self)._hasContent() ): return True else: @@ -23125,41 +31156,51 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='E eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'ExpTwoSynapse': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExpTwoSynapse') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExpTwoSynapse') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ExpTwoSynapse', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ExpTwoSynapse', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, 
already_processed, namespaceprefix_='', name_='ExpTwoSynapse'): - super(ExpTwoSynapse, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExpTwoSynapse') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ExpTwoSynapse'): + super(ExpTwoSynapse, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExpTwoSynapse') if self.tau_decay is not None and 'tau_decay' not in already_processed: already_processed.add('tau_decay') - outfile.write(' tauDecay=%s' % (quote_attrib(self.tau_decay), )) + outfile.write(' tauDecay=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.tau_decay), input_name='tauDecay')), )) if self.tau_rise is not None and 'tau_rise' not in already_processed: already_processed.add('tau_rise') - outfile.write(' tauRise=%s' % (quote_attrib(self.tau_rise), )) + outfile.write(' tauRise=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.tau_rise), input_name='tauRise')), )) if self.extensiontype_ is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExpTwoSynapse', fromsubclass_=False, pretty_print=True): - super(ExpTwoSynapse, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get(self.extensiontype_, '') + outfile.write(' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_)) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExpTwoSynapse', fromsubclass_=False, pretty_print=True): + super(ExpTwoSynapse, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('tauDecay', node) if value is not None and 'tauDecay' not in already_processed: already_processed.add('tauDecay') @@ -23174,24 +31215,29 @@ def buildAttributes(self, node, attrs, already_processed): if value is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') self.extensiontype_ = value - super(ExpTwoSynapse, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(ExpTwoSynapse, self).buildChildren(child_, node, nodeName_, True) + super(ExpTwoSynapse, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + 
super(ExpTwoSynapse, self)._buildChildren(child_, node, nodeName_, True) pass # end class ExpTwoSynapse class ExpOneSynapse(BaseConductanceBasedSynapse): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('tau_decay', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), + MemberSpec_('tau_decay', 'Nml2Quantity_time', 0, 0, {'use': 'required', 'name': 'tau_decay'}), ] subclass = None superclass = BaseConductanceBasedSynapse - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, gbase=None, erev=None, tau_decay=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, gbase=None, erev=None, tau_decay=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(ExpOneSynapse, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, gbase, erev, **kwargs_) + self.ns_prefix_ = None + super(globals().get("ExpOneSynapse"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, gbase, erev, **kwargs_) self.tau_decay = _cast(None, tau_decay) + self.tau_decay_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -23203,16 +31249,28 @@ def factory(*args_, **kwargs_): else: return ExpOneSynapse(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_tauDecay(self): + return self.tau_decay + def set_tauDecay(self, tau_decay): + self.tau_decay = tau_decay def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_time_patterns_, )) + validate_Nml2Quantity_time_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms))$']] + def _hasContent(self): if ( - super(ExpOneSynapse, self).hasContent_() + super(ExpOneSynapse, self)._hasContent() ): return True else: @@ -23225,57 +31283,68 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='E eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'ExpOneSynapse': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExpOneSynapse') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExpOneSynapse') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ExpOneSynapse', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ExpOneSynapse', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ExpOneSynapse'): - super(ExpOneSynapse, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExpOneSynapse') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ExpOneSynapse'): + super(ExpOneSynapse, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExpOneSynapse') if self.tau_decay is not None and 'tau_decay' not in already_processed: already_processed.add('tau_decay') - outfile.write(' tauDecay=%s' % (quote_attrib(self.tau_decay), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExpOneSynapse', fromsubclass_=False, pretty_print=True): - super(ExpOneSynapse, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write(' tauDecay=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.tau_decay), input_name='tauDecay')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', 
namespacedef_='', name_='ExpOneSynapse', fromsubclass_=False, pretty_print=True): + super(ExpOneSynapse, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('tauDecay', node) if value is not None and 'tauDecay' not in already_processed: already_processed.add('tauDecay') self.tau_decay = value self.validate_Nml2Quantity_time(self.tau_decay) # validate type Nml2Quantity_time - super(ExpOneSynapse, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(ExpOneSynapse, self).buildChildren(child_, node, nodeName_, True) + super(ExpOneSynapse, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(ExpOneSynapse, self)._buildChildren(child_, node, nodeName_, True) pass # end class ExpOneSynapse class AlphaSynapse(BaseConductanceBasedSynapse): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('tau', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), + MemberSpec_('tau', 'Nml2Quantity_time', 0, 0, {'use': 'required', 'name': 'tau'}), ] subclass = None superclass = BaseConductanceBasedSynapse - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, gbase=None, erev=None, tau=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, gbase=None, erev=None, tau=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(AlphaSynapse, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, gbase, erev, **kwargs_) + self.ns_prefix_ = None + super(globals().get("AlphaSynapse"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, gbase, erev, **kwargs_) self.tau = _cast(None, tau) + self.tau_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -23287,16 +31356,28 @@ def factory(*args_, **kwargs_): else: return AlphaSynapse(*args_, **kwargs_) factory = staticmethod(factory) + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_tau(self): + return self.tau + def set_tau(self, tau): + self.tau = tau def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) + return False if not self.gds_validate_simple_patterns( self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] - def hasContent_(self): + self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_Nml2Quantity_time_patterns_, )) + validate_Nml2Quantity_time_patterns_ = [['^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms))$']] + def _hasContent(self): if ( - super(AlphaSynapse, self).hasContent_() + super(AlphaSynapse, self)._hasContent() ): return True else: @@ -23309,65 +31390,80 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='A eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'AlphaSynapse': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='AlphaSynapse') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='AlphaSynapse') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='AlphaSynapse', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='AlphaSynapse', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='AlphaSynapse'): - super(AlphaSynapse, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='AlphaSynapse') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='AlphaSynapse'): + super(AlphaSynapse, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='AlphaSynapse') if self.tau is not None and 'tau' not in already_processed: already_processed.add('tau') - outfile.write(' tau=%s' % (quote_attrib(self.tau), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='AlphaSynapse', fromsubclass_=False, pretty_print=True): - super(AlphaSynapse, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write(' tau=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.tau), input_name='tau')), )) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='AlphaSynapse', fromsubclass_=False, 
pretty_print=True): + super(AlphaSynapse, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('tau', node) if value is not None and 'tau' not in already_processed: already_processed.add('tau') self.tau = value self.validate_Nml2Quantity_time(self.tau) # validate type Nml2Quantity_time - super(AlphaSynapse, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(AlphaSynapse, self).buildChildren(child_, node, nodeName_, True) + super(AlphaSynapse, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(AlphaSynapse, self)._buildChildren(child_, node, nodeName_, True) pass # end class AlphaSynapse class EIF_cond_exp_isfa_ista(basePyNNIaFCondCell): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('a', 'xs:float', 0, 0, {'use': u'required'}), - MemberSpec_('b', 'xs:float', 0, 0, {'use': u'required'}), - MemberSpec_('delta_T', 'xs:float', 0, 0, {'use': u'required'}), - MemberSpec_('tau_w', 'xs:float', 0, 0, {'use': u'required'}), - MemberSpec_('v_spike', 'xs:float', 0, 0, {'use': u'required'}), + MemberSpec_('a', 'xs:float', 0, 0, {'use': 'required', 'name': 'a'}), + MemberSpec_('b', 'xs:float', 0, 0, {'use': 'required', 'name': 'b'}), + MemberSpec_('delta_T', 'xs:float', 0, 0, {'use': 'required', 'name': 'delta_T'}), + MemberSpec_('tau_w', 'xs:float', 0, 0, {'use': 'required', 'name': 'tau_w'}), + MemberSpec_('v_spike', 'xs:float', 0, 0, {'use': 'required', 'name': 'v_spike'}), ] subclass = None superclass = basePyNNIaFCondCell - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, cm=None, i_offset=None, tau_syn_E=None, tau_syn_I=None, v_init=None, tau_m=None, tau_refrac=None, v_reset=None, v_rest=None, v_thresh=None, e_rev_E=None, e_rev_I=None, a=None, b=None, delta_T=None, tau_w=None, v_spike=None, extensiontype_=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, cm=None, i_offset=None, tau_syn_E=None, tau_syn_I=None, v_init=None, tau_m=None, tau_refrac=None, v_reset=None, v_rest=None, v_thresh=None, e_rev_E=None, e_rev_I=None, a=None, b=None, delta_T=None, tau_w=None, v_spike=None, extensiontype_=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(EIF_cond_exp_isfa_ista, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, cm, i_offset, tau_syn_E, tau_syn_I, v_init, tau_m, tau_refrac, v_reset, v_rest, v_thresh, e_rev_E, e_rev_I, extensiontype_, **kwargs_) + self.ns_prefix_ = None + 
super(globals().get("EIF_cond_exp_isfa_ista"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, cm, i_offset, tau_syn_E, tau_syn_I, v_init, tau_m, tau_refrac, v_reset, v_rest, v_thresh, e_rev_E, e_rev_I, extensiontype_, **kwargs_) self.a = _cast(float, a) + self.a_nsprefix_ = None self.b = _cast(float, b) + self.b_nsprefix_ = None self.delta_T = _cast(float, delta_T) + self.delta_T_nsprefix_ = None self.tau_w = _cast(float, tau_w) + self.tau_w_nsprefix_ = None self.v_spike = _cast(float, v_spike) + self.v_spike_nsprefix_ = None self.extensiontype_ = extensiontype_ def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: @@ -23380,9 +31476,35 @@ def factory(*args_, **kwargs_): else: return EIF_cond_exp_isfa_ista(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_a(self): + return self.a + def set_a(self, a): + self.a = a + def get_b(self): + return self.b + def set_b(self, b): + self.b = b + def get_delta_T(self): + return self.delta_T + def set_delta_T(self, delta_T): + self.delta_T = delta_T + def get_tau_w(self): + return self.tau_w + def set_tau_w(self, tau_w): + self.tau_w = tau_w + def get_v_spike(self): + return self.v_spike + def set_v_spike(self, v_spike): + self.v_spike = v_spike + def get_extensiontype_(self): return self.extensiontype_ + def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ + def _hasContent(self): if ( - super(EIF_cond_exp_isfa_ista, self).hasContent_() + super(EIF_cond_exp_isfa_ista, self)._hasContent() ): return True else: @@ -23395,21 +31517,23 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='E eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'EIF_cond_exp_isfa_ista': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='EIF_cond_exp_isfa_ista') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='EIF_cond_exp_isfa_ista') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='EIF_cond_exp_isfa_ista', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='EIF_cond_exp_isfa_ista', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='EIF_cond_exp_isfa_ista'): - super(EIF_cond_exp_isfa_ista, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='EIF_cond_exp_isfa_ista') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='EIF_cond_exp_isfa_ista'): + super(EIF_cond_exp_isfa_ista, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='EIF_cond_exp_isfa_ista') if self.a is not None and 'a' not in already_processed: already_processed.add('a') 
outfile.write(' a="%s"' % self.gds_format_float(self.a, input_name='a')) @@ -23428,72 +31552,74 @@ def exportAttributes(self, outfile, level, already_processed, namespaceprefix_=' if self.extensiontype_ is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='EIF_cond_exp_isfa_ista', fromsubclass_=False, pretty_print=True): - super(EIF_cond_exp_isfa_ista, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get(self.extensiontype_, '') + outfile.write(' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_)) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='EIF_cond_exp_isfa_ista', fromsubclass_=False, pretty_print=True): + super(EIF_cond_exp_isfa_ista, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('a', node) if value is not None and 'a' not in already_processed: already_processed.add('a') - try: - self.a = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (a): %s' % exp) + value = self.gds_parse_float(value, node, 'a') + self.a = value value = find_attr_value_('b', node) if value is not None and 'b' not in already_processed: already_processed.add('b') - try: - self.b = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (b): %s' % exp) + value = self.gds_parse_float(value, node, 'b') + self.b = value value = find_attr_value_('delta_T', node) if value is not None and 'delta_T' not in already_processed: already_processed.add('delta_T') - try: - self.delta_T = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (delta_T): %s' % exp) + value = self.gds_parse_float(value, node, 'delta_T') + self.delta_T = value value = find_attr_value_('tau_w', node) if value is not None and 'tau_w' not in already_processed: already_processed.add('tau_w') - try: - self.tau_w = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (tau_w): %s' % exp) + value = self.gds_parse_float(value, node, 'tau_w') + self.tau_w = value value = find_attr_value_('v_spike', node) if value is not None and 'v_spike' not in already_processed: already_processed.add('v_spike') - try: - self.v_spike = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (v_spike): %s' % exp) 
+ value = self.gds_parse_float(value, node, 'v_spike') + self.v_spike = value value = find_attr_value_('xsi:type', node) if value is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') self.extensiontype_ = value - super(EIF_cond_exp_isfa_ista, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(EIF_cond_exp_isfa_ista, self).buildChildren(child_, node, nodeName_, True) + super(EIF_cond_exp_isfa_ista, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(EIF_cond_exp_isfa_ista, self)._buildChildren(child_, node, nodeName_, True) pass # end class EIF_cond_exp_isfa_ista class IF_cond_exp(basePyNNIaFCondCell): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ ] subclass = None superclass = basePyNNIaFCondCell - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, cm=None, i_offset=None, tau_syn_E=None, tau_syn_I=None, v_init=None, tau_m=None, tau_refrac=None, v_reset=None, v_rest=None, v_thresh=None, e_rev_E=None, e_rev_I=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, cm=None, i_offset=None, tau_syn_E=None, tau_syn_I=None, v_init=None, tau_m=None, tau_refrac=None, v_reset=None, v_rest=None, v_thresh=None, e_rev_E=None, e_rev_I=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(IF_cond_exp, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, cm, i_offset, tau_syn_E, tau_syn_I, v_init, tau_m, tau_refrac, v_reset, v_rest, v_thresh, e_rev_E, e_rev_I, **kwargs_) + self.ns_prefix_ = None + super(globals().get("IF_cond_exp"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, cm, i_offset, tau_syn_E, tau_syn_I, v_init, tau_m, tau_refrac, v_reset, v_rest, v_thresh, e_rev_E, e_rev_I, **kwargs_) def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -23505,9 +31631,13 @@ def factory(*args_, **kwargs_): else: return IF_cond_exp(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def _hasContent(self): if ( - super(IF_cond_exp, self).hasContent_() + super(IF_cond_exp, self)._hasContent() ): return True else: @@ -23520,47 +31650,57 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='I eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'IF_cond_exp': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IF_cond_exp') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IF_cond_exp') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, 
namespacedef_, name_='IF_cond_exp', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='IF_cond_exp', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='IF_cond_exp'): - super(IF_cond_exp, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IF_cond_exp') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IF_cond_exp', fromsubclass_=False, pretty_print=True): - super(IF_cond_exp, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='IF_cond_exp'): + super(IF_cond_exp, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IF_cond_exp') + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IF_cond_exp', fromsubclass_=False, pretty_print=True): + super(IF_cond_exp, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - super(IF_cond_exp, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(IF_cond_exp, self).buildChildren(child_, node, nodeName_, True) + def _buildAttributes(self, node, attrs, already_processed): + super(IF_cond_exp, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(IF_cond_exp, self)._buildChildren(child_, node, nodeName_, True) pass # end class IF_cond_exp class IF_cond_alpha(basePyNNIaFCondCell): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ ] subclass = None superclass = basePyNNIaFCondCell - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, cm=None, i_offset=None, tau_syn_E=None, tau_syn_I=None, v_init=None, tau_m=None, tau_refrac=None, v_reset=None, v_rest=None, v_thresh=None, e_rev_E=None, e_rev_I=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, cm=None, i_offset=None, tau_syn_E=None, tau_syn_I=None, v_init=None, tau_m=None, tau_refrac=None, v_reset=None, v_rest=None, v_thresh=None, e_rev_E=None, e_rev_I=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(IF_cond_alpha, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, cm, i_offset, tau_syn_E, 
tau_syn_I, v_init, tau_m, tau_refrac, v_reset, v_rest, v_thresh, e_rev_E, e_rev_I, **kwargs_) + self.ns_prefix_ = None + super(globals().get("IF_cond_alpha"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, cm, i_offset, tau_syn_E, tau_syn_I, v_init, tau_m, tau_refrac, v_reset, v_rest, v_thresh, e_rev_E, e_rev_I, **kwargs_) def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -23572,9 +31712,13 @@ def factory(*args_, **kwargs_): else: return IF_cond_alpha(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def _hasContent(self): if ( - super(IF_cond_alpha, self).hasContent_() + super(IF_cond_alpha, self)._hasContent() ): return True else: @@ -23587,51 +31731,63 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='I eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'IF_cond_alpha': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IF_cond_alpha') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IF_cond_alpha') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='IF_cond_alpha', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='IF_cond_alpha', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='IF_cond_alpha'): - super(IF_cond_alpha, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IF_cond_alpha') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IF_cond_alpha', fromsubclass_=False, pretty_print=True): - super(IF_cond_alpha, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='IF_cond_alpha'): + super(IF_cond_alpha, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IF_cond_alpha') + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IF_cond_alpha', fromsubclass_=False, pretty_print=True): + super(IF_cond_alpha, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - 
self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - super(IF_cond_alpha, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(IF_cond_alpha, self).buildChildren(child_, node, nodeName_, True) + def _buildAttributes(self, node, attrs, already_processed): + super(IF_cond_alpha, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(IF_cond_alpha, self)._buildChildren(child_, node, nodeName_, True) pass # end class IF_cond_alpha class ContinuousConnectionInstanceW(ContinuousConnectionInstance): - """Individual continuous/analog synaptic connection - instance based. - Includes setting of _weight for the connection""" + """ContinuousConnectionInstanceW -- Individual continuous/analog synaptic connection - instance based. Includes setting of _weight for the connection + + """ + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('weight', 'xs:float', 0, 0, {'use': u'required'}), + MemberSpec_('weight', 'xs:float', 0, 0, {'use': 'required', 'name': 'weight'}), ] subclass = None superclass = ContinuousConnectionInstance - def __init__(self, neuro_lex_id=None, id=None, pre_cell=None, pre_segment='0', pre_fraction_along='0.5', post_cell=None, post_segment='0', post_fraction_along='0.5', pre_component=None, post_component=None, weight=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, pre_cell=None, pre_segment='0', pre_fraction_along='0.5', post_cell=None, post_segment='0', post_fraction_along='0.5', pre_component=None, post_component=None, weight=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(ContinuousConnectionInstanceW, self).__init__(neuro_lex_id, id, pre_cell, pre_segment, pre_fraction_along, post_cell, post_segment, post_fraction_along, pre_component, post_component, **kwargs_) + self.ns_prefix_ = None + super(globals().get("ContinuousConnectionInstanceW"), self).__init__(neuro_lex_id, id, pre_cell, pre_segment, pre_fraction_along, post_cell, post_segment, post_fraction_along, pre_component, post_component, **kwargs_) self.weight = _cast(float, weight) + self.weight_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -23643,9 +31799,17 @@ def factory(*args_, **kwargs_): else: return ContinuousConnectionInstanceW(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_weight(self): + return self.weight + def set_weight(self, weight): + self.weight = weight + def _hasContent(self): if ( - super(ContinuousConnectionInstanceW, self).hasContent_() + super(ContinuousConnectionInstanceW, self)._hasContent() ): return True else: @@ -23658,44 +31822,48 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='C eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'ContinuousConnectionInstanceW': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + 
namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ContinuousConnectionInstanceW') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ContinuousConnectionInstanceW') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ContinuousConnectionInstanceW', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ContinuousConnectionInstanceW', pretty_print=pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ContinuousConnectionInstanceW'): - super(ContinuousConnectionInstanceW, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ContinuousConnectionInstanceW') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ContinuousConnectionInstanceW'): + super(ContinuousConnectionInstanceW, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ContinuousConnectionInstanceW') if self.weight is not None and 'weight' not in already_processed: already_processed.add('weight') outfile.write(' weight="%s"' % self.gds_format_float(self.weight, input_name='weight')) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ContinuousConnectionInstanceW', fromsubclass_=False, pretty_print=True): - super(ContinuousConnectionInstanceW, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ContinuousConnectionInstanceW', fromsubclass_=False, pretty_print=True): + super(ContinuousConnectionInstanceW, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) pass - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('weight', node) if value is not None and 'weight' not in already_processed: already_processed.add('weight') - try: - self.weight = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (weight): %s' % exp) - super(ContinuousConnectionInstanceW, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(ContinuousConnectionInstanceW, self).buildChildren(child_, node, nodeName_, True) + value = self.gds_parse_float(value, node, 'weight') + self.weight = value + 
super(ContinuousConnectionInstanceW, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(ContinuousConnectionInstanceW, self)._buildChildren(child_, node, nodeName_, True) pass def get_weight(self): @@ -23715,19 +31883,24 @@ def __str__(self): class ElectricalConnectionInstanceW(ElectricalConnectionInstance): - """Projection between two populations consisting of analog connections - (e.g. graded synapses). Includes setting of weight for the - connection""" + """ElectricalConnectionInstanceW -- Projection between two populations consisting of analog connections (e.g. graded synapses). Includes setting of weight for the connection + + """ + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('weight', 'xs:float', 0, 0, {'use': u'required'}), + MemberSpec_('weight', 'xs:float', 0, 0, {'use': 'required', 'name': 'weight'}), ] subclass = None superclass = ElectricalConnectionInstance - def __init__(self, neuro_lex_id=None, id=None, pre_cell=None, pre_segment='0', pre_fraction_along='0.5', post_cell=None, post_segment='0', post_fraction_along='0.5', synapse=None, weight=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, pre_cell=None, pre_segment='0', pre_fraction_along='0.5', post_cell=None, post_segment='0', post_fraction_along='0.5', synapse=None, weight=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(ElectricalConnectionInstanceW, self).__init__(neuro_lex_id, id, pre_cell, pre_segment, pre_fraction_along, post_cell, post_segment, post_fraction_along, synapse, **kwargs_) + self.ns_prefix_ = None + super(globals().get("ElectricalConnectionInstanceW"), self).__init__(neuro_lex_id, id, pre_cell, pre_segment, pre_fraction_along, post_cell, post_segment, post_fraction_along, synapse, **kwargs_) self.weight = _cast(float, weight) + self.weight_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -23739,9 +31912,17 @@ def factory(*args_, **kwargs_): else: return ElectricalConnectionInstanceW(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_weight(self): + return self.weight + def set_weight(self, weight): + self.weight = weight + def _hasContent(self): if ( - super(ElectricalConnectionInstanceW, self).hasContent_() + super(ElectricalConnectionInstanceW, self)._hasContent() ): return True else: @@ -23754,44 +31935,48 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='E eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'ElectricalConnectionInstanceW': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ElectricalConnectionInstanceW') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, 
name_='ElectricalConnectionInstanceW') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ElectricalConnectionInstanceW', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ElectricalConnectionInstanceW', pretty_print=pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ElectricalConnectionInstanceW'): - super(ElectricalConnectionInstanceW, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ElectricalConnectionInstanceW') + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ElectricalConnectionInstanceW'): + super(ElectricalConnectionInstanceW, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ElectricalConnectionInstanceW') if self.weight is not None and 'weight' not in already_processed: already_processed.add('weight') outfile.write(' weight="%s"' % self.gds_format_float(self.weight, input_name='weight')) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ElectricalConnectionInstanceW', fromsubclass_=False, pretty_print=True): - super(ElectricalConnectionInstanceW, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ElectricalConnectionInstanceW', fromsubclass_=False, pretty_print=True): + super(ElectricalConnectionInstanceW, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) pass - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + def _buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('weight', node) if value is not None and 'weight' not in already_processed: already_processed.add('weight') - try: - self.weight = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (weight): %s' % exp) - super(ElectricalConnectionInstanceW, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(ElectricalConnectionInstanceW, self).buildChildren(child_, node, nodeName_, True) + value = self.gds_parse_float(value, node, 'weight') + self.weight = value + super(ElectricalConnectionInstanceW, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(ElectricalConnectionInstanceW, self)._buildChildren(child_, node, nodeName_, True) pass def get_weight(self): @@ -23814,18 +31999,24 @@ def __str__(self): class BlockingPlasticSynapse(ExpTwoSynapse): + __hash__ = GeneratedsSuper.__hash__ 
member_data_items_ = [ - MemberSpec_('plasticity_mechanism', 'PlasticityMechanism', 0, 1, {u'type': u'PlasticityMechanism', u'name': u'plasticityMechanism', u'minOccurs': u'0'}, None), - MemberSpec_('block_mechanism', 'BlockMechanism', 0, 1, {u'type': u'BlockMechanism', u'name': u'blockMechanism', u'minOccurs': u'0'}, None), + MemberSpec_('plasticity_mechanism', 'PlasticityMechanism', 0, 1, {'minOccurs': '0', 'name': 'plasticityMechanism', 'type': 'PlasticityMechanism'}, None), + MemberSpec_('block_mechanism', 'BlockMechanism', 0, 1, {'minOccurs': '0', 'name': 'blockMechanism', 'type': 'BlockMechanism'}, None), ] subclass = None superclass = ExpTwoSynapse - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, gbase=None, erev=None, tau_decay=None, tau_rise=None, plasticity_mechanism=None, block_mechanism=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, gbase=None, erev=None, tau_decay=None, tau_rise=None, plasticity_mechanism=None, block_mechanism=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(BlockingPlasticSynapse, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, gbase, erev, tau_decay, tau_rise, **kwargs_) + self.ns_prefix_ = None + super(globals().get("BlockingPlasticSynapse"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, gbase, erev, tau_decay, tau_rise, **kwargs_) self.plasticity_mechanism = plasticity_mechanism + self.plasticity_mechanism_nsprefix_ = None self.block_mechanism = block_mechanism + self.block_mechanism_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -23837,16 +32028,28 @@ def factory(*args_, **kwargs_): else: return BlockingPlasticSynapse(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def get_plasticityMechanism(self): + return self.plasticity_mechanism + def set_plasticityMechanism(self, plasticity_mechanism): + self.plasticity_mechanism = plasticity_mechanism + def get_blockMechanism(self): + return self.block_mechanism + def set_blockMechanism(self, block_mechanism): + self.block_mechanism = block_mechanism + def _hasContent(self): if ( self.plasticity_mechanism is not None or self.block_mechanism is not None or - super(BlockingPlasticSynapse, self).hasContent_() + super(BlockingPlasticSynapse, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BlockingPlasticSynapse', pretty_print=True): + def export(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='BlockingPlasticSynapse', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('BlockingPlasticSynapse') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ @@ -23854,64 +32057,76 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='B eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'BlockingPlasticSynapse': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + 
namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BlockingPlasticSynapse') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BlockingPlasticSynapse') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BlockingPlasticSynapse', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BlockingPlasticSynapse', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BlockingPlasticSynapse'): - super(BlockingPlasticSynapse, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BlockingPlasticSynapse') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BlockingPlasticSynapse', fromsubclass_=False, pretty_print=True): - super(BlockingPlasticSynapse, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BlockingPlasticSynapse'): + super(BlockingPlasticSynapse, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BlockingPlasticSynapse') + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', name_='BlockingPlasticSynapse', fromsubclass_=False, pretty_print=True): + super(BlockingPlasticSynapse, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) if pretty_print: eol_ = '\n' else: eol_ = '' if self.plasticity_mechanism is not None: + namespaceprefix_ = self.plasticity_mechanism_nsprefix_ + ':' if (UseCapturedNS_ and self.plasticity_mechanism_nsprefix_) else '' self.plasticity_mechanism.export(outfile, level, namespaceprefix_, namespacedef_='', name_='plasticityMechanism', pretty_print=pretty_print) if self.block_mechanism is not None: + namespaceprefix_ = self.block_mechanism_nsprefix_ + ':' if (UseCapturedNS_ and self.block_mechanism_nsprefix_) else '' self.block_mechanism.export(outfile, level, namespaceprefix_, namespacedef_='', name_='blockMechanism', pretty_print=pretty_print) - def build(self, node): + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - super(BlockingPlasticSynapse, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + def _buildAttributes(self, node, attrs, already_processed): 
+ super(BlockingPlasticSynapse, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'plasticityMechanism': obj_ = PlasticityMechanism.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.plasticity_mechanism = obj_ obj_.original_tagname_ = 'plasticityMechanism' elif nodeName_ == 'blockMechanism': obj_ = BlockMechanism.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.block_mechanism = obj_ obj_.original_tagname_ = 'blockMechanism' - super(BlockingPlasticSynapse, self).buildChildren(child_, node, nodeName_, True) + super(BlockingPlasticSynapse, self)._buildChildren(child_, node, nodeName_, True) # end class BlockingPlasticSynapse class EIF_cond_alpha_isfa_ista(EIF_cond_exp_isfa_ista): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ ] subclass = None superclass = EIF_cond_exp_isfa_ista - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, cm=None, i_offset=None, tau_syn_E=None, tau_syn_I=None, v_init=None, tau_m=None, tau_refrac=None, v_reset=None, v_rest=None, v_thresh=None, e_rev_E=None, e_rev_I=None, a=None, b=None, delta_T=None, tau_w=None, v_spike=None, **kwargs_): + def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, cm=None, i_offset=None, tau_syn_E=None, tau_syn_I=None, v_init=None, tau_m=None, tau_refrac=None, v_reset=None, v_rest=None, v_thresh=None, e_rev_E=None, e_rev_I=None, a=None, b=None, delta_T=None, tau_w=None, v_spike=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') - super(EIF_cond_alpha_isfa_ista, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, cm, i_offset, tau_syn_E, tau_syn_I, v_init, tau_m, tau_refrac, v_reset, v_rest, v_thresh, e_rev_E, e_rev_I, a, b, delta_T, tau_w, v_spike, **kwargs_) + self.ns_prefix_ = None + super(globals().get("EIF_cond_alpha_isfa_ista"), self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, cm, i_offset, tau_syn_E, tau_syn_I, v_init, tau_m, tau_refrac, v_reset, v_rest, v_thresh, e_rev_E, e_rev_I, a, b, delta_T, tau_w, v_spike, **kwargs_) def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( @@ -23923,9 +32138,13 @@ def factory(*args_, **kwargs_): else: return EIF_cond_alpha_isfa_ista(*args_, **kwargs_) factory = staticmethod(factory) - def hasContent_(self): + def get_ns_prefix_(self): + return self.ns_prefix_ + def set_ns_prefix_(self, ns_prefix): + self.ns_prefix_ = ns_prefix + def _hasContent(self): if ( - super(EIF_cond_alpha_isfa_ista, self).hasContent_() + super(EIF_cond_alpha_isfa_ista, self)._hasContent() ): return True else: @@ -23938,34 +32157,40 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='E eol_ = '\n' else: eol_ = '' - if self.original_tagname_ is not None: + if self.original_tagname_ is not None and name_ == 'EIF_cond_alpha_isfa_ista': name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() - 
self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='EIF_cond_alpha_isfa_ista') - if self.hasContent_(): + self._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='EIF_cond_alpha_isfa_ista') + if self._hasContent(): outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='EIF_cond_alpha_isfa_ista', pretty_print=pretty_print) + self._exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='EIF_cond_alpha_isfa_ista', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='EIF_cond_alpha_isfa_ista'): - super(EIF_cond_alpha_isfa_ista, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='EIF_cond_alpha_isfa_ista') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='EIF_cond_alpha_isfa_ista', fromsubclass_=False, pretty_print=True): - super(EIF_cond_alpha_isfa_ista, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + def _exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='EIF_cond_alpha_isfa_ista'): + super(EIF_cond_alpha_isfa_ista, self)._exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='EIF_cond_alpha_isfa_ista') + def _exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='EIF_cond_alpha_isfa_ista', fromsubclass_=False, pretty_print=True): + super(EIF_cond_alpha_isfa_ista, self)._exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - super(EIF_cond_alpha_isfa_ista, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(EIF_cond_alpha_isfa_ista, self).buildChildren(child_, node, nodeName_, True) + def _buildAttributes(self, node, attrs, already_processed): + super(EIF_cond_alpha_isfa_ista, self)._buildAttributes(node, attrs, already_processed) + def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): + super(EIF_cond_alpha_isfa_ista, self)._buildChildren(child_, node, nodeName_, True) pass # end class EIF_cond_alpha_isfa_ista @@ -23993,7 +32218,26 @@ def get_root_tag(node): return tag, rootClass -def parse(inFileName, silence=False): +def get_required_ns_prefix_defs(rootNode): + '''Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. 
+ ''' + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = ' '.join([ + 'xmlns:{}="{}"'.format(prefix, uri) + for prefix, uri in nsmap.items() + ]) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() parser = None doc = parsexml_(inFileName, parser) rootNode = doc.getroot() @@ -24002,43 +32246,68 @@ def parse(inFileName, silence=False): rootTag = 'Property' rootClass = Property rootObj = rootClass.factory() - rootObj.build(rootNode) - # Enable Python to collect the space used by the DOM. - doc = None + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None ## if not silence: ## sys.stdout.write('\n') ## rootObj.export( ## sys.stdout, 0, name_=rootTag, -## namespacedef_='', +## namespacedef_=namespacedefs, ## pretty_print=True) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) return rootObj -def parseEtree(inFileName, silence=False): +def parseEtree(inFileName, silence=False, print_warnings=True, + mapping=None, reverse_mapping=None, nsmap=None): parser = None doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() rootNode = doc.getroot() rootTag, rootClass = get_root_tag(rootNode) if rootClass is None: rootTag = 'Property' rootClass = Property rootObj = rootClass.factory() - rootObj.build(rootNode) + rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, name_=rootTag, mapping_=mapping, + reverse_mapping_=reverse_mapping, nsmap_=nsmap) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) # Enable Python to collect the space used by the DOM. - doc = None - mapping = {} - rootElement = rootObj.to_etree(None, name_=rootTag, mapping_=mapping) - reverse_mapping = rootObj.gds_reverse_node_mapping(mapping) + if not SaveElementTreeNode: + doc = None + rootNode = None ## if not silence: ## content = etree_.tostring( ## rootElement, pretty_print=True, ## xml_declaration=True, encoding="utf-8") -## sys.stdout.write(content) +## sys.stdout.write(str(content)) ## sys.stdout.write('\n') - return rootObj, rootElement, mapping, reverse_mapping + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping -def parseString(inString, silence=False): +def parseString(inString, silence=False, print_warnings=True): '''Parse a string, create the object tree, and export it. 
Arguments: @@ -24049,39 +32318,58 @@ def parseString(inString, silence=False): ''' parser = None rootNode= parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() rootTag, rootClass = get_root_tag(rootNode) if rootClass is None: rootTag = 'Property' rootClass = Property rootObj = rootClass.factory() - rootObj.build(rootNode) - # Enable Python to collect the space used by the DOM. + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None ## if not silence: ## sys.stdout.write('\n') ## rootObj.export( ## sys.stdout, 0, name_=rootTag, ## namespacedef_='') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) return rootObj -def parseLiteral(inFileName, silence=False): +def parseLiteral(inFileName, silence=False, print_warnings=True): parser = None doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() rootNode = doc.getroot() rootTag, rootClass = get_root_tag(rootNode) if rootClass is None: rootTag = 'Property' rootClass = Property rootObj = rootClass.factory() - rootObj.build(rootNode) + rootObj.build(rootNode, gds_collector_=gds_collector) # Enable Python to collect the space used by the DOM. - doc = None + if not SaveElementTreeNode: + doc = None + rootNode = None ## if not silence: ## sys.stdout.write('#from nml import *\n\n') ## sys.stdout.write('import nml as model_\n\n') ## sys.stdout.write('rootObj = model_.rootClass(\n') ## rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) ## sys.stdout.write(')\n') + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ('-' * 50) + '\n' + sys.stderr.write(separator) + sys.stderr.write('----- Warnings -- count: {} -----\n'.format( + len(gds_collector.get_messages()), )) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) return rootObj @@ -24097,6 +32385,670 @@ def main(): #import pdb; pdb.set_trace() main() +RenameMappings_ = { +} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". 
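For context on the parser entry points above, a minimal usage sketch (not part of the patch): the import path and the file name cell.nml are assumptions, and the root class named in the comment is only the typical case.

from neuroml.nml import nml   # assumed import path for the regenerated module

# parse() now threads a GdsCollector_ through build() and, when
# print_warnings is True (the default), writes any collected messages to
# stderr after the object tree has been built.
doc = nml.parse("cell.nml")
print(type(doc).__name__)     # typically NeuroMLDocument for a NeuroML 2 file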
+NamespaceToDefMappings_ = {'http://www.neuroml.org/schema/neuroml2': [('NmlId', 'NeuroML_v2.2.xsd', 'ST'), + ('Nml2Quantity', + 'NeuroML_v2.2.xsd', + 'ST'), + ('Nml2Quantity_none', + 'NeuroML_v2.2.xsd', + 'ST'), + ('Nml2Quantity_voltage', + 'NeuroML_v2.2.xsd', + 'ST'), + ('Nml2Quantity_length', + 'NeuroML_v2.2.xsd', + 'ST'), + ('Nml2Quantity_resistance', + 'NeuroML_v2.2.xsd', + 'ST'), + ('Nml2Quantity_resistivity', + 'NeuroML_v2.2.xsd', + 'ST'), + ('Nml2Quantity_conductance', + 'NeuroML_v2.2.xsd', + 'ST'), + ('Nml2Quantity_conductanceDensity', + 'NeuroML_v2.2.xsd', + 'ST'), + ('Nml2Quantity_permeability', + 'NeuroML_v2.2.xsd', + 'ST'), + ('Nml2Quantity_time', + 'NeuroML_v2.2.xsd', + 'ST'), + ('Nml2Quantity_pertime', + 'NeuroML_v2.2.xsd', + 'ST'), + ('Nml2Quantity_capacitance', + 'NeuroML_v2.2.xsd', + 'ST'), + ('Nml2Quantity_specificCapacitance', + 'NeuroML_v2.2.xsd', + 'ST'), + ('Nml2Quantity_concentration', + 'NeuroML_v2.2.xsd', + 'ST'), + ('Nml2Quantity_current', + 'NeuroML_v2.2.xsd', + 'ST'), + ('Nml2Quantity_currentDensity', + 'NeuroML_v2.2.xsd', + 'ST'), + ('Nml2Quantity_temperature', + 'NeuroML_v2.2.xsd', + 'ST'), + ('Nml2Quantity_rhoFactor', + 'NeuroML_v2.2.xsd', + 'ST'), + ('Nml2Quantity_conductancePerVoltage', + 'NeuroML_v2.2.xsd', + 'ST'), + ('MetaId', + 'NeuroML_v2.2.xsd', + 'ST'), + ('NeuroLexId', + 'NeuroML_v2.2.xsd', + 'ST'), + ('NonNegativeInteger', + 'NeuroML_v2.2.xsd', + 'ST'), + ('PositiveInteger', + 'NeuroML_v2.2.xsd', + 'ST'), + ('DoubleGreaterThanZero', + 'NeuroML_v2.2.xsd', + 'ST'), + ('ZeroOrOne', + 'NeuroML_v2.2.xsd', + 'ST'), + ('Notes', 'NeuroML_v2.2.xsd', 'ST'), + ('ZeroToOne', + 'NeuroML_v2.2.xsd', + 'ST'), + ('channelTypes', + 'NeuroML_v2.2.xsd', + 'ST'), + ('gateTypes', + 'NeuroML_v2.2.xsd', + 'ST'), + ('BlockTypes', + 'NeuroML_v2.2.xsd', + 'ST'), + ('PlasticityTypes', + 'NeuroML_v2.2.xsd', + 'ST'), + ('Metric', + 'NeuroML_v2.2.xsd', + 'ST'), + ('networkTypes', + 'NeuroML_v2.2.xsd', + 'ST'), + ('allowedSpaces', + 'NeuroML_v2.2.xsd', + 'ST'), + ('populationTypes', + 'NeuroML_v2.2.xsd', + 'ST'), + ('Property', + 'NeuroML_v2.2.xsd', + 'CT'), + ('Annotation', + 'NeuroML_v2.2.xsd', + 'CT'), + ('ComponentType', + 'NeuroML_v2.2.xsd', + 'CT'), + ('Constant', + 'NeuroML_v2.2.xsd', + 'CT'), + ('Exposure', + 'NeuroML_v2.2.xsd', + 'CT'), + ('NamedDimensionalType', + 'NeuroML_v2.2.xsd', + 'CT'), + ('NamedDimensionalVariable', + 'NeuroML_v2.2.xsd', + 'CT'), + ('Parameter', + 'NeuroML_v2.2.xsd', + 'CT'), + ('LEMS_Property', + 'NeuroML_v2.2.xsd', + 'CT'), + ('Requirement', + 'NeuroML_v2.2.xsd', + 'CT'), + ('InstanceRequirement', + 'NeuroML_v2.2.xsd', + 'CT'), + ('Dynamics', + 'NeuroML_v2.2.xsd', + 'CT'), + ('DerivedVariable', + 'NeuroML_v2.2.xsd', + 'CT'), + ('StateVariable', + 'NeuroML_v2.2.xsd', + 'CT'), + ('ConditionalDerivedVariable', + 'NeuroML_v2.2.xsd', + 'CT'), + ('Case', 'NeuroML_v2.2.xsd', 'CT'), + ('TimeDerivative', + 'NeuroML_v2.2.xsd', + 'CT'), + ('NeuroMLDocument', + 'NeuroML_v2.2.xsd', + 'CT'), + ('IncludeType', + 'NeuroML_v2.2.xsd', + 'CT'), + ('IonChannelScalable', + 'NeuroML_v2.2.xsd', + 'CT'), + ('IonChannelKS', + 'NeuroML_v2.2.xsd', + 'CT'), + ('IonChannel', + 'NeuroML_v2.2.xsd', + 'CT'), + ('IonChannelHH', + 'NeuroML_v2.2.xsd', + 'CT'), + ('IonChannelVShift', + 'NeuroML_v2.2.xsd', + 'CT'), + ('Q10ConductanceScaling', + 'NeuroML_v2.2.xsd', + 'CT'), + ('ClosedState', + 'NeuroML_v2.2.xsd', + 'CT'), + ('OpenState', + 'NeuroML_v2.2.xsd', + 'CT'), + ('ForwardTransition', + 'NeuroML_v2.2.xsd', + 'CT'), + ('ReverseTransition', + 'NeuroML_v2.2.xsd', + 'CT'), + 
('TauInfTransition', + 'NeuroML_v2.2.xsd', + 'CT'), + ('GateKS', + 'NeuroML_v2.2.xsd', + 'CT'), + ('GateHHUndetermined', + 'NeuroML_v2.2.xsd', + 'CT'), + ('GateHHRates', + 'NeuroML_v2.2.xsd', + 'CT'), + ('GateHHTauInf', + 'NeuroML_v2.2.xsd', + 'CT'), + ('GateHHRatesTauInf', + 'NeuroML_v2.2.xsd', + 'CT'), + ('GateHHRatesTau', + 'NeuroML_v2.2.xsd', + 'CT'), + ('GateHHRatesInf', + 'NeuroML_v2.2.xsd', + 'CT'), + ('GateHHInstantaneous', + 'NeuroML_v2.2.xsd', + 'CT'), + ('GateFractional', + 'NeuroML_v2.2.xsd', + 'CT'), + ('GateFractionalSubgate', + 'NeuroML_v2.2.xsd', + 'CT'), + ('Q10Settings', + 'NeuroML_v2.2.xsd', + 'CT'), + ('HHRate', + 'NeuroML_v2.2.xsd', + 'CT'), + ('HHVariable', + 'NeuroML_v2.2.xsd', + 'CT'), + ('HHTime', + 'NeuroML_v2.2.xsd', + 'CT'), + ('DecayingPoolConcentrationModel', + 'NeuroML_v2.2.xsd', + 'CT'), + ('FixedFactorConcentrationModel', + 'NeuroML_v2.2.xsd', + 'CT'), + ('BaseSynapse', + 'NeuroML_v2.2.xsd', + 'CT'), + ('BaseVoltageDepSynapse', + 'NeuroML_v2.2.xsd', + 'CT'), + ('BaseCurrentBasedSynapse', + 'NeuroML_v2.2.xsd', + 'CT'), + ('BaseConductanceBasedSynapse', + 'NeuroML_v2.2.xsd', + 'CT'), + ('BaseConductanceBasedSynapseTwo', + 'NeuroML_v2.2.xsd', + 'CT'), + ('GapJunction', + 'NeuroML_v2.2.xsd', + 'CT'), + ('SilentSynapse', + 'NeuroML_v2.2.xsd', + 'CT'), + ('LinearGradedSynapse', + 'NeuroML_v2.2.xsd', + 'CT'), + ('GradedSynapse', + 'NeuroML_v2.2.xsd', + 'CT'), + ('AlphaCurrentSynapse', + 'NeuroML_v2.2.xsd', + 'CT'), + ('AlphaSynapse', + 'NeuroML_v2.2.xsd', + 'CT'), + ('ExpOneSynapse', + 'NeuroML_v2.2.xsd', + 'CT'), + ('ExpTwoSynapse', + 'NeuroML_v2.2.xsd', + 'CT'), + ('ExpThreeSynapse', + 'NeuroML_v2.2.xsd', + 'CT'), + ('DoubleSynapse', + 'NeuroML_v2.2.xsd', + 'CT'), + ('BlockingPlasticSynapse', + 'NeuroML_v2.2.xsd', + 'CT'), + ('BlockMechanism', + 'NeuroML_v2.2.xsd', + 'CT'), + ('PlasticityMechanism', + 'NeuroML_v2.2.xsd', + 'CT'), + ('BaseCell', + 'NeuroML_v2.2.xsd', + 'CT'), + ('IafTauCell', + 'NeuroML_v2.2.xsd', + 'CT'), + ('IafTauRefCell', + 'NeuroML_v2.2.xsd', + 'CT'), + ('IafCell', + 'NeuroML_v2.2.xsd', + 'CT'), + ('IafRefCell', + 'NeuroML_v2.2.xsd', + 'CT'), + ('IzhikevichCell', + 'NeuroML_v2.2.xsd', + 'CT'), + ('BaseCellMembPotCap', + 'NeuroML_v2.2.xsd', + 'CT'), + ('Izhikevich2007Cell', + 'NeuroML_v2.2.xsd', + 'CT'), + ('AdExIaFCell', + 'NeuroML_v2.2.xsd', + 'CT'), + ('FitzHughNagumoCell', + 'NeuroML_v2.2.xsd', + 'CT'), + ('FitzHughNagumo1969Cell', + 'NeuroML_v2.2.xsd', + 'CT'), + ('PinskyRinzelCA3Cell', + 'NeuroML_v2.2.xsd', + 'CT'), + ('Cell', 'NeuroML_v2.2.xsd', 'CT'), + ('Cell2CaPools', + 'NeuroML_v2.2.xsd', + 'CT'), + ('Morphology', + 'NeuroML_v2.2.xsd', + 'CT'), + ('Segment', + 'NeuroML_v2.2.xsd', + 'CT'), + ('SegmentParent', + 'NeuroML_v2.2.xsd', + 'CT'), + ('Point3DWithDiam', + 'NeuroML_v2.2.xsd', + 'CT'), + ('SegmentGroup', + 'NeuroML_v2.2.xsd', + 'CT'), + ('InhomogeneousParameter', + 'NeuroML_v2.2.xsd', + 'CT'), + ('ProximalDetails', + 'NeuroML_v2.2.xsd', + 'CT'), + ('DistalDetails', + 'NeuroML_v2.2.xsd', + 'CT'), + ('Member', + 'NeuroML_v2.2.xsd', + 'CT'), + ('Include', + 'NeuroML_v2.2.xsd', + 'CT'), + ('Path', 'NeuroML_v2.2.xsd', 'CT'), + ('SubTree', + 'NeuroML_v2.2.xsd', + 'CT'), + ('SegmentEndPoint', + 'NeuroML_v2.2.xsd', + 'CT'), + ('BiophysicalProperties', + 'NeuroML_v2.2.xsd', + 'CT'), + ('BiophysicalProperties2CaPools', + 'NeuroML_v2.2.xsd', + 'CT'), + ('MembraneProperties', + 'NeuroML_v2.2.xsd', + 'CT'), + ('MembraneProperties2CaPools', + 'NeuroML_v2.2.xsd', + 'CT'), + ('SpikeThresh', + 'NeuroML_v2.2.xsd', + 'CT'), + 
('SpecificCapacitance', + 'NeuroML_v2.2.xsd', + 'CT'), + ('InitMembPotential', + 'NeuroML_v2.2.xsd', + 'CT'), + ('Resistivity', + 'NeuroML_v2.2.xsd', + 'CT'), + ('ChannelPopulation', + 'NeuroML_v2.2.xsd', + 'CT'), + ('ChannelDensityNonUniform', + 'NeuroML_v2.2.xsd', + 'CT'), + ('ChannelDensityNonUniformNernst', + 'NeuroML_v2.2.xsd', + 'CT'), + ('ChannelDensityNonUniformGHK', + 'NeuroML_v2.2.xsd', + 'CT'), + ('ChannelDensity', + 'NeuroML_v2.2.xsd', + 'CT'), + ('ChannelDensityVShift', + 'NeuroML_v2.2.xsd', + 'CT'), + ('ChannelDensityNernst', + 'NeuroML_v2.2.xsd', + 'CT'), + ('ChannelDensityNernstCa2', + 'NeuroML_v2.2.xsd', + 'CT'), + ('ChannelDensityGHK', + 'NeuroML_v2.2.xsd', + 'CT'), + ('ChannelDensityGHK2', + 'NeuroML_v2.2.xsd', + 'CT'), + ('VariableParameter', + 'NeuroML_v2.2.xsd', + 'CT'), + ('InhomogeneousValue', + 'NeuroML_v2.2.xsd', + 'CT'), + ('Species', + 'NeuroML_v2.2.xsd', + 'CT'), + ('ConcentrationModel_D', + 'NeuroML_v2.2.xsd', + 'CT'), + ('IntracellularProperties', + 'NeuroML_v2.2.xsd', + 'CT'), + ('IntracellularProperties2CaPools', + 'NeuroML_v2.2.xsd', + 'CT'), + ('ExtracellularProperties', + 'NeuroML_v2.2.xsd', + 'CT'), + ('ExtracellularPropertiesLocal', + 'NeuroML_v2.2.xsd', + 'CT'), + ('ReactionScheme', + 'NeuroML_v2.2.xsd', + 'CT'), + ('PulseGenerator', + 'NeuroML_v2.2.xsd', + 'CT'), + ('PulseGeneratorDL', + 'NeuroML_v2.2.xsd', + 'CT'), + ('SineGenerator', + 'NeuroML_v2.2.xsd', + 'CT'), + ('SineGeneratorDL', + 'NeuroML_v2.2.xsd', + 'CT'), + ('RampGenerator', + 'NeuroML_v2.2.xsd', + 'CT'), + ('RampGeneratorDL', + 'NeuroML_v2.2.xsd', + 'CT'), + ('CompoundInput', + 'NeuroML_v2.2.xsd', + 'CT'), + ('CompoundInputDL', + 'NeuroML_v2.2.xsd', + 'CT'), + ('VoltageClamp', + 'NeuroML_v2.2.xsd', + 'CT'), + ('VoltageClampTriple', + 'NeuroML_v2.2.xsd', + 'CT'), + ('Spike', 'NeuroML_v2.2.xsd', 'CT'), + ('SpikeArray', + 'NeuroML_v2.2.xsd', + 'CT'), + ('TimedSynapticInput', + 'NeuroML_v2.2.xsd', + 'CT'), + ('SpikeGenerator', + 'NeuroML_v2.2.xsd', + 'CT'), + ('SpikeGeneratorRandom', + 'NeuroML_v2.2.xsd', + 'CT'), + ('SpikeGeneratorPoisson', + 'NeuroML_v2.2.xsd', + 'CT'), + ('SpikeGeneratorRefPoisson', + 'NeuroML_v2.2.xsd', + 'CT'), + ('PoissonFiringSynapse', + 'NeuroML_v2.2.xsd', + 'CT'), + ('TransientPoissonFiringSynapse', + 'NeuroML_v2.2.xsd', + 'CT'), + ('Network', + 'NeuroML_v2.2.xsd', + 'CT'), + ('Space', 'NeuroML_v2.2.xsd', 'CT'), + ('SpaceStructure', + 'NeuroML_v2.2.xsd', + 'CT'), + ('Region', + 'NeuroML_v2.2.xsd', + 'CT'), + ('Population', + 'NeuroML_v2.2.xsd', + 'CT'), + ('Layout', + 'NeuroML_v2.2.xsd', + 'CT'), + ('UnstructuredLayout', + 'NeuroML_v2.2.xsd', + 'CT'), + ('RandomLayout', + 'NeuroML_v2.2.xsd', + 'CT'), + ('GridLayout', + 'NeuroML_v2.2.xsd', + 'CT'), + ('Instance', + 'NeuroML_v2.2.xsd', + 'CT'), + ('Location', + 'NeuroML_v2.2.xsd', + 'CT'), + ('CellSet', + 'NeuroML_v2.2.xsd', + 'CT'), + ('SynapticConnection', + 'NeuroML_v2.2.xsd', + 'CT'), + ('BaseProjection', + 'NeuroML_v2.2.xsd', + 'CT'), + ('Projection', + 'NeuroML_v2.2.xsd', + 'CT'), + ('BaseConnection', + 'NeuroML_v2.2.xsd', + 'CT'), + ('BaseConnectionOldFormat', + 'NeuroML_v2.2.xsd', + 'CT'), + ('BaseConnectionNewFormat', + 'NeuroML_v2.2.xsd', + 'CT'), + ('Connection', + 'NeuroML_v2.2.xsd', + 'CT'), + ('ConnectionWD', + 'NeuroML_v2.2.xsd', + 'CT'), + ('ElectricalProjection', + 'NeuroML_v2.2.xsd', + 'CT'), + ('ElectricalConnection', + 'NeuroML_v2.2.xsd', + 'CT'), + ('ElectricalConnectionInstance', + 'NeuroML_v2.2.xsd', + 'CT'), + ('ElectricalConnectionInstanceW', + 'NeuroML_v2.2.xsd', + 'CT'), + 
('ContinuousProjection', + 'NeuroML_v2.2.xsd', + 'CT'), + ('ContinuousConnection', + 'NeuroML_v2.2.xsd', + 'CT'), + ('ContinuousConnectionInstance', + 'NeuroML_v2.2.xsd', + 'CT'), + ('ContinuousConnectionInstanceW', + 'NeuroML_v2.2.xsd', + 'CT'), + ('ExplicitInput', + 'NeuroML_v2.2.xsd', + 'CT'), + ('InputList', + 'NeuroML_v2.2.xsd', + 'CT'), + ('Input', 'NeuroML_v2.2.xsd', 'CT'), + ('InputW', + 'NeuroML_v2.2.xsd', + 'CT'), + ('basePyNNCell', + 'NeuroML_v2.2.xsd', + 'CT'), + ('basePyNNIaFCell', + 'NeuroML_v2.2.xsd', + 'CT'), + ('basePyNNIaFCondCell', + 'NeuroML_v2.2.xsd', + 'CT'), + ('IF_curr_alpha', + 'NeuroML_v2.2.xsd', + 'CT'), + ('IF_curr_exp', + 'NeuroML_v2.2.xsd', + 'CT'), + ('IF_cond_alpha', + 'NeuroML_v2.2.xsd', + 'CT'), + ('IF_cond_exp', + 'NeuroML_v2.2.xsd', + 'CT'), + ('EIF_cond_exp_isfa_ista', + 'NeuroML_v2.2.xsd', + 'CT'), + ('EIF_cond_alpha_isfa_ista', + 'NeuroML_v2.2.xsd', + 'CT'), + ('HH_cond_exp', + 'NeuroML_v2.2.xsd', + 'CT'), + ('BasePynnSynapse', + 'NeuroML_v2.2.xsd', + 'CT'), + ('ExpCondSynapse', + 'NeuroML_v2.2.xsd', + 'CT'), + ('AlphaCondSynapse', + 'NeuroML_v2.2.xsd', + 'CT'), + ('ExpCurrSynapse', + 'NeuroML_v2.2.xsd', + 'CT'), + ('AlphaCurrSynapse', + 'NeuroML_v2.2.xsd', + 'CT'), + ('SpikeSourcePoisson', + 'NeuroML_v2.2.xsd', + 'CT'), + ('BaseWithoutId', + 'NeuroML_v2.2.xsd', + 'CT'), + ('BaseNonNegativeIntegerId', + 'NeuroML_v2.2.xsd', + 'CT'), + ('Base', 'NeuroML_v2.2.xsd', 'CT'), + ('Standalone', + 'NeuroML_v2.2.xsd', + 'CT')]} __all__ = [ "AdExIaFCell", From 027574352d7c8fb2a2108568db946ab9411bd27e Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Thu, 26 Aug 2021 17:19:02 +0100 Subject: [PATCH 039/136] fix: minor tweaks to workaround probably generateDS bug See: https://sourceforge.net/p/generateds/tickets/13/ --- neuroml/nml/README.md | 10 ++++++++++ neuroml/nml/nml.py | 4 ++-- 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/neuroml/nml/README.md b/neuroml/nml/README.md index 5eacc607..4126d3db 100644 --- a/neuroml/nml/README.md +++ b/neuroml/nml/README.md @@ -29,6 +29,16 @@ If these are not included in the output, generateds_config.py has not run, and t ### Changelog +#### August 26, 2021 + +Author: @sanjayankur31 + +Generate with Python 3.9, generateDS version 2.39.9 + +- all tests pass +- requires us to not use `--use-getter-setter=none`, which causes test failures. 
+- requires us to manually patch one or two conversions to work around https://sourceforge.net/p/generateds/tickets/13/ + #### March 26, 2020 Author: @sanjayankur31 diff --git a/neuroml/nml/nml.py b/neuroml/nml/nml.py index 37b37a13..fd9444b0 100644 --- a/neuroml/nml/nml.py +++ b/neuroml/nml/nml.py @@ -206,7 +206,7 @@ def gds_format_base64(self, input_data, input_name=''): def gds_validate_base64(self, input_data, node=None, input_name=''): return input_data def gds_format_integer(self, input_data, input_name=''): - return '%d' % input_data + return '%d' % int(input_data) def gds_parse_integer(self, input_data, node=None, input_name=''): try: ival = int(input_data) @@ -233,7 +233,7 @@ def gds_validate_integer_list( raise_parse_error(node, 'Requires sequence of integer values') return values def gds_format_float(self, input_data, input_name=''): - return ('%.15f' % input_data).rstrip('0') + return ('%.15f' % float(input_data)).rstrip('0') def gds_parse_float(self, input_data, node=None, input_name=''): try: fval_ = float(input_data) From cc9d45b2c204c04e92ce9cb72b918eb6366883ef Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Fri, 27 Aug 2021 11:55:38 +0100 Subject: [PATCH 040/136] feat: update using `--use-getter-setter=none` --- neuroml/nml/README.md | 4 +- neuroml/nml/nml.py | 4368 +---------------------------------------- 2 files changed, 11 insertions(+), 4361 deletions(-) diff --git a/neuroml/nml/README.md b/neuroml/nml/README.md index 4126d3db..4721bc34 100644 --- a/neuroml/nml/README.md +++ b/neuroml/nml/README.md @@ -36,8 +36,8 @@ Author: @sanjayankur31 Generate with Python 3.9, generateDS version 2.39.9 - all tests pass -- requires us to not use `--use-getter-setter=none`, which causes test failures. -- requires us to manually patch one or two conversions to work around https://sourceforge.net/p/generateds/tickets/13/ +- `--use-getter-setter=none`, which causes test failures because of probable generateDS bug. See: https://sourceforge.net/p/generateds/tickets/20/ +- also requires us to manually patch one or two conversions to work around https://sourceforge.net/p/generateds/tickets/13/ #### March 26, 2020 diff --git a/neuroml/nml/nml.py b/neuroml/nml/nml.py index fd9444b0..e7cc54f2 100644 --- a/neuroml/nml/nml.py +++ b/neuroml/nml/nml.py @@ -2,12 +2,12 @@ # -*- coding: utf-8 -*- # -# Generated Thu Aug 26 17:22:09 2021 by generateDS.py version 2.39.9. +# Generated Fri Aug 27 11:33:40 2021 by generateDS.py version 2.39.9. 
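As an aside on the manual conversion fix above (PATCH 039, gds_format_integer / gds_format_float): the sketch below illustrates the failure mode it guards against, using plain Python only; the literal values are made up for illustration.

value = "3"                                    # an attribute value still held as a string
# '%d' % value                                 # unpatched form: TypeError, %d needs a number
print('%d' % int(value))                       # patched form: prints 3
print(('%.15f' % float("0.25")).rstrip('0'))   # patched form: prints 0.25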
# Python 3.9.6 (default, Jul 16 2021, 00:00:00) [GCC 11.1.1 20210531 (Red Hat 11.1.1-3)] # # Command line options: # ('-o', 'nml.py') -# ('--use-getter-setter', 'new') +# ('--use-getter-setter', 'none') # ('--no-warnings', '') # ('--silence', '') # ('--user-methods', './helper_methods.py') @@ -16,7 +16,7 @@ # NeuroML_v2.2.xsd # # Command line: -# /home/asinha/.virtualenvs/generateds-39/bin/generateDS.py -o "nml.py" --use-getter-setter="new" --no-warnings --silence --user-methods="./helper_methods.py" NeuroML_v2.2.xsd +# /home/asinha/.virtualenvs/generateds-39/bin/generateDS.py -o "nml.py" --use-getter-setter="none" --no-warnings --silence --user-methods="./helper_methods.py" NeuroML_v2.2.xsd # # Current working directory (os.getcwd()): # nml @@ -1064,18 +1064,6 @@ def factory(*args_, **kwargs_): else: return Property(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_tag(self): - return self.tag - def set_tag(self, tag): - self.tag = tag - def get_value(self): - return self.value - def set_value(self, value): - self.value = value def _hasContent(self): if ( @@ -1170,14 +1158,6 @@ def factory(*args_, **kwargs_): else: return Annotation(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_anytypeobjs_(self): return self.anytypeobjs_ - def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_ - def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value) - def insert_anytypeobjs_(self, index, value): self._anytypeobjs_[index] = value def _hasContent(self): if ( self.anytypeobjs_ @@ -1235,7 +1215,7 @@ def _buildAttributes(self, node, attrs, already_processed): pass def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): content_ = self.gds_build_any(child_, 'Annotation') - self.add_anytypeobjs_(content_) + self.anytypeobjs_.append(content_) # end class Annotation @@ -1316,92 +1296,6 @@ def factory(*args_, **kwargs_): else: return ComponentType(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_Property(self): - return self.Property - def set_Property(self, Property): - self.Property = Property - def add_Property(self, value): - self.Property.append(value) - def insert_Property_at(self, index, value): - self.Property.insert(index, value) - def replace_Property_at(self, index, value): - self.Property[index] = value - def get_Parameter(self): - return self.Parameter - def set_Parameter(self, Parameter): - self.Parameter = Parameter - def add_Parameter(self, value): - self.Parameter.append(value) - def insert_Parameter_at(self, index, value): - self.Parameter.insert(index, value) - def replace_Parameter_at(self, index, value): - self.Parameter[index] = value - def get_Constant(self): - return self.Constant - def set_Constant(self, Constant): - self.Constant = Constant - def add_Constant(self, value): - self.Constant.append(value) - def insert_Constant_at(self, index, value): - self.Constant.insert(index, value) - def replace_Constant_at(self, index, value): - self.Constant[index] = value - def get_Exposure(self): - return self.Exposure - def set_Exposure(self, Exposure): - self.Exposure = Exposure - def add_Exposure(self, value): - 
self.Exposure.append(value) - def insert_Exposure_at(self, index, value): - self.Exposure.insert(index, value) - def replace_Exposure_at(self, index, value): - self.Exposure[index] = value - def get_Requirement(self): - return self.Requirement - def set_Requirement(self, Requirement): - self.Requirement = Requirement - def add_Requirement(self, value): - self.Requirement.append(value) - def insert_Requirement_at(self, index, value): - self.Requirement.insert(index, value) - def replace_Requirement_at(self, index, value): - self.Requirement[index] = value - def get_InstanceRequirement(self): - return self.InstanceRequirement - def set_InstanceRequirement(self, InstanceRequirement): - self.InstanceRequirement = InstanceRequirement - def add_InstanceRequirement(self, value): - self.InstanceRequirement.append(value) - def insert_InstanceRequirement_at(self, index, value): - self.InstanceRequirement.insert(index, value) - def replace_InstanceRequirement_at(self, index, value): - self.InstanceRequirement[index] = value - def get_Dynamics(self): - return self.Dynamics - def set_Dynamics(self, Dynamics): - self.Dynamics = Dynamics - def add_Dynamics(self, value): - self.Dynamics.append(value) - def insert_Dynamics_at(self, index, value): - self.Dynamics.insert(index, value) - def replace_Dynamics_at(self, index, value): - self.Dynamics[index] = value - def get_name(self): - return self.name - def set_name(self, name): - self.name = name - def get_extends(self): - return self.extends - def set_extends(self, extends): - self.extends = extends - def get_description(self): - return self.description - def set_description(self, description): - self.description = description def _hasContent(self): if ( self.Property or @@ -1575,26 +1469,6 @@ def factory(*args_, **kwargs_): else: return Constant(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_name(self): - return self.name - def set_name(self, name): - self.name = name - def get_dimension(self): - return self.dimension - def set_dimension(self, dimension): - self.dimension = dimension - def get_value(self): - return self.value - def set_value(self, value): - self.value = value - def get_description(self): - return self.description - def set_description(self, description): - self.description = description def validate_Nml2Quantity(self, value): # Validate type Nml2Quantity, a restriction on xs:string. 
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -1719,22 +1593,6 @@ def factory(*args_, **kwargs_): else: return Exposure(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_name(self): - return self.name - def set_name(self, name): - self.name = name - def get_dimension(self): - return self.dimension - def set_dimension(self, dimension): - self.dimension = dimension - def get_description(self): - return self.description - def set_description(self, description): - self.description = description def _hasContent(self): if ( @@ -1838,24 +1696,6 @@ def factory(*args_, **kwargs_): else: return NamedDimensionalType(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_name(self): - return self.name - def set_name(self, name): - self.name = name - def get_dimension(self): - return self.dimension - def set_dimension(self, dimension): - self.dimension = dimension - def get_description(self): - return self.description - def set_description(self, description): - self.description = description - def get_extensiontype_(self): return self.extensiontype_ - def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ def _hasContent(self): if ( @@ -1974,28 +1814,6 @@ def factory(*args_, **kwargs_): else: return NamedDimensionalVariable(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_name(self): - return self.name - def set_name(self, name): - self.name = name - def get_dimension(self): - return self.dimension - def set_dimension(self, dimension): - self.dimension = dimension - def get_description(self): - return self.description - def set_description(self, description): - self.description = description - def get_exposure(self): - return self.exposure - def set_exposure(self, exposure): - self.exposure = exposure - def get_extensiontype_(self): return self.extensiontype_ - def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ def _hasContent(self): if ( @@ -2109,10 +1927,6 @@ def factory(*args_, **kwargs_): else: return Parameter(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix def _hasContent(self): if ( super(Parameter, self)._hasContent() @@ -2193,14 +2007,6 @@ def factory(*args_, **kwargs_): else: return LEMS_Property(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_defaultValue(self): - return self.default_value - def set_defaultValue(self, default_value): - self.default_value = default_value def _hasContent(self): if ( super(LEMS_Property, self)._hasContent() @@ -2286,10 +2092,6 @@ def factory(*args_, **kwargs_): else: return Requirement(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix def _hasContent(self): if ( super(Requirement, self)._hasContent() @@ -2372,18 +2174,6 @@ def factory(*args_, **kwargs_): else: return InstanceRequirement(*args_, 
**kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_name(self): - return self.name - def set_name(self, name): - self.name = name - def get_type(self): - return self.type - def set_type(self, type): - self.type = type def _hasContent(self): if ( @@ -2497,50 +2287,6 @@ def factory(*args_, **kwargs_): else: return Dynamics(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_StateVariable(self): - return self.StateVariable - def set_StateVariable(self, StateVariable): - self.StateVariable = StateVariable - def add_StateVariable(self, value): - self.StateVariable.append(value) - def insert_StateVariable_at(self, index, value): - self.StateVariable.insert(index, value) - def replace_StateVariable_at(self, index, value): - self.StateVariable[index] = value - def get_DerivedVariable(self): - return self.DerivedVariable - def set_DerivedVariable(self, DerivedVariable): - self.DerivedVariable = DerivedVariable - def add_DerivedVariable(self, value): - self.DerivedVariable.append(value) - def insert_DerivedVariable_at(self, index, value): - self.DerivedVariable.insert(index, value) - def replace_DerivedVariable_at(self, index, value): - self.DerivedVariable[index] = value - def get_ConditionalDerivedVariable(self): - return self.ConditionalDerivedVariable - def set_ConditionalDerivedVariable(self, ConditionalDerivedVariable): - self.ConditionalDerivedVariable = ConditionalDerivedVariable - def add_ConditionalDerivedVariable(self, value): - self.ConditionalDerivedVariable.append(value) - def insert_ConditionalDerivedVariable_at(self, index, value): - self.ConditionalDerivedVariable.insert(index, value) - def replace_ConditionalDerivedVariable_at(self, index, value): - self.ConditionalDerivedVariable[index] = value - def get_TimeDerivative(self): - return self.TimeDerivative - def set_TimeDerivative(self, TimeDerivative): - self.TimeDerivative = TimeDerivative - def add_TimeDerivative(self, value): - self.TimeDerivative.append(value) - def insert_TimeDerivative_at(self, index, value): - self.TimeDerivative.insert(index, value) - def replace_TimeDerivative_at(self, index, value): - self.TimeDerivative[index] = value def _hasContent(self): if ( self.StateVariable or @@ -2663,18 +2409,6 @@ def factory(*args_, **kwargs_): else: return DerivedVariable(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_value(self): - return self.value - def set_value(self, value): - self.value = value - def get_select(self): - return self.select - def set_select(self, select): - self.select = select def _hasContent(self): if ( super(DerivedVariable, self)._hasContent() @@ -2766,10 +2500,6 @@ def factory(*args_, **kwargs_): else: return StateVariable(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix def _hasContent(self): if ( super(StateVariable, self)._hasContent() @@ -2856,20 +2586,6 @@ def factory(*args_, **kwargs_): else: return ConditionalDerivedVariable(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = 
ns_prefix - def get_Case(self): - return self.Case - def set_Case(self, Case): - self.Case = Case - def add_Case(self, value): - self.Case.append(value) - def insert_Case_at(self, index, value): - self.Case.insert(index, value) - def replace_Case_at(self, index, value): - self.Case[index] = value def _hasContent(self): if ( self.Case or @@ -2964,18 +2680,6 @@ def factory(*args_, **kwargs_): else: return Case(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_condition(self): - return self.condition - def set_condition(self, condition): - self.condition = condition - def get_value(self): - return self.value - def set_value(self, value): - self.value = value def _hasContent(self): if ( @@ -3068,18 +2772,6 @@ def factory(*args_, **kwargs_): else: return TimeDerivative(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_variable(self): - return self.variable - def set_variable(self, variable): - self.variable = variable - def get_value(self): - return self.value - def set_value(self, value): - self.value = value def _hasContent(self): if ( @@ -3169,14 +2861,6 @@ def factory(*args_, **kwargs_): else: return IncludeType(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_href(self): - return self.href - def set_href(self, href): - self.href = href def _hasContent(self): if ( @@ -3262,18 +2946,6 @@ def factory(*args_, **kwargs_): else: return Q10ConductanceScaling(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_q10Factor(self): - return self.q10_factor - def set_q10Factor(self, q10_factor): - self.q10_factor = q10_factor - def get_experimentalTemp(self): - return self.experimental_temp - def set_experimentalTemp(self, experimental_temp): - self.experimental_temp = experimental_temp def validate_Nml2Quantity_none(self, value): # Validate type Nml2Quantity_none, a restriction on xs:string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -3396,26 +3068,6 @@ def factory(*args_, **kwargs_): else: return Q10Settings(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_type(self): - return self.type - def set_type(self, type): - self.type = type - def get_fixedQ10(self): - return self.fixed_q10 - def set_fixedQ10(self, fixed_q10): - self.fixed_q10 = fixed_q10 - def get_q10Factor(self): - return self.q10_factor - def set_q10Factor(self, q10_factor): - self.q10_factor = q10_factor - def get_experimentalTemp(self): - return self.experimental_temp - def set_experimentalTemp(self, experimental_temp): - self.experimental_temp = experimental_temp def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. 
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -3565,26 +3217,6 @@ def factory(*args_, **kwargs_): else: return HHRate(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_type(self): - return self.type - def set_type(self, type): - self.type = type - def get_rate(self): - return self.rate - def set_rate(self, rate): - self.rate = rate - def get_midpoint(self): - return self.midpoint - def set_midpoint(self, midpoint): - self.midpoint = midpoint - def get_scale(self): - return self.scale - def set_scale(self, scale): - self.scale = scale def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -3734,26 +3366,6 @@ def factory(*args_, **kwargs_): else: return HHVariable(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_type(self): - return self.type - def set_type(self, type): - self.type = type - def get_rate(self): - return self.rate - def set_rate(self, rate): - self.rate = rate - def get_midpoint(self): - return self.midpoint - def set_midpoint(self, midpoint): - self.midpoint = midpoint - def get_scale(self): - return self.scale - def set_scale(self, scale): - self.scale = scale def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -3895,30 +3507,6 @@ def factory(*args_, **kwargs_): else: return HHTime(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_type(self): - return self.type - def set_type(self, type): - self.type = type - def get_rate(self): - return self.rate - def set_rate(self, rate): - self.rate = rate - def get_midpoint(self): - return self.midpoint - def set_midpoint(self, midpoint): - self.midpoint = midpoint - def get_scale(self): - return self.scale - def set_scale(self, scale): - self.scale = scale - def get_tau(self): - return self.tau - def set_tau(self, tau): - self.tau = tau def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -4079,30 +3667,6 @@ def factory(*args_, **kwargs_): else: return BlockMechanism(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_type(self): - return self.type - def set_type(self, type): - self.type = type - def get_species(self): - return self.species - def set_species(self, species): - self.species = species - def get_blockConcentration(self): - return self.block_concentration - def set_blockConcentration(self, block_concentration): - self.block_concentration = block_concentration - def get_scalingConc(self): - return self.scaling_conc - def set_scalingConc(self, scaling_conc): - self.scaling_conc = scaling_conc - def get_scalingVolt(self): - return self.scaling_volt - def set_scalingVolt(self, scaling_volt): - self.scaling_volt = scaling_volt def validate_BlockTypes(self, value): # Validate type BlockTypes, a restriction on xs:string. 
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -4273,26 +3837,6 @@ def factory(*args_, **kwargs_): else: return PlasticityMechanism(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_type(self): - return self.type - def set_type(self, type): - self.type = type - def get_initReleaseProb(self): - return self.init_release_prob - def set_initReleaseProb(self, init_release_prob): - self.init_release_prob = init_release_prob - def get_tauRec(self): - return self.tau_rec - def set_tauRec(self, tau_rec): - self.tau_rec = tau_rec - def get_tauFac(self): - return self.tau_fac - def set_tauFac(self, tau_fac): - self.tau_fac = tau_fac def validate_PlasticityTypes(self, value): # Validate type PlasticityTypes, a restriction on xs:string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -4443,18 +3987,6 @@ def factory(*args_, **kwargs_): else: return SegmentParent(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_segment(self): - return self.segments - def set_segment(self, segments): - self.segments = segments - def get_fractionAlong(self): - return self.fraction_along - def set_fractionAlong(self, fraction_along): - self.fraction_along = fraction_along def validate_NonNegativeInteger(self, value): # Validate type NonNegativeInteger, a restriction on xs:nonNegativeInteger. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -4584,26 +4116,6 @@ def factory(*args_, **kwargs_): else: return Point3DWithDiam(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_x(self): - return self.x - def set_x(self, x): - self.x = x - def get_y(self): - return self.y - def set_y(self, y): - self.y = y - def get_z(self): - return self.z - def set_z(self, z): - self.z = z - def get_diameter(self): - return self.diameter - def set_diameter(self, diameter): - self.diameter = diameter def validate_DoubleGreaterThanZero(self, value): # Validate type DoubleGreaterThanZero, a restriction on xs:double. 
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -4751,14 +4263,6 @@ def factory(*args_, **kwargs_): else: return ProximalDetails(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_translationStart(self): - return self.translation_start - def set_translationStart(self, translation_start): - self.translation_start = translation_start def _hasContent(self): if ( @@ -4842,14 +4346,6 @@ def factory(*args_, **kwargs_): else: return DistalDetails(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_normalizationEnd(self): - return self.normalization_end - def set_normalizationEnd(self, normalization_end): - self.normalization_end = normalization_end def _hasContent(self): if ( @@ -4933,14 +4429,6 @@ def factory(*args_, **kwargs_): else: return Member(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_segment(self): - return self.segments - def set_segment(self, segments): - self.segments = segments def validate_NonNegativeInteger(self, value): # Validate type NonNegativeInteger, a restriction on xs:nonNegativeInteger. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -5034,14 +4522,6 @@ def factory(*args_, **kwargs_): else: return Include(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_segmentGroup(self): - return self.segment_groups - def set_segmentGroup(self, segment_groups): - self.segment_groups = segment_groups def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -5139,18 +4619,6 @@ def factory(*args_, **kwargs_): else: return Path(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_from(self): - return self.from_ - def set_from(self, from_): - self.from_ = from_ - def get_to(self): - return self.to - def set_to(self, to): - self.to = to def _hasContent(self): if ( self.from_ is not None or @@ -5251,18 +4719,6 @@ def factory(*args_, **kwargs_): else: return SubTree(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_from(self): - return self.from_ - def set_from(self, from_): - self.from_ = from_ - def get_to(self): - return self.to - def set_to(self, to): - self.to = to def _hasContent(self): if ( self.from_ is not None or @@ -5360,14 +4816,6 @@ def factory(*args_, **kwargs_): else: return SegmentEndPoint(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_segment(self): - return self.segments - def set_segment(self, segments): - self.segments = segments def validate_NonNegativeInteger(self, value): # Validate type NonNegativeInteger, a restriction on xs:nonNegativeInteger. 
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -5531,132 +4979,6 @@ def factory(*args_, **kwargs_): else: return MembraneProperties(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_channelPopulation(self): - return self.channel_populations - def set_channelPopulation(self, channel_populations): - self.channel_populations = channel_populations - def add_channelPopulation(self, value): - self.channel_populations.append(value) - def insert_channelPopulation_at(self, index, value): - self.channel_populations.insert(index, value) - def replace_channelPopulation_at(self, index, value): - self.channel_populations[index] = value - def get_channelDensity(self): - return self.channel_densities - def set_channelDensity(self, channel_densities): - self.channel_densities = channel_densities - def add_channelDensity(self, value): - self.channel_densities.append(value) - def insert_channelDensity_at(self, index, value): - self.channel_densities.insert(index, value) - def replace_channelDensity_at(self, index, value): - self.channel_densities[index] = value - def get_channelDensityVShift(self): - return self.channel_density_v_shifts - def set_channelDensityVShift(self, channel_density_v_shifts): - self.channel_density_v_shifts = channel_density_v_shifts - def add_channelDensityVShift(self, value): - self.channel_density_v_shifts.append(value) - def insert_channelDensityVShift_at(self, index, value): - self.channel_density_v_shifts.insert(index, value) - def replace_channelDensityVShift_at(self, index, value): - self.channel_density_v_shifts[index] = value - def get_channelDensityNernst(self): - return self.channel_density_nernsts - def set_channelDensityNernst(self, channel_density_nernsts): - self.channel_density_nernsts = channel_density_nernsts - def add_channelDensityNernst(self, value): - self.channel_density_nernsts.append(value) - def insert_channelDensityNernst_at(self, index, value): - self.channel_density_nernsts.insert(index, value) - def replace_channelDensityNernst_at(self, index, value): - self.channel_density_nernsts[index] = value - def get_channelDensityGHK(self): - return self.channel_density_ghks - def set_channelDensityGHK(self, channel_density_ghks): - self.channel_density_ghks = channel_density_ghks - def add_channelDensityGHK(self, value): - self.channel_density_ghks.append(value) - def insert_channelDensityGHK_at(self, index, value): - self.channel_density_ghks.insert(index, value) - def replace_channelDensityGHK_at(self, index, value): - self.channel_density_ghks[index] = value - def get_channelDensityGHK2(self): - return self.channel_density_ghk2s - def set_channelDensityGHK2(self, channel_density_ghk2s): - self.channel_density_ghk2s = channel_density_ghk2s - def add_channelDensityGHK2(self, value): - self.channel_density_ghk2s.append(value) - def insert_channelDensityGHK2_at(self, index, value): - self.channel_density_ghk2s.insert(index, value) - def replace_channelDensityGHK2_at(self, index, value): - self.channel_density_ghk2s[index] = value - def get_channelDensityNonUniform(self): - return self.channel_density_non_uniforms - def set_channelDensityNonUniform(self, channel_density_non_uniforms): - self.channel_density_non_uniforms = channel_density_non_uniforms - def add_channelDensityNonUniform(self, value): - self.channel_density_non_uniforms.append(value) - def insert_channelDensityNonUniform_at(self, 
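A practical consequence of regenerating with --use-getter-setter=none, sketched below; the Property arguments are illustrative and the import path is an assumption rather than something the patch states.

from neuroml.nml import nml        # assumed import path for the regenerated module

p = nml.Property(tag="region", value="soma")

# With the get_*/set_* accessors removed (see the deletions above), the
# generated members are plain attributes and are read or assigned directly.
print(p.tag, p.value)              # -> region soma
p.value = "dendrite"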
index, value): - self.channel_density_non_uniforms.insert(index, value) - def replace_channelDensityNonUniform_at(self, index, value): - self.channel_density_non_uniforms[index] = value - def get_channelDensityNonUniformNernst(self): - return self.channel_density_non_uniform_nernsts - def set_channelDensityNonUniformNernst(self, channel_density_non_uniform_nernsts): - self.channel_density_non_uniform_nernsts = channel_density_non_uniform_nernsts - def add_channelDensityNonUniformNernst(self, value): - self.channel_density_non_uniform_nernsts.append(value) - def insert_channelDensityNonUniformNernst_at(self, index, value): - self.channel_density_non_uniform_nernsts.insert(index, value) - def replace_channelDensityNonUniformNernst_at(self, index, value): - self.channel_density_non_uniform_nernsts[index] = value - def get_channelDensityNonUniformGHK(self): - return self.channel_density_non_uniform_ghks - def set_channelDensityNonUniformGHK(self, channel_density_non_uniform_ghks): - self.channel_density_non_uniform_ghks = channel_density_non_uniform_ghks - def add_channelDensityNonUniformGHK(self, value): - self.channel_density_non_uniform_ghks.append(value) - def insert_channelDensityNonUniformGHK_at(self, index, value): - self.channel_density_non_uniform_ghks.insert(index, value) - def replace_channelDensityNonUniformGHK_at(self, index, value): - self.channel_density_non_uniform_ghks[index] = value - def get_spikeThresh(self): - return self.spike_threshes - def set_spikeThresh(self, spike_threshes): - self.spike_threshes = spike_threshes - def add_spikeThresh(self, value): - self.spike_threshes.append(value) - def insert_spikeThresh_at(self, index, value): - self.spike_threshes.insert(index, value) - def replace_spikeThresh_at(self, index, value): - self.spike_threshes[index] = value - def get_specificCapacitance(self): - return self.specific_capacitances - def set_specificCapacitance(self, specific_capacitances): - self.specific_capacitances = specific_capacitances - def add_specificCapacitance(self, value): - self.specific_capacitances.append(value) - def insert_specificCapacitance_at(self, index, value): - self.specific_capacitances.insert(index, value) - def replace_specificCapacitance_at(self, index, value): - self.specific_capacitances[index] = value - def get_initMembPotential(self): - return self.init_memb_potentials - def set_initMembPotential(self, init_memb_potentials): - self.init_memb_potentials = init_memb_potentials - def add_initMembPotential(self, value): - self.init_memb_potentials.append(value) - def insert_initMembPotential_at(self, index, value): - self.init_memb_potentials.insert(index, value) - def replace_initMembPotential_at(self, index, value): - self.init_memb_potentials[index] = value - def get_extensiontype_(self): return self.extensiontype_ - def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ def _hasContent(self): if ( self.channel_populations or @@ -5861,20 +5183,6 @@ def factory(*args_, **kwargs_): else: return MembraneProperties2CaPools(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_channelDensityNernstCa2(self): - return self.channel_density_nernst_ca2s - def set_channelDensityNernstCa2(self, channel_density_nernst_ca2s): - self.channel_density_nernst_ca2s = channel_density_nernst_ca2s - def add_channelDensityNernstCa2(self, value): - self.channel_density_nernst_ca2s.append(value) - def 
insert_channelDensityNernstCa2_at(self, index, value): - self.channel_density_nernst_ca2s.insert(index, value) - def replace_channelDensityNernstCa2_at(self, index, value): - self.channel_density_nernst_ca2s[index] = value def _hasContent(self): if ( self.channel_density_nernst_ca2s or @@ -5972,18 +5280,6 @@ def factory(*args_, **kwargs_): else: return SpikeThresh(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_value(self): - return self.value - def set_value(self, value): - self.value = value - def get_segmentGroup(self): - return self.segment_groups - def set_segmentGroup(self, segment_groups): - self.segment_groups = segment_groups def validate_Nml2Quantity_voltage(self, value): # Validate type Nml2Quantity_voltage, a restriction on xs:string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -6103,18 +5399,6 @@ def factory(*args_, **kwargs_): else: return SpecificCapacitance(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_value(self): - return self.value - def set_value(self, value): - self.value = value - def get_segmentGroup(self): - return self.segment_groups - def set_segmentGroup(self, segment_groups): - self.segment_groups = segment_groups def validate_Nml2Quantity_specificCapacitance(self, value): # Validate type Nml2Quantity_specificCapacitance, a restriction on xs:string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -6234,18 +5518,6 @@ def factory(*args_, **kwargs_): else: return InitMembPotential(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_value(self): - return self.value - def set_value(self, value): - self.value = value - def get_segmentGroup(self): - return self.segment_groups - def set_segmentGroup(self, segment_groups): - self.segment_groups = segment_groups def validate_Nml2Quantity_voltage(self, value): # Validate type Nml2Quantity_voltage, a restriction on xs:string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -6365,18 +5637,6 @@ def factory(*args_, **kwargs_): else: return Resistivity(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_value(self): - return self.value - def set_value(self, value): - self.value = value - def get_segmentGroup(self): - return self.segment_groups - def set_segmentGroup(self, segment_groups): - self.segment_groups = segment_groups def validate_Nml2Quantity_resistivity(self, value): # Validate type Nml2Quantity_resistivity, a restriction on xs:string. 
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -6496,22 +5756,6 @@ def factory(*args_, **kwargs_): else: return VariableParameter(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_inhomogeneousValue(self): - return self.inhomogeneous_value - def set_inhomogeneousValue(self, inhomogeneous_value): - self.inhomogeneous_value = inhomogeneous_value - def get_parameter(self): - return self.parameter - def set_parameter(self, parameter): - self.parameter = parameter - def get_segmentGroup(self): - return self.segment_groups - def set_segmentGroup(self, segment_groups): - self.segment_groups = segment_groups def _hasContent(self): if ( self.inhomogeneous_value is not None @@ -6615,18 +5859,6 @@ def factory(*args_, **kwargs_): else: return InhomogeneousValue(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_inhomogeneousParameter(self): - return self.inhomogeneous_parameters - def set_inhomogeneousParameter(self, inhomogeneous_parameters): - self.inhomogeneous_parameters = inhomogeneous_parameters - def get_value(self): - return self.value - def set_value(self, value): - self.value = value def _hasContent(self): if ( @@ -6735,34 +5967,6 @@ def factory(*args_, **kwargs_): else: return Species(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_id(self): - return self.id - def set_id(self, id): - self.id = id - def get_concentrationModel(self): - return self.concentration_model - def set_concentrationModel(self, concentration_model): - self.concentration_model = concentration_model - def get_ion(self): - return self.ion - def set_ion(self, ion): - self.ion = ion - def get_initialConcentration(self): - return self.initial_concentration - def set_initialConcentration(self, initial_concentration): - self.initial_concentration = initial_concentration - def get_initialExtConcentration(self): - return self.initial_ext_concentration - def set_initialExtConcentration(self, initial_ext_concentration): - self.initial_ext_concentration = initial_ext_concentration - def get_segmentGroup(self): - return self.segment_groups - def set_segmentGroup(self, segment_groups): - self.segment_groups = segment_groups def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. 
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -6918,32 +6122,6 @@ def factory(*args_, **kwargs_): else: return IntracellularProperties(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_species(self): - return self.species - def set_species(self, species): - self.species = species - def add_species(self, value): - self.species.append(value) - def insert_species_at(self, index, value): - self.species.insert(index, value) - def replace_species_at(self, index, value): - self.species[index] = value - def get_resistivity(self): - return self.resistivities - def set_resistivity(self, resistivities): - self.resistivities = resistivities - def add_resistivity(self, value): - self.resistivities.append(value) - def insert_resistivity_at(self, index, value): - self.resistivities.insert(index, value) - def replace_resistivity_at(self, index, value): - self.resistivities[index] = value - def get_extensiontype_(self): return self.extensiontype_ - def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ def _hasContent(self): if ( self.species or @@ -7050,10 +6228,6 @@ def factory(*args_, **kwargs_): else: return IntracellularProperties2CaPools(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix def _hasContent(self): if ( super(IntracellularProperties2CaPools, self)._hasContent() @@ -7136,20 +6310,6 @@ def factory(*args_, **kwargs_): else: return ExtracellularPropertiesLocal(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_species(self): - return self.species - def set_species(self, species): - self.species = species - def add_species(self, value): - self.species.append(value) - def insert_species_at(self, index, value): - self.species.insert(index, value) - def replace_species_at(self, index, value): - self.species[index] = value def _hasContent(self): if ( self.species @@ -7253,34 +6413,6 @@ def factory(*args_, **kwargs_): else: return SpaceStructure(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_xSpacing(self): - return self.x_spacing - def set_xSpacing(self, x_spacing): - self.x_spacing = x_spacing - def get_ySpacing(self): - return self.y_spacing - def set_ySpacing(self, y_spacing): - self.y_spacing = y_spacing - def get_zSpacing(self): - return self.z_spacing - def set_zSpacing(self, z_spacing): - self.z_spacing = z_spacing - def get_xStart(self): - return self.x_start - def set_xStart(self, x_start): - self.x_start = x_start - def get_yStart(self): - return self.y_start - def set_yStart(self, y_start): - self.y_start = y_start - def get_zStart(self): - return self.z_start - def set_zStart(self, z_start): - self.z_start = z_start def _hasContent(self): if ( @@ -7413,26 +6545,6 @@ def factory(*args_, **kwargs_): else: return Layout(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_random(self): - return self.random - def set_random(self, random): - self.random = random - def 
get_grid(self): - return self.grid - def set_grid(self, grid): - self.grid = grid - def get_unstructured(self): - return self.unstructured - def set_unstructured(self, unstructured): - self.unstructured = unstructured - def get_space(self): - return self.spaces - def set_space(self, spaces): - self.spaces = spaces def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -7556,14 +6668,6 @@ def factory(*args_, **kwargs_): else: return UnstructuredLayout(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_number(self): - return self.number - def set_number(self, number): - self.number = number def _hasContent(self): if ( @@ -7651,18 +6755,6 @@ def factory(*args_, **kwargs_): else: return RandomLayout(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_number(self): - return self.number - def set_number(self, number): - self.number = number - def get_region(self): - return self.regions - def set_region(self, regions): - self.regions = regions def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -7772,22 +6864,6 @@ def factory(*args_, **kwargs_): else: return GridLayout(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_xSize(self): - return self.x_size - def set_xSize(self, x_size): - self.x_size = x_size - def get_ySize(self): - return self.y_size - def set_ySize(self, y_size): - self.y_size = y_size - def get_zSize(self): - return self.z_size - def set_zSize(self, z_size): - self.z_size = z_size def _hasContent(self): if ( @@ -7902,30 +6978,6 @@ def factory(*args_, **kwargs_): else: return Instance(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_location(self): - return self.location - def set_location(self, location): - self.location = location - def get_id(self): - return self.id - def set_id(self, id): - self.id = id - def get_i(self): - return self.i - def set_i(self, i): - self.i = i - def get_j(self): - return self.j - def set_j(self, j): - self.j = j - def get_k(self): - return self.k - def set_k(self, k): - self.k = k def _hasContent(self): if ( self.location is not None @@ -8063,22 +7115,6 @@ def factory(*args_, **kwargs_): else: return Location(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_x(self): - return self.x - def set_x(self, x): - self.x = x - def get_y(self): - return self.y - def set_y(self, y): - self.y = y - def get_z(self): - return self.z - def set_z(self, z): - self.z = z def _hasContent(self): if ( @@ -8207,26 +7243,6 @@ def factory(*args_, **kwargs_): else: return SynapticConnection(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_from(self): - return 
self.from_ - def set_from(self, from_): - self.from_ = from_ - def get_to(self): - return self.to - def set_to(self, to): - self.to = to - def get_synapse(self): - return self.synapse - def set_synapse(self, synapse): - self.synapse = synapse - def get_destination(self): - return self.destination - def set_destination(self, destination): - self.destination = destination def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -8373,22 +7389,6 @@ def factory(*args_, **kwargs_): else: return ExplicitInput(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_target(self): - return self.target - def set_target(self, target): - self.target = target - def get_input(self): - return self.input - def set_input(self, input): - self.input = input - def get_destination(self): - return self.destination - def set_destination(self, destination): - self.destination = destination def _hasContent(self): if ( @@ -8553,32 +7553,6 @@ def factory(*args_, **kwargs_): else: return Input(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_id(self): - return self.id - def set_id(self, id): - self.id = id - def get_target(self): - return self.target - def set_target(self, target): - self.target = target - def get_destination(self): - return self.destination - def set_destination(self, destination): - self.destination = destination - def get_segmentId(self): - return self.segment_id - def set_segmentId(self, segment_id): - self.segment_id = segment_id - def get_fractionAlong(self): - return self.fraction_along - def set_fractionAlong(self, fraction_along): - self.fraction_along = fraction_along - def get_extensiontype_(self): return self.extensiontype_ - def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ def validate_NonNegativeInteger(self, value): # Validate type NonNegativeInteger, a restriction on xs:nonNegativeInteger. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -8779,14 +7753,6 @@ def factory(*args_, **kwargs_): else: return InputW(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_weight(self): - return self.weight - def set_weight(self, weight): - self.weight = weight def _hasContent(self): if ( super(InputW, self)._hasContent() @@ -8891,16 +7857,6 @@ def factory(*args_, **kwargs_): else: return BaseWithoutId(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_neuroLexId(self): - return self.neuro_lex_id - def set_neuroLexId(self, neuro_lex_id): - self.neuro_lex_id = neuro_lex_id - def get_extensiontype_(self): return self.extensiontype_ - def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ def validate_NeuroLexId(self, value): # Validate type NeuroLexId, a restriction on xs:string. 
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -9012,16 +7968,6 @@ def factory(*args_, **kwargs_): else: return BaseNonNegativeIntegerId(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_id(self): - return self.id - def set_id(self, id): - self.id = id - def get_extensiontype_(self): return self.extensiontype_ - def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ def validate_NonNegativeInteger(self, value): # Validate type NonNegativeInteger, a restriction on xs:nonNegativeInteger. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -9136,16 +8082,6 @@ def factory(*args_, **kwargs_): else: return Base(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_id(self): - return self.id - def set_id(self, id): - self.id = id - def get_extensiontype_(self): return self.extensiontype_ - def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -9274,34 +8210,6 @@ def factory(*args_, **kwargs_): else: return Standalone(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_notes(self): - return self.notes - def set_notes(self, notes): - self.notes = notes - def get_property(self): - return self.properties - def set_property(self, properties): - self.properties = properties - def add_property(self, value): - self.properties.append(value) - def insert_property_at(self, index, value): - self.properties.insert(index, value) - def replace_property_at(self, index, value): - self.properties[index] = value - def get_annotation(self): - return self.annotation - def set_annotation(self, annotation): - self.annotation = annotation - def get_metaid(self): - return self.metaid - def set_metaid(self, metaid): - self.metaid = metaid - def get_extensiontype_(self): return self.extensiontype_ - def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ def validate_Notes(self, value): result = True # Validate type Notes, a restriction on xs:string. @@ -9463,22 +8371,6 @@ def factory(*args_, **kwargs_): else: return SpikeSourcePoisson(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_start(self): - return self.start - def set_start(self, start): - self.start = start - def get_duration(self): - return self.duration - def set_duration(self, duration): - self.duration = duration - def get_rate(self): - return self.rate - def set_rate(self, rate): - self.rate = rate def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. 
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -9623,38 +8515,6 @@ def factory(*args_, **kwargs_): else: return InputList(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_input(self): - return self.input - def set_input(self, input): - self.input = input - def add_input(self, value): - self.input.append(value) - def insert_input_at(self, index, value): - self.input.insert(index, value) - def replace_input_at(self, index, value): - self.input[index] = value - def get_inputW(self): - return self.input_ws - def set_inputW(self, input_ws): - self.input_ws = input_ws - def add_inputW(self, value): - self.input_ws.append(value) - def insert_inputW_at(self, index, value): - self.input_ws.insert(index, value) - def replace_inputW_at(self, index, value): - self.input_ws[index] = value - def get_population(self): - return self.populations - def set_population(self, populations): - self.populations = populations - def get_component(self): - return self.component - def set_component(self, component): - self.component = component def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -9842,12 +8702,6 @@ def factory(*args_, **kwargs_): else: return BaseConnection(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_extensiontype_(self): return self.extensiontype_ - def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ def _hasContent(self): if ( super(BaseConnection, self)._hasContent() @@ -9947,20 +8801,6 @@ def factory(*args_, **kwargs_): else: return BaseProjection(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_presynapticPopulation(self): - return self.presynaptic_population - def set_presynapticPopulation(self, presynaptic_population): - self.presynaptic_population = presynaptic_population - def get_postsynapticPopulation(self): - return self.postsynaptic_population - def set_postsynapticPopulation(self, postsynaptic_population): - self.postsynaptic_population = postsynaptic_population - def get_extensiontype_(self): return self.extensiontype_ - def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. 
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -10085,18 +8925,6 @@ def factory(*args_, **kwargs_): else: return CellSet(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_anytypeobjs_(self): return self.anytypeobjs_ - def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_ - def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value) - def insert_anytypeobjs_(self, index, value): self._anytypeobjs_[index] = value - def get_select(self): - return self.select - def set_select(self, select): - self.select = select def _hasContent(self): if ( self.anytypeobjs_ or @@ -10163,7 +8991,7 @@ def _buildAttributes(self, node, attrs, already_processed): super(CellSet, self)._buildAttributes(node, attrs, already_processed) def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): content_ = self.gds_build_any(child_, 'CellSet') - self.add_anytypeobjs_(content_) + self.anytypeobjs_.append(content_) super(CellSet, self)._buildChildren(child_, node, nodeName_, True) # end class CellSet @@ -10213,40 +9041,6 @@ def factory(*args_, **kwargs_): else: return Population(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_layout(self): - return self.layout - def set_layout(self, layout): - self.layout = layout - def get_instance(self): - return self.instances - def set_instance(self, instances): - self.instances = instances - def add_instance(self, value): - self.instances.append(value) - def insert_instance_at(self, index, value): - self.instances.insert(index, value) - def replace_instance_at(self, index, value): - self.instances[index] = value - def get_component(self): - return self.component - def set_component(self, component): - self.component = component - def get_size(self): - return self.size - def set_size(self, size): - self.size = size - def get_type(self): - return self.type - def set_type(self, type): - self.type = type - def get_extracellularProperties(self): - return self.extracellular_properties - def set_extracellularProperties(self, extracellular_properties): - self.extracellular_properties = extracellular_properties def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -10466,18 +9260,6 @@ def factory(*args_, **kwargs_): else: return Region(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_anytypeobjs_(self): return self.anytypeobjs_ - def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_ - def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value) - def insert_anytypeobjs_(self, index, value): self._anytypeobjs_[index] = value - def get_space(self): - return self.spaces - def set_space(self, spaces): - self.spaces = spaces def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. 
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -10556,7 +9338,7 @@ def _buildAttributes(self, node, attrs, already_processed): super(Region, self)._buildAttributes(node, attrs, already_processed) def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): content_ = self.gds_build_any(child_, 'Region') - self.add_anytypeobjs_(content_) + self.anytypeobjs_.append(content_) super(Region, self)._buildChildren(child_, node, nodeName_, True) # end class Region @@ -10591,18 +9373,6 @@ def factory(*args_, **kwargs_): else: return Space(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_structure(self): - return self.structure - def set_structure(self, structure): - self.structure = structure - def get_basedOn(self): - return self.based_on - def set_basedOn(self, based_on): - self.based_on = based_on def validate_allowedSpaces(self, value): # Validate type allowedSpaces, a restriction on xs:string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -10785,128 +9555,6 @@ def factory(*args_, **kwargs_): else: return Network(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_space(self): - return self.spaces - def set_space(self, spaces): - self.spaces = spaces - def add_space(self, value): - self.spaces.append(value) - def insert_space_at(self, index, value): - self.spaces.insert(index, value) - def replace_space_at(self, index, value): - self.spaces[index] = value - def get_region(self): - return self.regions - def set_region(self, regions): - self.regions = regions - def add_region(self, value): - self.regions.append(value) - def insert_region_at(self, index, value): - self.regions.insert(index, value) - def replace_region_at(self, index, value): - self.regions[index] = value - def get_extracellularProperties(self): - return self.extracellular_properties - def set_extracellularProperties(self, extracellular_properties): - self.extracellular_properties = extracellular_properties - def add_extracellularProperties(self, value): - self.extracellular_properties.append(value) - def insert_extracellularProperties_at(self, index, value): - self.extracellular_properties.insert(index, value) - def replace_extracellularProperties_at(self, index, value): - self.extracellular_properties[index] = value - def get_population(self): - return self.populations - def set_population(self, populations): - self.populations = populations - def add_population(self, value): - self.populations.append(value) - def insert_population_at(self, index, value): - self.populations.insert(index, value) - def replace_population_at(self, index, value): - self.populations[index] = value - def get_cellSet(self): - return self.cell_sets - def set_cellSet(self, cell_sets): - self.cell_sets = cell_sets - def add_cellSet(self, value): - self.cell_sets.append(value) - def insert_cellSet_at(self, index, value): - self.cell_sets.insert(index, value) - def replace_cellSet_at(self, index, value): - self.cell_sets[index] = value - def get_synapticConnection(self): - return self.synaptic_connections - def set_synapticConnection(self, synaptic_connections): - self.synaptic_connections = synaptic_connections - def add_synapticConnection(self, value): - 
self.synaptic_connections.append(value) - def insert_synapticConnection_at(self, index, value): - self.synaptic_connections.insert(index, value) - def replace_synapticConnection_at(self, index, value): - self.synaptic_connections[index] = value - def get_projection(self): - return self.projections - def set_projection(self, projections): - self.projections = projections - def add_projection(self, value): - self.projections.append(value) - def insert_projection_at(self, index, value): - self.projections.insert(index, value) - def replace_projection_at(self, index, value): - self.projections[index] = value - def get_electricalProjection(self): - return self.electrical_projections - def set_electricalProjection(self, electrical_projections): - self.electrical_projections = electrical_projections - def add_electricalProjection(self, value): - self.electrical_projections.append(value) - def insert_electricalProjection_at(self, index, value): - self.electrical_projections.insert(index, value) - def replace_electricalProjection_at(self, index, value): - self.electrical_projections[index] = value - def get_continuousProjection(self): - return self.continuous_projections - def set_continuousProjection(self, continuous_projections): - self.continuous_projections = continuous_projections - def add_continuousProjection(self, value): - self.continuous_projections.append(value) - def insert_continuousProjection_at(self, index, value): - self.continuous_projections.insert(index, value) - def replace_continuousProjection_at(self, index, value): - self.continuous_projections[index] = value - def get_explicitInput(self): - return self.explicit_inputs - def set_explicitInput(self, explicit_inputs): - self.explicit_inputs = explicit_inputs - def add_explicitInput(self, value): - self.explicit_inputs.append(value) - def insert_explicitInput_at(self, index, value): - self.explicit_inputs.insert(index, value) - def replace_explicitInput_at(self, index, value): - self.explicit_inputs[index] = value - def get_inputList(self): - return self.input_lists - def set_inputList(self, input_lists): - self.input_lists = input_lists - def add_inputList(self, value): - self.input_lists.append(value) - def insert_inputList_at(self, index, value): - self.input_lists.insert(index, value) - def replace_inputList_at(self, index, value): - self.input_lists[index] = value - def get_type(self): - return self.type - def set_type(self, type): - self.type = type - def get_temperature(self): - return self.temperature - def set_temperature(self, temperature): - self.temperature = temperature def validate_networkTypes(self, value): # Validate type networkTypes, a restriction on xs:string. 
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -11208,30 +9856,6 @@ def factory(*args_, **kwargs_): else: return TransientPoissonFiringSynapse(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_averageRate(self): - return self.average_rate - def set_averageRate(self, average_rate): - self.average_rate = average_rate - def get_delay(self): - return self.delay - def set_delay(self, delay): - self.delay = delay - def get_duration(self): - return self.duration - def set_duration(self, duration): - self.duration = duration - def get_synapse(self): - return self.synapse - def set_synapse(self, synapse): - self.synapse = synapse - def get_spikeTarget(self): - return self.spike_target - def set_spikeTarget(self, spike_target): - self.spike_target = spike_target def validate_Nml2Quantity_pertime(self, value): # Validate type Nml2Quantity_pertime, a restriction on xs:string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -11378,22 +10002,6 @@ def factory(*args_, **kwargs_): else: return PoissonFiringSynapse(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_averageRate(self): - return self.average_rate - def set_averageRate(self, average_rate): - self.average_rate = average_rate - def get_synapse(self): - return self.synapse - def set_synapse(self, synapse): - self.synapse = synapse - def get_spikeTarget(self): - return self.spike_target - def set_spikeTarget(self, spike_target): - self.spike_target = spike_target def validate_Nml2Quantity_pertime(self, value): # Validate type Nml2Quantity_pertime, a restriction on xs:string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -11508,16 +10116,6 @@ def factory(*args_, **kwargs_): else: return SpikeGeneratorPoisson(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_averageRate(self): - return self.average_rate - def set_averageRate(self, average_rate): - self.average_rate = average_rate - def get_extensiontype_(self): return self.extensiontype_ - def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ def validate_Nml2Quantity_pertime(self, value): # Validate type Nml2Quantity_pertime, a restriction on xs:string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -11632,18 +10230,6 @@ def factory(*args_, **kwargs_): else: return SpikeGeneratorRandom(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_maxISI(self): - return self.max_isi - def set_maxISI(self, max_isi): - self.max_isi = max_isi - def get_minISI(self): - return self.min_isi - def set_minISI(self, min_isi): - self.min_isi = min_isi def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. 
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -11751,14 +10337,6 @@ def factory(*args_, **kwargs_): else: return SpikeGenerator(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_period(self): - return self.period - def set_period(self, period): - self.period = period def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -11867,28 +10445,6 @@ def factory(*args_, **kwargs_): else: return TimedSynapticInput(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_spike(self): - return self.spikes - def set_spike(self, spikes): - self.spikes = spikes - def add_spike(self, value): - self.spikes.append(value) - def insert_spike_at(self, index, value): - self.spikes.insert(index, value) - def replace_spike_at(self, index, value): - self.spikes[index] = value - def get_synapse(self): - return self.synapse - def set_synapse(self, synapse): - self.synapse = synapse - def get_spikeTarget(self): - return self.spike_target - def set_spikeTarget(self, spike_target): - self.spike_target = spike_target def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -12010,20 +10566,6 @@ def factory(*args_, **kwargs_): else: return SpikeArray(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_spike(self): - return self.spikes - def set_spike(self, spikes): - self.spikes = spikes - def add_spike(self, value): - self.spikes.append(value) - def insert_spike_at(self, index, value): - self.spikes.insert(index, value) - def replace_spike_at(self, index, value): - self.spikes[index] = value def _hasContent(self): if ( self.spikes or @@ -12116,14 +10658,6 @@ def factory(*args_, **kwargs_): else: return Spike(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_time(self): - return self.time - def set_time(self, time): - self.time = time def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. 
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -12241,38 +10775,6 @@ def factory(*args_, **kwargs_): else: return VoltageClampTriple(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_active(self): - return self.active - def set_active(self, active): - self.active = active - def get_delay(self): - return self.delay - def set_delay(self, delay): - self.delay = delay - def get_duration(self): - return self.duration - def set_duration(self, duration): - self.duration = duration - def get_conditioningVoltage(self): - return self.conditioning_voltage - def set_conditioningVoltage(self, conditioning_voltage): - self.conditioning_voltage = conditioning_voltage - def get_testingVoltage(self): - return self.testing_voltage - def set_testingVoltage(self, testing_voltage): - self.testing_voltage = testing_voltage - def get_returnVoltage(self): - return self.return_voltage - def set_returnVoltage(self, return_voltage): - self.return_voltage = return_voltage - def get_simpleSeriesResistance(self): - return self.simple_series_resistance - def set_simpleSeriesResistance(self, simple_series_resistance): - self.simple_series_resistance = simple_series_resistance def validate_ZeroOrOne(self, value): # Validate type ZeroOrOne, a restriction on xs:double. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -12465,26 +10967,6 @@ def factory(*args_, **kwargs_): else: return VoltageClamp(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_delay(self): - return self.delay - def set_delay(self, delay): - self.delay = delay - def get_duration(self): - return self.duration - def set_duration(self, duration): - self.duration = duration - def get_targetVoltage(self): - return self.target_voltage - def set_targetVoltage(self, target_voltage): - self.target_voltage = target_voltage - def get_simpleSeriesResistance(self): - return self.simple_series_resistance - def set_simpleSeriesResistance(self, simple_series_resistance): - self.simple_series_resistance = simple_series_resistance def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. 
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -12645,40 +11127,6 @@ def factory(*args_, **kwargs_): else: return CompoundInputDL(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_pulseGeneratorDL(self): - return self.pulse_generator_dls - def set_pulseGeneratorDL(self, pulse_generator_dls): - self.pulse_generator_dls = pulse_generator_dls - def add_pulseGeneratorDL(self, value): - self.pulse_generator_dls.append(value) - def insert_pulseGeneratorDL_at(self, index, value): - self.pulse_generator_dls.insert(index, value) - def replace_pulseGeneratorDL_at(self, index, value): - self.pulse_generator_dls[index] = value - def get_sineGeneratorDL(self): - return self.sine_generator_dls - def set_sineGeneratorDL(self, sine_generator_dls): - self.sine_generator_dls = sine_generator_dls - def add_sineGeneratorDL(self, value): - self.sine_generator_dls.append(value) - def insert_sineGeneratorDL_at(self, index, value): - self.sine_generator_dls.insert(index, value) - def replace_sineGeneratorDL_at(self, index, value): - self.sine_generator_dls[index] = value - def get_rampGeneratorDL(self): - return self.ramp_generator_dls - def set_rampGeneratorDL(self, ramp_generator_dls): - self.ramp_generator_dls = ramp_generator_dls - def add_rampGeneratorDL(self, value): - self.ramp_generator_dls.append(value) - def insert_rampGeneratorDL_at(self, index, value): - self.ramp_generator_dls.insert(index, value) - def replace_rampGeneratorDL_at(self, index, value): - self.ramp_generator_dls[index] = value def _hasContent(self): if ( self.pulse_generator_dls or @@ -12804,40 +11252,6 @@ def factory(*args_, **kwargs_): else: return CompoundInput(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_pulseGenerator(self): - return self.pulse_generators - def set_pulseGenerator(self, pulse_generators): - self.pulse_generators = pulse_generators - def add_pulseGenerator(self, value): - self.pulse_generators.append(value) - def insert_pulseGenerator_at(self, index, value): - self.pulse_generators.insert(index, value) - def replace_pulseGenerator_at(self, index, value): - self.pulse_generators[index] = value - def get_sineGenerator(self): - return self.sine_generators - def set_sineGenerator(self, sine_generators): - self.sine_generators = sine_generators - def add_sineGenerator(self, value): - self.sine_generators.append(value) - def insert_sineGenerator_at(self, index, value): - self.sine_generators.insert(index, value) - def replace_sineGenerator_at(self, index, value): - self.sine_generators[index] = value - def get_rampGenerator(self): - return self.ramp_generators - def set_rampGenerator(self, ramp_generators): - self.ramp_generators = ramp_generators - def add_rampGenerator(self, value): - self.ramp_generators.append(value) - def insert_rampGenerator_at(self, index, value): - self.ramp_generators.insert(index, value) - def replace_rampGenerator_at(self, index, value): - self.ramp_generators[index] = value def _hasContent(self): if ( self.pulse_generators or @@ -12960,30 +11374,6 @@ def factory(*args_, **kwargs_): else: return RampGeneratorDL(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def 
get_delay(self): - return self.delay - def set_delay(self, delay): - self.delay = delay - def get_duration(self): - return self.duration - def set_duration(self, duration): - self.duration = duration - def get_startAmplitude(self): - return self.start_amplitude - def set_startAmplitude(self, start_amplitude): - self.start_amplitude = start_amplitude - def get_finishAmplitude(self): - return self.finish_amplitude - def set_finishAmplitude(self, finish_amplitude): - self.finish_amplitude = finish_amplitude - def get_baselineAmplitude(self): - return self.baseline_amplitude - def set_baselineAmplitude(self, baseline_amplitude): - self.baseline_amplitude = baseline_amplitude def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -13138,30 +11528,6 @@ def factory(*args_, **kwargs_): else: return RampGenerator(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_delay(self): - return self.delay - def set_delay(self, delay): - self.delay = delay - def get_duration(self): - return self.duration - def set_duration(self, duration): - self.duration = duration - def get_startAmplitude(self): - return self.start_amplitude - def set_startAmplitude(self, start_amplitude): - self.start_amplitude = start_amplitude - def get_finishAmplitude(self): - return self.finish_amplitude - def set_finishAmplitude(self, finish_amplitude): - self.finish_amplitude = finish_amplitude - def get_baselineAmplitude(self): - return self.baseline_amplitude - def set_baselineAmplitude(self, baseline_amplitude): - self.baseline_amplitude = baseline_amplitude def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -13316,30 +11682,6 @@ def factory(*args_, **kwargs_): else: return SineGeneratorDL(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_delay(self): - return self.delay - def set_delay(self, delay): - self.delay = delay - def get_phase(self): - return self.phase - def set_phase(self, phase): - self.phase = phase - def get_duration(self): - return self.duration - def set_duration(self, duration): - self.duration = duration - def get_amplitude(self): - return self.amplitude - def set_amplitude(self, amplitude): - self.amplitude = amplitude - def get_period(self): - return self.period - def set_period(self, period): - self.period = period def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. 
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -13494,30 +11836,6 @@ def factory(*args_, **kwargs_): else: return SineGenerator(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_delay(self): - return self.delay - def set_delay(self, delay): - self.delay = delay - def get_phase(self): - return self.phase - def set_phase(self, phase): - self.phase = phase - def get_duration(self): - return self.duration - def set_duration(self, duration): - self.duration = duration - def get_amplitude(self): - return self.amplitude - def set_amplitude(self, amplitude): - self.amplitude = amplitude - def get_period(self): - return self.period - def set_period(self, period): - self.period = period def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -13680,22 +11998,6 @@ def factory(*args_, **kwargs_): else: return PulseGeneratorDL(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_delay(self): - return self.delay - def set_delay(self, delay): - self.delay = delay - def get_duration(self): - return self.duration - def set_duration(self, duration): - self.duration = duration - def get_amplitude(self): - return self.amplitude - def set_amplitude(self, amplitude): - self.amplitude = amplitude def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -13831,22 +12133,6 @@ def factory(*args_, **kwargs_): else: return PulseGenerator(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_delay(self): - return self.delay - def set_delay(self, delay): - self.delay = delay - def get_duration(self): - return self.duration - def set_duration(self, duration): - self.duration = duration - def get_amplitude(self): - return self.amplitude - def set_amplitude(self, amplitude): - self.amplitude = amplitude def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. 
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -13981,22 +12267,6 @@ def factory(*args_, **kwargs_): else: return ReactionScheme(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_anytypeobjs_(self): return self.anytypeobjs_ - def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_ - def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value) - def insert_anytypeobjs_(self, index, value): self._anytypeobjs_[index] = value - def get_source(self): - return self.source - def set_source(self, source): - self.source = source - def get_type(self): - return self.type - def set_type(self, type): - self.type = type def _hasContent(self): if ( self.anytypeobjs_ or @@ -14070,7 +12340,7 @@ def _buildAttributes(self, node, attrs, already_processed): super(ReactionScheme, self)._buildAttributes(node, attrs, already_processed) def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): content_ = self.gds_build_any(child_, 'ReactionScheme') - self.add_anytypeobjs_(content_) + self.anytypeobjs_.append(content_) super(ReactionScheme, self)._buildChildren(child_, node, nodeName_, True) # end class ReactionScheme @@ -14105,20 +12375,6 @@ def factory(*args_, **kwargs_): else: return ExtracellularProperties(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_species(self): - return self.species - def set_species(self, species): - self.species = species - def add_species(self, value): - self.species.append(value) - def insert_species_at(self, index, value): - self.species.insert(index, value) - def replace_species_at(self, index, value): - self.species[index] = value def _hasContent(self): if ( self.species or @@ -14231,30 +12487,6 @@ def factory(*args_, **kwargs_): else: return ChannelDensityGHK2(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_ionChannel(self): - return self.ion_channel - def set_ionChannel(self, ion_channel): - self.ion_channel = ion_channel - def get_condDensity(self): - return self.cond_density - def set_condDensity(self, cond_density): - self.cond_density = cond_density - def get_segmentGroup(self): - return self.segment_groups - def set_segmentGroup(self, segment_groups): - self.segment_groups = segment_groups - def get_segment(self): - return self.segments - def set_segment(self, segments): - self.segments = segments - def get_ion(self): - return self.ion - def set_ion(self, ion): - self.ion = ion def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. 
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -14417,30 +12649,6 @@ def factory(*args_, **kwargs_): else: return ChannelDensityGHK(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_ionChannel(self): - return self.ion_channel - def set_ionChannel(self, ion_channel): - self.ion_channel = ion_channel - def get_permeability(self): - return self.permeability - def set_permeability(self, permeability): - self.permeability = permeability - def get_segmentGroup(self): - return self.segment_groups - def set_segmentGroup(self, segment_groups): - self.segment_groups = segment_groups - def get_segment(self): - return self.segments - def set_segment(self, segments): - self.segments = segments - def get_ion(self): - return self.ion - def set_ion(self, ion): - self.ion = ion def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -14610,42 +12818,6 @@ def factory(*args_, **kwargs_): else: return ChannelDensityNernst(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_variableParameter(self): - return self.variable_parameters - def set_variableParameter(self, variable_parameters): - self.variable_parameters = variable_parameters - def add_variableParameter(self, value): - self.variable_parameters.append(value) - def insert_variableParameter_at(self, index, value): - self.variable_parameters.insert(index, value) - def replace_variableParameter_at(self, index, value): - self.variable_parameters[index] = value - def get_ionChannel(self): - return self.ion_channel - def set_ionChannel(self, ion_channel): - self.ion_channel = ion_channel - def get_condDensity(self): - return self.cond_density - def set_condDensity(self, cond_density): - self.cond_density = cond_density - def get_segmentGroup(self): - return self.segment_groups - def set_segmentGroup(self, segment_groups): - self.segment_groups = segment_groups - def get_segment(self): - return self.segments - def set_segment(self, segments): - self.segments = segments - def get_ion(self): - return self.ion - def set_ion(self, ion): - self.ion = ion - def get_extensiontype_(self): return self.extensiontype_ - def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. 
         if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
@@ -14842,46 +13014,6 @@ def factory(*args_, **kwargs_):
         else:
             return ChannelDensity(*args_, **kwargs_)
     factory = staticmethod(factory)
-    def get_ns_prefix_(self):
-        return self.ns_prefix_
-    def set_ns_prefix_(self, ns_prefix):
-        self.ns_prefix_ = ns_prefix
-    def get_variableParameter(self):
-        return self.variable_parameters
-    def set_variableParameter(self, variable_parameters):
-        self.variable_parameters = variable_parameters
-    def add_variableParameter(self, value):
-        self.variable_parameters.append(value)
-    def insert_variableParameter_at(self, index, value):
-        self.variable_parameters.insert(index, value)
-    def replace_variableParameter_at(self, index, value):
-        self.variable_parameters[index] = value
-    def get_ionChannel(self):
-        return self.ion_channel
-    def set_ionChannel(self, ion_channel):
-        self.ion_channel = ion_channel
-    def get_condDensity(self):
-        return self.cond_density
-    def set_condDensity(self, cond_density):
-        self.cond_density = cond_density
-    def get_erev(self):
-        return self.erev
-    def set_erev(self, erev):
-        self.erev = erev
-    def get_segmentGroup(self):
-        return self.segment_groups
-    def set_segmentGroup(self, segment_groups):
-        self.segment_groups = segment_groups
-    def get_segment(self):
-        return self.segments
-    def set_segment(self, segments):
-        self.segments = segments
-    def get_ion(self):
-        return self.ion
-    def set_ion(self, ion):
-        self.ion = ion
-    def get_extensiontype_(self): return self.extensiontype_
-    def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_
     def validate_NmlId(self, value):
         # Validate type NmlId, a restriction on xs:string.
         if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
@@ -15094,28 +13226,6 @@ def factory(*args_, **kwargs_):
         else:
             return ChannelDensityNonUniformGHK(*args_, **kwargs_)
     factory = staticmethod(factory)
-    def get_ns_prefix_(self):
-        return self.ns_prefix_
-    def set_ns_prefix_(self, ns_prefix):
-        self.ns_prefix_ = ns_prefix
-    def get_variableParameter(self):
-        return self.variable_parameters
-    def set_variableParameter(self, variable_parameters):
-        self.variable_parameters = variable_parameters
-    def add_variableParameter(self, value):
-        self.variable_parameters.append(value)
-    def insert_variableParameter_at(self, index, value):
-        self.variable_parameters.insert(index, value)
-    def replace_variableParameter_at(self, index, value):
-        self.variable_parameters[index] = value
-    def get_ionChannel(self):
-        return self.ion_channel
-    def set_ionChannel(self, ion_channel):
-        self.ion_channel = ion_channel
-    def get_ion(self):
-        return self.ion
-    def set_ion(self, ion):
-        self.ion = ion
     def validate_NmlId(self, value):
         # Validate type NmlId, a restriction on xs:string.
         if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
@@ -15252,28 +13362,6 @@ def factory(*args_, **kwargs_):
         else:
             return ChannelDensityNonUniformNernst(*args_, **kwargs_)
     factory = staticmethod(factory)
-    def get_ns_prefix_(self):
-        return self.ns_prefix_
-    def set_ns_prefix_(self, ns_prefix):
-        self.ns_prefix_ = ns_prefix
-    def get_variableParameter(self):
-        return self.variable_parameters
-    def set_variableParameter(self, variable_parameters):
-        self.variable_parameters = variable_parameters
-    def add_variableParameter(self, value):
-        self.variable_parameters.append(value)
-    def insert_variableParameter_at(self, index, value):
-        self.variable_parameters.insert(index, value)
-    def replace_variableParameter_at(self, index, value):
-        self.variable_parameters[index] = value
-    def get_ionChannel(self):
-        return self.ion_channel
-    def set_ionChannel(self, ion_channel):
-        self.ion_channel = ion_channel
-    def get_ion(self):
-        return self.ion
-    def set_ion(self, ion):
-        self.ion = ion
     def validate_NmlId(self, value):
         # Validate type NmlId, a restriction on xs:string.
         if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
@@ -15413,32 +13501,6 @@ def factory(*args_, **kwargs_):
         else:
             return ChannelDensityNonUniform(*args_, **kwargs_)
     factory = staticmethod(factory)
-    def get_ns_prefix_(self):
-        return self.ns_prefix_
-    def set_ns_prefix_(self, ns_prefix):
-        self.ns_prefix_ = ns_prefix
-    def get_variableParameter(self):
-        return self.variable_parameters
-    def set_variableParameter(self, variable_parameters):
-        self.variable_parameters = variable_parameters
-    def add_variableParameter(self, value):
-        self.variable_parameters.append(value)
-    def insert_variableParameter_at(self, index, value):
-        self.variable_parameters.insert(index, value)
-    def replace_variableParameter_at(self, index, value):
-        self.variable_parameters[index] = value
-    def get_ionChannel(self):
-        return self.ion_channel
-    def set_ionChannel(self, ion_channel):
-        self.ion_channel = ion_channel
-    def get_erev(self):
-        return self.erev
-    def set_erev(self, erev):
-        self.erev = erev
-    def get_ion(self):
-        return self.ion
-    def set_ion(self, ion):
-        self.ion = ion
     def validate_NmlId(self, value):
         # Validate type NmlId, a restriction on xs:string.
         if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
@@ -15606,44 +13668,6 @@ def factory(*args_, **kwargs_):
         else:
             return ChannelPopulation(*args_, **kwargs_)
     factory = staticmethod(factory)
-    def get_ns_prefix_(self):
-        return self.ns_prefix_
-    def set_ns_prefix_(self, ns_prefix):
-        self.ns_prefix_ = ns_prefix
-    def get_variableParameter(self):
-        return self.variable_parameters
-    def set_variableParameter(self, variable_parameters):
-        self.variable_parameters = variable_parameters
-    def add_variableParameter(self, value):
-        self.variable_parameters.append(value)
-    def insert_variableParameter_at(self, index, value):
-        self.variable_parameters.insert(index, value)
-    def replace_variableParameter_at(self, index, value):
-        self.variable_parameters[index] = value
-    def get_ionChannel(self):
-        return self.ion_channel
-    def set_ionChannel(self, ion_channel):
-        self.ion_channel = ion_channel
-    def get_number(self):
-        return self.number
-    def set_number(self, number):
-        self.number = number
-    def get_erev(self):
-        return self.erev
-    def set_erev(self, erev):
-        self.erev = erev
-    def get_segmentGroup(self):
-        return self.segment_groups
-    def set_segmentGroup(self, segment_groups):
-        self.segment_groups = segment_groups
-    def get_segment(self):
-        return self.segments
-    def set_segment(self, segments):
-        self.segments = segments
-    def get_ion(self):
-        return self.ion
-    def set_ion(self, ion):
-        self.ion = ion
     def validate_NmlId(self, value):
         # Validate type NmlId, a restriction on xs:string.
         if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
@@ -15828,22 +13852,6 @@ def factory(*args_, **kwargs_):
         else:
             return BiophysicalProperties2CaPools(*args_, **kwargs_)
     factory = staticmethod(factory)
-    def get_ns_prefix_(self):
-        return self.ns_prefix_
-    def set_ns_prefix_(self, ns_prefix):
-        self.ns_prefix_ = ns_prefix
-    def get_membraneProperties2CaPools(self):
-        return self.membrane_properties2_ca_pools
-    def set_membraneProperties2CaPools(self, membrane_properties2_ca_pools):
-        self.membrane_properties2_ca_pools = membrane_properties2_ca_pools
-    def get_intracellularProperties2CaPools(self):
-        return self.intracellular_properties2_ca_pools
-    def set_intracellularProperties2CaPools(self, intracellular_properties2_ca_pools):
-        self.intracellular_properties2_ca_pools = intracellular_properties2_ca_pools
-    def get_extracellularProperties(self):
-        return self.extracellular_properties
-    def set_extracellularProperties(self, extracellular_properties):
-        self.extracellular_properties = extracellular_properties
     def _hasContent(self):
         if (
             self.membrane_properties2_ca_pools is not None or
@@ -15964,22 +13972,6 @@ def factory(*args_, **kwargs_):
         else:
             return BiophysicalProperties(*args_, **kwargs_)
     factory = staticmethod(factory)
-    def get_ns_prefix_(self):
-        return self.ns_prefix_
-    def set_ns_prefix_(self, ns_prefix):
-        self.ns_prefix_ = ns_prefix
-    def get_membraneProperties(self):
-        return self.membrane_properties
-    def set_membraneProperties(self, membrane_properties):
-        self.membrane_properties = membrane_properties
-    def get_intracellularProperties(self):
-        return self.intracellular_properties
-    def set_intracellularProperties(self, intracellular_properties):
-        self.intracellular_properties = intracellular_properties
-    def get_extracellularProperties(self):
-        return self.extracellular_properties
-    def set_extracellularProperties(self, extracellular_properties):
-        self.extracellular_properties = extracellular_properties
     def _hasContent(self):
         if (
self.membrane_properties is not None or @@ -16101,26 +14093,6 @@ def factory(*args_, **kwargs_): else: return InhomogeneousParameter(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_proximal(self): - return self.proximal - def set_proximal(self, proximal): - self.proximal = proximal - def get_distal(self): - return self.distal - def set_distal(self, distal): - self.distal = distal - def get_variable(self): - return self.variable - def set_variable(self, variable): - self.variable = variable - def get_metric(self): - return self.metric - def set_metric(self, metric): - self.metric = metric def validate_Metric(self, value): # Validate type Metric, a restriction on xs:string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -16290,78 +14262,6 @@ def factory(*args_, **kwargs_): else: return SegmentGroup(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_notes(self): - return self.notes - def set_notes(self, notes): - self.notes = notes - def get_property(self): - return self.properties - def set_property(self, properties): - self.properties = properties - def add_property(self, value): - self.properties.append(value) - def insert_property_at(self, index, value): - self.properties.insert(index, value) - def replace_property_at(self, index, value): - self.properties[index] = value - def get_annotation(self): - return self.annotation - def set_annotation(self, annotation): - self.annotation = annotation - def get_member(self): - return self.members - def set_member(self, members): - self.members = members - def add_member(self, value): - self.members.append(value) - def insert_member_at(self, index, value): - self.members.insert(index, value) - def replace_member_at(self, index, value): - self.members[index] = value - def get_include(self): - return self.includes - def set_include(self, includes): - self.includes = includes - def add_include(self, value): - self.includes.append(value) - def insert_include_at(self, index, value): - self.includes.insert(index, value) - def replace_include_at(self, index, value): - self.includes[index] = value - def get_path(self): - return self.paths - def set_path(self, paths): - self.paths = paths - def add_path(self, value): - self.paths.append(value) - def insert_path_at(self, index, value): - self.paths.insert(index, value) - def replace_path_at(self, index, value): - self.paths[index] = value - def get_subTree(self): - return self.sub_trees - def set_subTree(self, sub_trees): - self.sub_trees = sub_trees - def add_subTree(self, value): - self.sub_trees.append(value) - def insert_subTree_at(self, index, value): - self.sub_trees.insert(index, value) - def replace_subTree_at(self, index, value): - self.sub_trees[index] = value - def get_inhomogeneousParameter(self): - return self.inhomogeneous_parameters - def set_inhomogeneousParameter(self, inhomogeneous_parameters): - self.inhomogeneous_parameters = inhomogeneous_parameters - def add_inhomogeneousParameter(self, value): - self.inhomogeneous_parameters.append(value) - def insert_inhomogeneousParameter_at(self, index, value): - self.inhomogeneous_parameters.insert(index, value) - def replace_inhomogeneousParameter_at(self, index, value): - self.inhomogeneous_parameters[index] = value def validate_Notes(self, 
value): result = True # Validate type Notes, a restriction on xs:string. @@ -16550,26 +14450,6 @@ def factory(*args_, **kwargs_): else: return Segment(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_parent(self): - return self.parent - def set_parent(self, parent): - self.parent = parent - def get_proximal(self): - return self.proximal - def set_proximal(self, proximal): - self.proximal = proximal - def get_distal(self): - return self.distal - def set_distal(self, distal): - self.distal = distal - def get_name(self): - return self.name - def set_name(self, name): - self.name = name def _hasContent(self): if ( self.parent is not None or @@ -16787,30 +14667,6 @@ def factory(*args_, **kwargs_): else: return Morphology(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_segment(self): - return self.segments - def set_segment(self, segments): - self.segments = segments - def add_segment(self, value): - self.segments.append(value) - def insert_segment_at(self, index, value): - self.segments.insert(index, value) - def replace_segment_at(self, index, value): - self.segments[index] = value - def get_segmentGroup(self): - return self.segment_groups - def set_segmentGroup(self, segment_groups): - self.segment_groups = segment_groups - def add_segmentGroup(self, value): - self.segment_groups.append(value) - def insert_segmentGroup_at(self, index, value): - self.segment_groups.insert(index, value) - def replace_segmentGroup_at(self, index, value): - self.segment_groups[index] = value def _hasContent(self): if ( self.segments or @@ -16918,12 +14774,6 @@ def factory(*args_, **kwargs_): else: return BaseCell(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_extensiontype_(self): return self.extensiontype_ - def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ def _hasContent(self): if ( super(BaseCell, self)._hasContent() @@ -17014,12 +14864,6 @@ def factory(*args_, **kwargs_): else: return BaseSynapse(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_extensiontype_(self): return self.extensiontype_ - def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ def _hasContent(self): if ( super(BaseSynapse, self)._hasContent() @@ -17124,26 +14968,6 @@ def factory(*args_, **kwargs_): else: return FixedFactorConcentrationModel(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_ion(self): - return self.ion - def set_ion(self, ion): - self.ion = ion - def get_restingConc(self): - return self.resting_conc - def set_restingConc(self, resting_conc): - self.resting_conc = resting_conc - def get_decayConstant(self): - return self.decay_constant - def set_decayConstant(self, decay_constant): - self.decay_constant = decay_constant - def get_rho(self): - return self.rho - def set_rho(self, rho): - self.rho = rho def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. 
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -17313,28 +15137,6 @@ def factory(*args_, **kwargs_): else: return DecayingPoolConcentrationModel(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_ion(self): - return self.ion - def set_ion(self, ion): - self.ion = ion - def get_restingConc(self): - return self.resting_conc - def set_restingConc(self, resting_conc): - self.resting_conc = resting_conc - def get_decayConstant(self): - return self.decay_constant - def set_decayConstant(self, decay_constant): - self.decay_constant = decay_constant - def get_shellThickness(self): - return self.shell_thickness - def set_shellThickness(self, shell_thickness): - self.shell_thickness = shell_thickness - def get_extensiontype_(self): return self.extensiontype_ - def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -17516,30 +15318,6 @@ def factory(*args_, **kwargs_): else: return GateFractionalSubgate(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_notes(self): - return self.notes - def set_notes(self, notes): - self.notes = notes - def get_q10Settings(self): - return self.q10_settings - def set_q10Settings(self, q10_settings): - self.q10_settings = q10_settings - def get_steadyState(self): - return self.steady_state - def set_steadyState(self, steady_state): - self.steady_state = steady_state - def get_timeCourse(self): - return self.time_course - def set_timeCourse(self, time_course): - self.time_course = time_course - def get_fractionalConductance(self): - return self.fractional_conductance - def set_fractionalConductance(self, fractional_conductance): - self.fractional_conductance = fractional_conductance def validate_Notes(self, value): result = True # Validate type Notes, a restriction on xs:string. @@ -17705,32 +15483,6 @@ def factory(*args_, **kwargs_): else: return GateFractional(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_notes(self): - return self.notes - def set_notes(self, notes): - self.notes = notes - def get_q10Settings(self): - return self.q10_settings - def set_q10Settings(self, q10_settings): - self.q10_settings = q10_settings - def get_subGate(self): - return self.sub_gates - def set_subGate(self, sub_gates): - self.sub_gates = sub_gates - def add_subGate(self, value): - self.sub_gates.append(value) - def insert_subGate_at(self, index, value): - self.sub_gates.insert(index, value) - def replace_subGate_at(self, index, value): - self.sub_gates[index] = value - def get_instances(self): - return self.instances - def set_instances(self, instances): - self.instances = instances def validate_Notes(self, value): result = True # Validate type Notes, a restriction on xs:string. 
@@ -17880,22 +15632,6 @@ def factory(*args_, **kwargs_): else: return GateHHInstantaneous(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_notes(self): - return self.notes - def set_notes(self, notes): - self.notes = notes - def get_steadyState(self): - return self.steady_state - def set_steadyState(self, steady_state): - self.steady_state = steady_state - def get_instances(self): - return self.instances - def set_instances(self, instances): - self.instances = instances def validate_Notes(self, value): result = True # Validate type Notes, a restriction on xs:string. @@ -18045,34 +15781,6 @@ def factory(*args_, **kwargs_): else: return GateHHRatesInf(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_notes(self): - return self.notes - def set_notes(self, notes): - self.notes = notes - def get_q10Settings(self): - return self.q10_settings - def set_q10Settings(self, q10_settings): - self.q10_settings = q10_settings - def get_forwardRate(self): - return self.forward_rate - def set_forwardRate(self, forward_rate): - self.forward_rate = forward_rate - def get_reverseRate(self): - return self.reverse_rate - def set_reverseRate(self, reverse_rate): - self.reverse_rate = reverse_rate - def get_steadyState(self): - return self.steady_state - def set_steadyState(self, steady_state): - self.steady_state = steady_state - def get_instances(self): - return self.instances - def set_instances(self, instances): - self.instances = instances def validate_Notes(self, value): result = True # Validate type Notes, a restriction on xs:string. @@ -18249,34 +15957,6 @@ def factory(*args_, **kwargs_): else: return GateHHRatesTau(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_notes(self): - return self.notes - def set_notes(self, notes): - self.notes = notes - def get_q10Settings(self): - return self.q10_settings - def set_q10Settings(self, q10_settings): - self.q10_settings = q10_settings - def get_forwardRate(self): - return self.forward_rate - def set_forwardRate(self, forward_rate): - self.forward_rate = forward_rate - def get_reverseRate(self): - return self.reverse_rate - def set_reverseRate(self, reverse_rate): - self.reverse_rate = reverse_rate - def get_timeCourse(self): - return self.time_course - def set_timeCourse(self, time_course): - self.time_course = time_course - def get_instances(self): - return self.instances - def set_instances(self, instances): - self.instances = instances def validate_Notes(self, value): result = True # Validate type Notes, a restriction on xs:string. 
@@ -18456,38 +16136,6 @@ def factory(*args_, **kwargs_): else: return GateHHRatesTauInf(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_notes(self): - return self.notes - def set_notes(self, notes): - self.notes = notes - def get_q10Settings(self): - return self.q10_settings - def set_q10Settings(self, q10_settings): - self.q10_settings = q10_settings - def get_forwardRate(self): - return self.forward_rate - def set_forwardRate(self, forward_rate): - self.forward_rate = forward_rate - def get_reverseRate(self): - return self.reverse_rate - def set_reverseRate(self, reverse_rate): - self.reverse_rate = reverse_rate - def get_timeCourse(self): - return self.time_course - def set_timeCourse(self, time_course): - self.time_course = time_course - def get_steadyState(self): - return self.steady_state - def set_steadyState(self, steady_state): - self.steady_state = steady_state - def get_instances(self): - return self.instances - def set_instances(self, instances): - self.instances = instances def validate_Notes(self, value): result = True # Validate type Notes, a restriction on xs:string. @@ -18670,30 +16318,6 @@ def factory(*args_, **kwargs_): else: return GateHHTauInf(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_notes(self): - return self.notes - def set_notes(self, notes): - self.notes = notes - def get_q10Settings(self): - return self.q10_settings - def set_q10Settings(self, q10_settings): - self.q10_settings = q10_settings - def get_timeCourse(self): - return self.time_course - def set_timeCourse(self, time_course): - self.time_course = time_course - def get_steadyState(self): - return self.steady_state - def set_steadyState(self, steady_state): - self.steady_state = steady_state - def get_instances(self): - return self.instances - def set_instances(self, instances): - self.instances = instances def validate_Notes(self, value): result = True # Validate type Notes, a restriction on xs:string. @@ -18858,30 +16482,6 @@ def factory(*args_, **kwargs_): else: return GateHHRates(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_notes(self): - return self.notes - def set_notes(self, notes): - self.notes = notes - def get_q10Settings(self): - return self.q10_settings - def set_q10Settings(self, q10_settings): - self.q10_settings = q10_settings - def get_forwardRate(self): - return self.forward_rate - def set_forwardRate(self, forward_rate): - self.forward_rate = forward_rate - def get_reverseRate(self): - return self.reverse_rate - def set_reverseRate(self, reverse_rate): - self.reverse_rate = reverse_rate - def get_instances(self): - return self.instances - def set_instances(self, instances): - self.instances = instances def validate_Notes(self, value): result = True # Validate type Notes, a restriction on xs:string. 
@@ -19064,52 +16664,6 @@ def factory(*args_, **kwargs_): else: return GateHHUndetermined(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_notes(self): - return self.notes - def set_notes(self, notes): - self.notes = notes - def get_q10Settings(self): - return self.q10_settings - def set_q10Settings(self, q10_settings): - self.q10_settings = q10_settings - def get_forwardRate(self): - return self.forward_rate - def set_forwardRate(self, forward_rate): - self.forward_rate = forward_rate - def get_reverseRate(self): - return self.reverse_rate - def set_reverseRate(self, reverse_rate): - self.reverse_rate = reverse_rate - def get_timeCourse(self): - return self.time_course - def set_timeCourse(self, time_course): - self.time_course = time_course - def get_steadyState(self): - return self.steady_state - def set_steadyState(self, steady_state): - self.steady_state = steady_state - def get_subGate(self): - return self.sub_gates - def set_subGate(self, sub_gates): - self.sub_gates = sub_gates - def add_subGate(self, value): - self.sub_gates.append(value) - def insert_subGate_at(self, index, value): - self.sub_gates.insert(index, value) - def replace_subGate_at(self, index, value): - self.sub_gates[index] = value - def get_instances(self): - return self.instances - def set_instances(self, instances): - self.instances = instances - def get_type(self): - return self.type - def set_type(self, type): - self.type = type def validate_Notes(self, value): result = True # Validate type Notes, a restriction on xs:string. @@ -19346,72 +16900,6 @@ def factory(*args_, **kwargs_): else: return GateKS(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_notes(self): - return self.notes - def set_notes(self, notes): - self.notes = notes - def get_q10Settings(self): - return self.q10_settings - def set_q10Settings(self, q10_settings): - self.q10_settings = q10_settings - def get_closedState(self): - return self.closed_states - def set_closedState(self, closed_states): - self.closed_states = closed_states - def add_closedState(self, value): - self.closed_states.append(value) - def insert_closedState_at(self, index, value): - self.closed_states.insert(index, value) - def replace_closedState_at(self, index, value): - self.closed_states[index] = value - def get_openState(self): - return self.open_states - def set_openState(self, open_states): - self.open_states = open_states - def add_openState(self, value): - self.open_states.append(value) - def insert_openState_at(self, index, value): - self.open_states.insert(index, value) - def replace_openState_at(self, index, value): - self.open_states[index] = value - def get_forwardTransition(self): - return self.forward_transition - def set_forwardTransition(self, forward_transition): - self.forward_transition = forward_transition - def add_forwardTransition(self, value): - self.forward_transition.append(value) - def insert_forwardTransition_at(self, index, value): - self.forward_transition.insert(index, value) - def replace_forwardTransition_at(self, index, value): - self.forward_transition[index] = value - def get_reverseTransition(self): - return self.reverse_transition - def set_reverseTransition(self, reverse_transition): - self.reverse_transition = reverse_transition - def add_reverseTransition(self, value): - 
self.reverse_transition.append(value) - def insert_reverseTransition_at(self, index, value): - self.reverse_transition.insert(index, value) - def replace_reverseTransition_at(self, index, value): - self.reverse_transition[index] = value - def get_tauInfTransition(self): - return self.tau_inf_transition - def set_tauInfTransition(self, tau_inf_transition): - self.tau_inf_transition = tau_inf_transition - def add_tauInfTransition(self, value): - self.tau_inf_transition.append(value) - def insert_tauInfTransition_at(self, index, value): - self.tau_inf_transition.insert(index, value) - def replace_tauInfTransition_at(self, index, value): - self.tau_inf_transition[index] = value - def get_instances(self): - return self.instances - def set_instances(self, instances): - self.instances = instances def validate_Notes(self, value): result = True # Validate type Notes, a restriction on xs:string. @@ -19599,26 +17087,6 @@ def factory(*args_, **kwargs_): else: return TauInfTransition(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_steadyState(self): - return self.steady_state - def set_steadyState(self, steady_state): - self.steady_state = steady_state - def get_timeCourse(self): - return self.time_course - def set_timeCourse(self, time_course): - self.time_course = time_course - def get_from(self): - return self.from_ - def set_from(self, from_): - self.from_ = from_ - def get_to(self): - return self.to - def set_to(self, to): - self.to = to def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -19755,22 +17223,6 @@ def factory(*args_, **kwargs_): else: return ReverseTransition(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_anytypeobjs_(self): return self.anytypeobjs_ - def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_ - def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value) - def insert_anytypeobjs_(self, index, value): self._anytypeobjs_[index] = value - def get_from(self): - return self.from_ - def set_from(self, from_): - self.from_ = from_ - def get_to(self): - return self.to - def set_to(self, to): - self.to = to def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. 
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -19857,7 +17309,7 @@ def _buildAttributes(self, node, attrs, already_processed): super(ReverseTransition, self)._buildAttributes(node, attrs, already_processed) def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): content_ = self.gds_build_any(child_, 'ReverseTransition') - self.add_anytypeobjs_(content_) + self.anytypeobjs_.append(content_) super(ReverseTransition, self)._buildChildren(child_, node, nodeName_, True) # end class ReverseTransition @@ -19897,22 +17349,6 @@ def factory(*args_, **kwargs_): else: return ForwardTransition(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_anytypeobjs_(self): return self.anytypeobjs_ - def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_ - def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value) - def insert_anytypeobjs_(self, index, value): self._anytypeobjs_[index] = value - def get_from(self): - return self.from_ - def set_from(self, from_): - self.from_ = from_ - def get_to(self): - return self.to - def set_to(self, to): - self.to = to def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -19999,7 +17435,7 @@ def _buildAttributes(self, node, attrs, already_processed): super(ForwardTransition, self)._buildAttributes(node, attrs, already_processed) def _buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): content_ = self.gds_build_any(child_, 'ForwardTransition') - self.add_anytypeobjs_(content_) + self.anytypeobjs_.append(content_) super(ForwardTransition, self)._buildChildren(child_, node, nodeName_, True) # end class ForwardTransition @@ -20028,10 +17464,6 @@ def factory(*args_, **kwargs_): else: return OpenState(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix def _hasContent(self): if ( super(OpenState, self)._hasContent() @@ -20109,10 +17541,6 @@ def factory(*args_, **kwargs_): else: return ClosedState(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix def _hasContent(self): if ( super(ClosedState, self)._hasContent() @@ -20206,28 +17634,6 @@ def factory(*args_, **kwargs_): else: return IonChannelKS(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_gateKS(self): - return self.gate_kses - def set_gateKS(self, gate_kses): - self.gate_kses = gate_kses - def add_gateKS(self, value): - self.gate_kses.append(value) - def insert_gateKS_at(self, index, value): - self.gate_kses.insert(index, value) - def replace_gateKS_at(self, index, value): - self.gate_kses[index] = value - def get_species(self): - return self.species - def set_species(self, species): - self.species = species - def get_conductance(self): - return self.conductance - def set_conductance(self, conductance): - self.conductance = conductance def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. 
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -20362,22 +17768,6 @@ def factory(*args_, **kwargs_): else: return IonChannelScalable(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_q10ConductanceScaling(self): - return self.q10_conductance_scalings - def set_q10ConductanceScaling(self, q10_conductance_scalings): - self.q10_conductance_scalings = q10_conductance_scalings - def add_q10ConductanceScaling(self, value): - self.q10_conductance_scalings.append(value) - def insert_q10ConductanceScaling_at(self, index, value): - self.q10_conductance_scalings.insert(index, value) - def replace_q10ConductanceScaling_at(self, index, value): - self.q10_conductance_scalings[index] = value - def get_extensiontype_(self): return self.extensiontype_ - def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ def _hasContent(self): if ( self.q10_conductance_scalings or @@ -20881,680 +18271,6 @@ def factory(*args_, **kwargs_): else: return NeuroMLDocument(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_include(self): - return self.includes - def set_include(self, includes): - self.includes = includes - def add_include(self, value): - self.includes.append(value) - def insert_include_at(self, index, value): - self.includes.insert(index, value) - def replace_include_at(self, index, value): - self.includes[index] = value - def get_extracellularProperties(self): - return self.extracellular_properties - def set_extracellularProperties(self, extracellular_properties): - self.extracellular_properties = extracellular_properties - def add_extracellularProperties(self, value): - self.extracellular_properties.append(value) - def insert_extracellularProperties_at(self, index, value): - self.extracellular_properties.insert(index, value) - def replace_extracellularProperties_at(self, index, value): - self.extracellular_properties[index] = value - def get_intracellularProperties(self): - return self.intracellular_properties - def set_intracellularProperties(self, intracellular_properties): - self.intracellular_properties = intracellular_properties - def add_intracellularProperties(self, value): - self.intracellular_properties.append(value) - def insert_intracellularProperties_at(self, index, value): - self.intracellular_properties.insert(index, value) - def replace_intracellularProperties_at(self, index, value): - self.intracellular_properties[index] = value - def get_morphology(self): - return self.morphology - def set_morphology(self, morphology): - self.morphology = morphology - def add_morphology(self, value): - self.morphology.append(value) - def insert_morphology_at(self, index, value): - self.morphology.insert(index, value) - def replace_morphology_at(self, index, value): - self.morphology[index] = value - def get_ionChannel(self): - return self.ion_channel - def set_ionChannel(self, ion_channel): - self.ion_channel = ion_channel - def add_ionChannel(self, value): - self.ion_channel.append(value) - def insert_ionChannel_at(self, index, value): - self.ion_channel.insert(index, value) - def replace_ionChannel_at(self, index, value): - self.ion_channel[index] = value - def get_ionChannelHH(self): - return self.ion_channel_hhs - def set_ionChannelHH(self, ion_channel_hhs): - 
self.ion_channel_hhs = ion_channel_hhs - def add_ionChannelHH(self, value): - self.ion_channel_hhs.append(value) - def insert_ionChannelHH_at(self, index, value): - self.ion_channel_hhs.insert(index, value) - def replace_ionChannelHH_at(self, index, value): - self.ion_channel_hhs[index] = value - def get_ionChannelVShift(self): - return self.ion_channel_v_shifts - def set_ionChannelVShift(self, ion_channel_v_shifts): - self.ion_channel_v_shifts = ion_channel_v_shifts - def add_ionChannelVShift(self, value): - self.ion_channel_v_shifts.append(value) - def insert_ionChannelVShift_at(self, index, value): - self.ion_channel_v_shifts.insert(index, value) - def replace_ionChannelVShift_at(self, index, value): - self.ion_channel_v_shifts[index] = value - def get_ionChannelKS(self): - return self.ion_channel_kses - def set_ionChannelKS(self, ion_channel_kses): - self.ion_channel_kses = ion_channel_kses - def add_ionChannelKS(self, value): - self.ion_channel_kses.append(value) - def insert_ionChannelKS_at(self, index, value): - self.ion_channel_kses.insert(index, value) - def replace_ionChannelKS_at(self, index, value): - self.ion_channel_kses[index] = value - def get_decayingPoolConcentrationModel(self): - return self.decaying_pool_concentration_models - def set_decayingPoolConcentrationModel(self, decaying_pool_concentration_models): - self.decaying_pool_concentration_models = decaying_pool_concentration_models - def add_decayingPoolConcentrationModel(self, value): - self.decaying_pool_concentration_models.append(value) - def insert_decayingPoolConcentrationModel_at(self, index, value): - self.decaying_pool_concentration_models.insert(index, value) - def replace_decayingPoolConcentrationModel_at(self, index, value): - self.decaying_pool_concentration_models[index] = value - def get_fixedFactorConcentrationModel(self): - return self.fixed_factor_concentration_models - def set_fixedFactorConcentrationModel(self, fixed_factor_concentration_models): - self.fixed_factor_concentration_models = fixed_factor_concentration_models - def add_fixedFactorConcentrationModel(self, value): - self.fixed_factor_concentration_models.append(value) - def insert_fixedFactorConcentrationModel_at(self, index, value): - self.fixed_factor_concentration_models.insert(index, value) - def replace_fixedFactorConcentrationModel_at(self, index, value): - self.fixed_factor_concentration_models[index] = value - def get_alphaCurrentSynapse(self): - return self.alpha_current_synapses - def set_alphaCurrentSynapse(self, alpha_current_synapses): - self.alpha_current_synapses = alpha_current_synapses - def add_alphaCurrentSynapse(self, value): - self.alpha_current_synapses.append(value) - def insert_alphaCurrentSynapse_at(self, index, value): - self.alpha_current_synapses.insert(index, value) - def replace_alphaCurrentSynapse_at(self, index, value): - self.alpha_current_synapses[index] = value - def get_alphaSynapse(self): - return self.alpha_synapses - def set_alphaSynapse(self, alpha_synapses): - self.alpha_synapses = alpha_synapses - def add_alphaSynapse(self, value): - self.alpha_synapses.append(value) - def insert_alphaSynapse_at(self, index, value): - self.alpha_synapses.insert(index, value) - def replace_alphaSynapse_at(self, index, value): - self.alpha_synapses[index] = value - def get_expOneSynapse(self): - return self.exp_one_synapses - def set_expOneSynapse(self, exp_one_synapses): - self.exp_one_synapses = exp_one_synapses - def add_expOneSynapse(self, value): - self.exp_one_synapses.append(value) - def 
insert_expOneSynapse_at(self, index, value): - self.exp_one_synapses.insert(index, value) - def replace_expOneSynapse_at(self, index, value): - self.exp_one_synapses[index] = value - def get_expTwoSynapse(self): - return self.exp_two_synapses - def set_expTwoSynapse(self, exp_two_synapses): - self.exp_two_synapses = exp_two_synapses - def add_expTwoSynapse(self, value): - self.exp_two_synapses.append(value) - def insert_expTwoSynapse_at(self, index, value): - self.exp_two_synapses.insert(index, value) - def replace_expTwoSynapse_at(self, index, value): - self.exp_two_synapses[index] = value - def get_expThreeSynapse(self): - return self.exp_three_synapses - def set_expThreeSynapse(self, exp_three_synapses): - self.exp_three_synapses = exp_three_synapses - def add_expThreeSynapse(self, value): - self.exp_three_synapses.append(value) - def insert_expThreeSynapse_at(self, index, value): - self.exp_three_synapses.insert(index, value) - def replace_expThreeSynapse_at(self, index, value): - self.exp_three_synapses[index] = value - def get_blockingPlasticSynapse(self): - return self.blocking_plastic_synapses - def set_blockingPlasticSynapse(self, blocking_plastic_synapses): - self.blocking_plastic_synapses = blocking_plastic_synapses - def add_blockingPlasticSynapse(self, value): - self.blocking_plastic_synapses.append(value) - def insert_blockingPlasticSynapse_at(self, index, value): - self.blocking_plastic_synapses.insert(index, value) - def replace_blockingPlasticSynapse_at(self, index, value): - self.blocking_plastic_synapses[index] = value - def get_doubleSynapse(self): - return self.double_synapses - def set_doubleSynapse(self, double_synapses): - self.double_synapses = double_synapses - def add_doubleSynapse(self, value): - self.double_synapses.append(value) - def insert_doubleSynapse_at(self, index, value): - self.double_synapses.insert(index, value) - def replace_doubleSynapse_at(self, index, value): - self.double_synapses[index] = value - def get_gapJunction(self): - return self.gap_junctions - def set_gapJunction(self, gap_junctions): - self.gap_junctions = gap_junctions - def add_gapJunction(self, value): - self.gap_junctions.append(value) - def insert_gapJunction_at(self, index, value): - self.gap_junctions.insert(index, value) - def replace_gapJunction_at(self, index, value): - self.gap_junctions[index] = value - def get_silentSynapse(self): - return self.silent_synapses - def set_silentSynapse(self, silent_synapses): - self.silent_synapses = silent_synapses - def add_silentSynapse(self, value): - self.silent_synapses.append(value) - def insert_silentSynapse_at(self, index, value): - self.silent_synapses.insert(index, value) - def replace_silentSynapse_at(self, index, value): - self.silent_synapses[index] = value - def get_linearGradedSynapse(self): - return self.linear_graded_synapses - def set_linearGradedSynapse(self, linear_graded_synapses): - self.linear_graded_synapses = linear_graded_synapses - def add_linearGradedSynapse(self, value): - self.linear_graded_synapses.append(value) - def insert_linearGradedSynapse_at(self, index, value): - self.linear_graded_synapses.insert(index, value) - def replace_linearGradedSynapse_at(self, index, value): - self.linear_graded_synapses[index] = value - def get_gradedSynapse(self): - return self.graded_synapses - def set_gradedSynapse(self, graded_synapses): - self.graded_synapses = graded_synapses - def add_gradedSynapse(self, value): - self.graded_synapses.append(value) - def insert_gradedSynapse_at(self, index, value): - 
self.graded_synapses.insert(index, value) - def replace_gradedSynapse_at(self, index, value): - self.graded_synapses[index] = value - def get_biophysicalProperties(self): - return self.biophysical_properties - def set_biophysicalProperties(self, biophysical_properties): - self.biophysical_properties = biophysical_properties - def add_biophysicalProperties(self, value): - self.biophysical_properties.append(value) - def insert_biophysicalProperties_at(self, index, value): - self.biophysical_properties.insert(index, value) - def replace_biophysicalProperties_at(self, index, value): - self.biophysical_properties[index] = value - def get_cell(self): - return self.cells - def set_cell(self, cells): - self.cells = cells - def add_cell(self, value): - self.cells.append(value) - def insert_cell_at(self, index, value): - self.cells.insert(index, value) - def replace_cell_at(self, index, value): - self.cells[index] = value - def get_cell2CaPools(self): - return self.cell2_ca_poolses - def set_cell2CaPools(self, cell2_ca_poolses): - self.cell2_ca_poolses = cell2_ca_poolses - def add_cell2CaPools(self, value): - self.cell2_ca_poolses.append(value) - def insert_cell2CaPools_at(self, index, value): - self.cell2_ca_poolses.insert(index, value) - def replace_cell2CaPools_at(self, index, value): - self.cell2_ca_poolses[index] = value - def get_baseCell(self): - return self.base_cells - def set_baseCell(self, base_cells): - self.base_cells = base_cells - def add_baseCell(self, value): - self.base_cells.append(value) - def insert_baseCell_at(self, index, value): - self.base_cells.insert(index, value) - def replace_baseCell_at(self, index, value): - self.base_cells[index] = value - def get_iafTauCell(self): - return self.iaf_tau_cells - def set_iafTauCell(self, iaf_tau_cells): - self.iaf_tau_cells = iaf_tau_cells - def add_iafTauCell(self, value): - self.iaf_tau_cells.append(value) - def insert_iafTauCell_at(self, index, value): - self.iaf_tau_cells.insert(index, value) - def replace_iafTauCell_at(self, index, value): - self.iaf_tau_cells[index] = value - def get_iafTauRefCell(self): - return self.iaf_tau_ref_cells - def set_iafTauRefCell(self, iaf_tau_ref_cells): - self.iaf_tau_ref_cells = iaf_tau_ref_cells - def add_iafTauRefCell(self, value): - self.iaf_tau_ref_cells.append(value) - def insert_iafTauRefCell_at(self, index, value): - self.iaf_tau_ref_cells.insert(index, value) - def replace_iafTauRefCell_at(self, index, value): - self.iaf_tau_ref_cells[index] = value - def get_iafCell(self): - return self.iaf_cells - def set_iafCell(self, iaf_cells): - self.iaf_cells = iaf_cells - def add_iafCell(self, value): - self.iaf_cells.append(value) - def insert_iafCell_at(self, index, value): - self.iaf_cells.insert(index, value) - def replace_iafCell_at(self, index, value): - self.iaf_cells[index] = value - def get_iafRefCell(self): - return self.iaf_ref_cells - def set_iafRefCell(self, iaf_ref_cells): - self.iaf_ref_cells = iaf_ref_cells - def add_iafRefCell(self, value): - self.iaf_ref_cells.append(value) - def insert_iafRefCell_at(self, index, value): - self.iaf_ref_cells.insert(index, value) - def replace_iafRefCell_at(self, index, value): - self.iaf_ref_cells[index] = value - def get_izhikevichCell(self): - return self.izhikevich_cells - def set_izhikevichCell(self, izhikevich_cells): - self.izhikevich_cells = izhikevich_cells - def add_izhikevichCell(self, value): - self.izhikevich_cells.append(value) - def insert_izhikevichCell_at(self, index, value): - self.izhikevich_cells.insert(index, value) - def 
replace_izhikevichCell_at(self, index, value): - self.izhikevich_cells[index] = value - def get_izhikevich2007Cell(self): - return self.izhikevich2007_cells - def set_izhikevich2007Cell(self, izhikevich2007_cells): - self.izhikevich2007_cells = izhikevich2007_cells - def add_izhikevich2007Cell(self, value): - self.izhikevich2007_cells.append(value) - def insert_izhikevich2007Cell_at(self, index, value): - self.izhikevich2007_cells.insert(index, value) - def replace_izhikevich2007Cell_at(self, index, value): - self.izhikevich2007_cells[index] = value - def get_adExIaFCell(self): - return self.ad_ex_ia_f_cells - def set_adExIaFCell(self, ad_ex_ia_f_cells): - self.ad_ex_ia_f_cells = ad_ex_ia_f_cells - def add_adExIaFCell(self, value): - self.ad_ex_ia_f_cells.append(value) - def insert_adExIaFCell_at(self, index, value): - self.ad_ex_ia_f_cells.insert(index, value) - def replace_adExIaFCell_at(self, index, value): - self.ad_ex_ia_f_cells[index] = value - def get_fitzHughNagumoCell(self): - return self.fitz_hugh_nagumo_cells - def set_fitzHughNagumoCell(self, fitz_hugh_nagumo_cells): - self.fitz_hugh_nagumo_cells = fitz_hugh_nagumo_cells - def add_fitzHughNagumoCell(self, value): - self.fitz_hugh_nagumo_cells.append(value) - def insert_fitzHughNagumoCell_at(self, index, value): - self.fitz_hugh_nagumo_cells.insert(index, value) - def replace_fitzHughNagumoCell_at(self, index, value): - self.fitz_hugh_nagumo_cells[index] = value - def get_fitzHughNagumo1969Cell(self): - return self.fitz_hugh_nagumo1969_cells - def set_fitzHughNagumo1969Cell(self, fitz_hugh_nagumo1969_cells): - self.fitz_hugh_nagumo1969_cells = fitz_hugh_nagumo1969_cells - def add_fitzHughNagumo1969Cell(self, value): - self.fitz_hugh_nagumo1969_cells.append(value) - def insert_fitzHughNagumo1969Cell_at(self, index, value): - self.fitz_hugh_nagumo1969_cells.insert(index, value) - def replace_fitzHughNagumo1969Cell_at(self, index, value): - self.fitz_hugh_nagumo1969_cells[index] = value - def get_pinskyRinzelCA3Cell(self): - return self.pinsky_rinzel_ca3_cells - def set_pinskyRinzelCA3Cell(self, pinsky_rinzel_ca3_cells): - self.pinsky_rinzel_ca3_cells = pinsky_rinzel_ca3_cells - def add_pinskyRinzelCA3Cell(self, value): - self.pinsky_rinzel_ca3_cells.append(value) - def insert_pinskyRinzelCA3Cell_at(self, index, value): - self.pinsky_rinzel_ca3_cells.insert(index, value) - def replace_pinskyRinzelCA3Cell_at(self, index, value): - self.pinsky_rinzel_ca3_cells[index] = value - def get_pulseGenerator(self): - return self.pulse_generators - def set_pulseGenerator(self, pulse_generators): - self.pulse_generators = pulse_generators - def add_pulseGenerator(self, value): - self.pulse_generators.append(value) - def insert_pulseGenerator_at(self, index, value): - self.pulse_generators.insert(index, value) - def replace_pulseGenerator_at(self, index, value): - self.pulse_generators[index] = value - def get_pulseGeneratorDL(self): - return self.pulse_generator_dls - def set_pulseGeneratorDL(self, pulse_generator_dls): - self.pulse_generator_dls = pulse_generator_dls - def add_pulseGeneratorDL(self, value): - self.pulse_generator_dls.append(value) - def insert_pulseGeneratorDL_at(self, index, value): - self.pulse_generator_dls.insert(index, value) - def replace_pulseGeneratorDL_at(self, index, value): - self.pulse_generator_dls[index] = value - def get_sineGenerator(self): - return self.sine_generators - def set_sineGenerator(self, sine_generators): - self.sine_generators = sine_generators - def add_sineGenerator(self, value): - 
self.sine_generators.append(value) - def insert_sineGenerator_at(self, index, value): - self.sine_generators.insert(index, value) - def replace_sineGenerator_at(self, index, value): - self.sine_generators[index] = value - def get_sineGeneratorDL(self): - return self.sine_generator_dls - def set_sineGeneratorDL(self, sine_generator_dls): - self.sine_generator_dls = sine_generator_dls - def add_sineGeneratorDL(self, value): - self.sine_generator_dls.append(value) - def insert_sineGeneratorDL_at(self, index, value): - self.sine_generator_dls.insert(index, value) - def replace_sineGeneratorDL_at(self, index, value): - self.sine_generator_dls[index] = value - def get_rampGenerator(self): - return self.ramp_generators - def set_rampGenerator(self, ramp_generators): - self.ramp_generators = ramp_generators - def add_rampGenerator(self, value): - self.ramp_generators.append(value) - def insert_rampGenerator_at(self, index, value): - self.ramp_generators.insert(index, value) - def replace_rampGenerator_at(self, index, value): - self.ramp_generators[index] = value - def get_rampGeneratorDL(self): - return self.ramp_generator_dls - def set_rampGeneratorDL(self, ramp_generator_dls): - self.ramp_generator_dls = ramp_generator_dls - def add_rampGeneratorDL(self, value): - self.ramp_generator_dls.append(value) - def insert_rampGeneratorDL_at(self, index, value): - self.ramp_generator_dls.insert(index, value) - def replace_rampGeneratorDL_at(self, index, value): - self.ramp_generator_dls[index] = value - def get_compoundInput(self): - return self.compound_inputs - def set_compoundInput(self, compound_inputs): - self.compound_inputs = compound_inputs - def add_compoundInput(self, value): - self.compound_inputs.append(value) - def insert_compoundInput_at(self, index, value): - self.compound_inputs.insert(index, value) - def replace_compoundInput_at(self, index, value): - self.compound_inputs[index] = value - def get_compoundInputDL(self): - return self.compound_input_dls - def set_compoundInputDL(self, compound_input_dls): - self.compound_input_dls = compound_input_dls - def add_compoundInputDL(self, value): - self.compound_input_dls.append(value) - def insert_compoundInputDL_at(self, index, value): - self.compound_input_dls.insert(index, value) - def replace_compoundInputDL_at(self, index, value): - self.compound_input_dls[index] = value - def get_voltageClamp(self): - return self.voltage_clamps - def set_voltageClamp(self, voltage_clamps): - self.voltage_clamps = voltage_clamps - def add_voltageClamp(self, value): - self.voltage_clamps.append(value) - def insert_voltageClamp_at(self, index, value): - self.voltage_clamps.insert(index, value) - def replace_voltageClamp_at(self, index, value): - self.voltage_clamps[index] = value - def get_voltageClampTriple(self): - return self.voltage_clamp_triples - def set_voltageClampTriple(self, voltage_clamp_triples): - self.voltage_clamp_triples = voltage_clamp_triples - def add_voltageClampTriple(self, value): - self.voltage_clamp_triples.append(value) - def insert_voltageClampTriple_at(self, index, value): - self.voltage_clamp_triples.insert(index, value) - def replace_voltageClampTriple_at(self, index, value): - self.voltage_clamp_triples[index] = value - def get_spikeArray(self): - return self.spike_arrays - def set_spikeArray(self, spike_arrays): - self.spike_arrays = spike_arrays - def add_spikeArray(self, value): - self.spike_arrays.append(value) - def insert_spikeArray_at(self, index, value): - self.spike_arrays.insert(index, value) - def 
replace_spikeArray_at(self, index, value): - self.spike_arrays[index] = value - def get_timedSynapticInput(self): - return self.timed_synaptic_inputs - def set_timedSynapticInput(self, timed_synaptic_inputs): - self.timed_synaptic_inputs = timed_synaptic_inputs - def add_timedSynapticInput(self, value): - self.timed_synaptic_inputs.append(value) - def insert_timedSynapticInput_at(self, index, value): - self.timed_synaptic_inputs.insert(index, value) - def replace_timedSynapticInput_at(self, index, value): - self.timed_synaptic_inputs[index] = value - def get_spikeGenerator(self): - return self.spike_generators - def set_spikeGenerator(self, spike_generators): - self.spike_generators = spike_generators - def add_spikeGenerator(self, value): - self.spike_generators.append(value) - def insert_spikeGenerator_at(self, index, value): - self.spike_generators.insert(index, value) - def replace_spikeGenerator_at(self, index, value): - self.spike_generators[index] = value - def get_spikeGeneratorRandom(self): - return self.spike_generator_randoms - def set_spikeGeneratorRandom(self, spike_generator_randoms): - self.spike_generator_randoms = spike_generator_randoms - def add_spikeGeneratorRandom(self, value): - self.spike_generator_randoms.append(value) - def insert_spikeGeneratorRandom_at(self, index, value): - self.spike_generator_randoms.insert(index, value) - def replace_spikeGeneratorRandom_at(self, index, value): - self.spike_generator_randoms[index] = value - def get_spikeGeneratorPoisson(self): - return self.spike_generator_poissons - def set_spikeGeneratorPoisson(self, spike_generator_poissons): - self.spike_generator_poissons = spike_generator_poissons - def add_spikeGeneratorPoisson(self, value): - self.spike_generator_poissons.append(value) - def insert_spikeGeneratorPoisson_at(self, index, value): - self.spike_generator_poissons.insert(index, value) - def replace_spikeGeneratorPoisson_at(self, index, value): - self.spike_generator_poissons[index] = value - def get_spikeGeneratorRefPoisson(self): - return self.spike_generator_ref_poissons - def set_spikeGeneratorRefPoisson(self, spike_generator_ref_poissons): - self.spike_generator_ref_poissons = spike_generator_ref_poissons - def add_spikeGeneratorRefPoisson(self, value): - self.spike_generator_ref_poissons.append(value) - def insert_spikeGeneratorRefPoisson_at(self, index, value): - self.spike_generator_ref_poissons.insert(index, value) - def replace_spikeGeneratorRefPoisson_at(self, index, value): - self.spike_generator_ref_poissons[index] = value - def get_poissonFiringSynapse(self): - return self.poisson_firing_synapses - def set_poissonFiringSynapse(self, poisson_firing_synapses): - self.poisson_firing_synapses = poisson_firing_synapses - def add_poissonFiringSynapse(self, value): - self.poisson_firing_synapses.append(value) - def insert_poissonFiringSynapse_at(self, index, value): - self.poisson_firing_synapses.insert(index, value) - def replace_poissonFiringSynapse_at(self, index, value): - self.poisson_firing_synapses[index] = value - def get_transientPoissonFiringSynapse(self): - return self.transient_poisson_firing_synapses - def set_transientPoissonFiringSynapse(self, transient_poisson_firing_synapses): - self.transient_poisson_firing_synapses = transient_poisson_firing_synapses - def add_transientPoissonFiringSynapse(self, value): - self.transient_poisson_firing_synapses.append(value) - def insert_transientPoissonFiringSynapse_at(self, index, value): - self.transient_poisson_firing_synapses.insert(index, value) - def 
replace_transientPoissonFiringSynapse_at(self, index, value): - self.transient_poisson_firing_synapses[index] = value - def get_IF_curr_alpha(self): - return self.IF_curr_alpha - def set_IF_curr_alpha(self, IF_curr_alpha): - self.IF_curr_alpha = IF_curr_alpha - def add_IF_curr_alpha(self, value): - self.IF_curr_alpha.append(value) - def insert_IF_curr_alpha_at(self, index, value): - self.IF_curr_alpha.insert(index, value) - def replace_IF_curr_alpha_at(self, index, value): - self.IF_curr_alpha[index] = value - def get_IF_curr_exp(self): - return self.IF_curr_exp - def set_IF_curr_exp(self, IF_curr_exp): - self.IF_curr_exp = IF_curr_exp - def add_IF_curr_exp(self, value): - self.IF_curr_exp.append(value) - def insert_IF_curr_exp_at(self, index, value): - self.IF_curr_exp.insert(index, value) - def replace_IF_curr_exp_at(self, index, value): - self.IF_curr_exp[index] = value - def get_IF_cond_alpha(self): - return self.IF_cond_alpha - def set_IF_cond_alpha(self, IF_cond_alpha): - self.IF_cond_alpha = IF_cond_alpha - def add_IF_cond_alpha(self, value): - self.IF_cond_alpha.append(value) - def insert_IF_cond_alpha_at(self, index, value): - self.IF_cond_alpha.insert(index, value) - def replace_IF_cond_alpha_at(self, index, value): - self.IF_cond_alpha[index] = value - def get_IF_cond_exp(self): - return self.IF_cond_exp - def set_IF_cond_exp(self, IF_cond_exp): - self.IF_cond_exp = IF_cond_exp - def add_IF_cond_exp(self, value): - self.IF_cond_exp.append(value) - def insert_IF_cond_exp_at(self, index, value): - self.IF_cond_exp.insert(index, value) - def replace_IF_cond_exp_at(self, index, value): - self.IF_cond_exp[index] = value - def get_EIF_cond_exp_isfa_ista(self): - return self.EIF_cond_exp_isfa_ista - def set_EIF_cond_exp_isfa_ista(self, EIF_cond_exp_isfa_ista): - self.EIF_cond_exp_isfa_ista = EIF_cond_exp_isfa_ista - def add_EIF_cond_exp_isfa_ista(self, value): - self.EIF_cond_exp_isfa_ista.append(value) - def insert_EIF_cond_exp_isfa_ista_at(self, index, value): - self.EIF_cond_exp_isfa_ista.insert(index, value) - def replace_EIF_cond_exp_isfa_ista_at(self, index, value): - self.EIF_cond_exp_isfa_ista[index] = value - def get_EIF_cond_alpha_isfa_ista(self): - return self.EIF_cond_alpha_isfa_ista - def set_EIF_cond_alpha_isfa_ista(self, EIF_cond_alpha_isfa_ista): - self.EIF_cond_alpha_isfa_ista = EIF_cond_alpha_isfa_ista - def add_EIF_cond_alpha_isfa_ista(self, value): - self.EIF_cond_alpha_isfa_ista.append(value) - def insert_EIF_cond_alpha_isfa_ista_at(self, index, value): - self.EIF_cond_alpha_isfa_ista.insert(index, value) - def replace_EIF_cond_alpha_isfa_ista_at(self, index, value): - self.EIF_cond_alpha_isfa_ista[index] = value - def get_HH_cond_exp(self): - return self.HH_cond_exp - def set_HH_cond_exp(self, HH_cond_exp): - self.HH_cond_exp = HH_cond_exp - def add_HH_cond_exp(self, value): - self.HH_cond_exp.append(value) - def insert_HH_cond_exp_at(self, index, value): - self.HH_cond_exp.insert(index, value) - def replace_HH_cond_exp_at(self, index, value): - self.HH_cond_exp[index] = value - def get_expCondSynapse(self): - return self.exp_cond_synapses - def set_expCondSynapse(self, exp_cond_synapses): - self.exp_cond_synapses = exp_cond_synapses - def add_expCondSynapse(self, value): - self.exp_cond_synapses.append(value) - def insert_expCondSynapse_at(self, index, value): - self.exp_cond_synapses.insert(index, value) - def replace_expCondSynapse_at(self, index, value): - self.exp_cond_synapses[index] = value - def get_alphaCondSynapse(self): - return self.alpha_cond_synapses 
- def set_alphaCondSynapse(self, alpha_cond_synapses): - self.alpha_cond_synapses = alpha_cond_synapses - def add_alphaCondSynapse(self, value): - self.alpha_cond_synapses.append(value) - def insert_alphaCondSynapse_at(self, index, value): - self.alpha_cond_synapses.insert(index, value) - def replace_alphaCondSynapse_at(self, index, value): - self.alpha_cond_synapses[index] = value - def get_expCurrSynapse(self): - return self.exp_curr_synapses - def set_expCurrSynapse(self, exp_curr_synapses): - self.exp_curr_synapses = exp_curr_synapses - def add_expCurrSynapse(self, value): - self.exp_curr_synapses.append(value) - def insert_expCurrSynapse_at(self, index, value): - self.exp_curr_synapses.insert(index, value) - def replace_expCurrSynapse_at(self, index, value): - self.exp_curr_synapses[index] = value - def get_alphaCurrSynapse(self): - return self.alpha_curr_synapses - def set_alphaCurrSynapse(self, alpha_curr_synapses): - self.alpha_curr_synapses = alpha_curr_synapses - def add_alphaCurrSynapse(self, value): - self.alpha_curr_synapses.append(value) - def insert_alphaCurrSynapse_at(self, index, value): - self.alpha_curr_synapses.insert(index, value) - def replace_alphaCurrSynapse_at(self, index, value): - self.alpha_curr_synapses[index] = value - def get_SpikeSourcePoisson(self): - return self.SpikeSourcePoisson - def set_SpikeSourcePoisson(self, SpikeSourcePoisson): - self.SpikeSourcePoisson = SpikeSourcePoisson - def add_SpikeSourcePoisson(self, value): - self.SpikeSourcePoisson.append(value) - def insert_SpikeSourcePoisson_at(self, index, value): - self.SpikeSourcePoisson.insert(index, value) - def replace_SpikeSourcePoisson_at(self, index, value): - self.SpikeSourcePoisson[index] = value - def get_network(self): - return self.networks - def set_network(self, networks): - self.networks = networks - def add_network(self, value): - self.networks.append(value) - def insert_network_at(self, index, value): - self.networks.insert(index, value) - def replace_network_at(self, index, value): - self.networks[index] = value - def get_ComponentType(self): - return self.ComponentType - def set_ComponentType(self, ComponentType): - self.ComponentType = ComponentType - def add_ComponentType(self, value): - self.ComponentType.append(value) - def insert_ComponentType_at(self, index, value): - self.ComponentType.insert(index, value) - def replace_ComponentType_at(self, index, value): - self.ComponentType[index] = value def _hasContent(self): if ( self.includes or @@ -22424,16 +19140,6 @@ def factory(*args_, **kwargs_): else: return BasePynnSynapse(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_tau_syn(self): - return self.tau_syn - def set_tau_syn(self, tau_syn): - self.tau_syn = tau_syn - def get_extensiontype_(self): return self.extensiontype_ - def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ def _hasContent(self): if ( super(BasePynnSynapse, self)._hasContent() @@ -22547,32 +19253,6 @@ def factory(*args_, **kwargs_): else: return basePyNNCell(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_cm(self): - return self.cm - def set_cm(self, cm): - self.cm = cm - def get_i_offset(self): - return self.i_offset - def set_i_offset(self, i_offset): - self.i_offset = i_offset - def get_tau_syn_E(self): - return 
self.tau_syn_E - def set_tau_syn_E(self, tau_syn_E): - self.tau_syn_E = tau_syn_E - def get_tau_syn_I(self): - return self.tau_syn_I - def set_tau_syn_I(self, tau_syn_I): - self.tau_syn_I = tau_syn_I - def get_v_init(self): - return self.v_init - def set_v_init(self, v_init): - self.v_init = v_init - def get_extensiontype_(self): return self.extensiontype_ - def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ def _hasContent(self): if ( super(basePyNNCell, self)._hasContent() @@ -22723,40 +19403,6 @@ def factory(*args_, **kwargs_): else: return ContinuousProjection(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_continuousConnection(self): - return self.continuous_connections - def set_continuousConnection(self, continuous_connections): - self.continuous_connections = continuous_connections - def add_continuousConnection(self, value): - self.continuous_connections.append(value) - def insert_continuousConnection_at(self, index, value): - self.continuous_connections.insert(index, value) - def replace_continuousConnection_at(self, index, value): - self.continuous_connections[index] = value - def get_continuousConnectionInstance(self): - return self.continuous_connection_instances - def set_continuousConnectionInstance(self, continuous_connection_instances): - self.continuous_connection_instances = continuous_connection_instances - def add_continuousConnectionInstance(self, value): - self.continuous_connection_instances.append(value) - def insert_continuousConnectionInstance_at(self, index, value): - self.continuous_connection_instances.insert(index, value) - def replace_continuousConnectionInstance_at(self, index, value): - self.continuous_connection_instances[index] = value - def get_continuousConnectionInstanceW(self): - return self.continuous_connection_instance_ws - def set_continuousConnectionInstanceW(self, continuous_connection_instance_ws): - self.continuous_connection_instance_ws = continuous_connection_instance_ws - def add_continuousConnectionInstanceW(self, value): - self.continuous_connection_instance_ws.append(value) - def insert_continuousConnectionInstanceW_at(self, index, value): - self.continuous_connection_instance_ws.insert(index, value) - def replace_continuousConnectionInstanceW_at(self, index, value): - self.continuous_connection_instance_ws[index] = value def _hasContent(self): if ( self.continuous_connections or @@ -22968,40 +19614,6 @@ def factory(*args_, **kwargs_): else: return ElectricalProjection(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_electricalConnection(self): - return self.electrical_connections - def set_electricalConnection(self, electrical_connections): - self.electrical_connections = electrical_connections - def add_electricalConnection(self, value): - self.electrical_connections.append(value) - def insert_electricalConnection_at(self, index, value): - self.electrical_connections.insert(index, value) - def replace_electricalConnection_at(self, index, value): - self.electrical_connections[index] = value - def get_electricalConnectionInstance(self): - return self.electrical_connection_instances - def set_electricalConnectionInstance(self, electrical_connection_instances): - self.electrical_connection_instances = electrical_connection_instances - def 
add_electricalConnectionInstance(self, value): - self.electrical_connection_instances.append(value) - def insert_electricalConnectionInstance_at(self, index, value): - self.electrical_connection_instances.insert(index, value) - def replace_electricalConnectionInstance_at(self, index, value): - self.electrical_connection_instances[index] = value - def get_electricalConnectionInstanceW(self): - return self.electrical_connection_instance_ws - def set_electricalConnectionInstanceW(self, electrical_connection_instance_ws): - self.electrical_connection_instance_ws = electrical_connection_instance_ws - def add_electricalConnectionInstanceW(self, value): - self.electrical_connection_instance_ws.append(value) - def insert_electricalConnectionInstanceW_at(self, index, value): - self.electrical_connection_instance_ws.insert(index, value) - def replace_electricalConnectionInstanceW_at(self, index, value): - self.electrical_connection_instance_ws[index] = value def _hasContent(self): if ( self.electrical_connections or @@ -23210,36 +19822,6 @@ def factory(*args_, **kwargs_): else: return BaseConnectionNewFormat(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_preCell(self): - return self.pre_cell - def set_preCell(self, pre_cell): - self.pre_cell = pre_cell - def get_preSegment(self): - return self.pre_segment - def set_preSegment(self, pre_segment): - self.pre_segment = pre_segment - def get_preFractionAlong(self): - return self.pre_fraction_along - def set_preFractionAlong(self, pre_fraction_along): - self.pre_fraction_along = pre_fraction_along - def get_postCell(self): - return self.post_cell - def set_postCell(self, post_cell): - self.post_cell = post_cell - def get_postSegment(self): - return self.post_segment - def set_postSegment(self, post_segment): - self.post_segment = post_segment - def get_postFractionAlong(self): - return self.post_fraction_along - def set_postFractionAlong(self, post_fraction_along): - self.post_fraction_along = post_fraction_along - def get_extensiontype_(self): return self.extensiontype_ - def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ def validate_NonNegativeInteger(self, value): # Validate type NonNegativeInteger, a restriction on xs:nonNegativeInteger. 
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -23427,36 +20009,6 @@ def factory(*args_, **kwargs_): else: return BaseConnectionOldFormat(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_preCellId(self): - return self.pre_cell_id - def set_preCellId(self, pre_cell_id): - self.pre_cell_id = pre_cell_id - def get_preSegmentId(self): - return self.pre_segment_id - def set_preSegmentId(self, pre_segment_id): - self.pre_segment_id = pre_segment_id - def get_preFractionAlong(self): - return self.pre_fraction_along - def set_preFractionAlong(self, pre_fraction_along): - self.pre_fraction_along = pre_fraction_along - def get_postCellId(self): - return self.post_cell_id - def set_postCellId(self, post_cell_id): - self.post_cell_id = post_cell_id - def get_postSegmentId(self): - return self.post_segment_id - def set_postSegmentId(self, post_segment_id): - self.post_segment_id = post_segment_id - def get_postFractionAlong(self): - return self.post_fraction_along - def set_postFractionAlong(self, post_fraction_along): - self.post_fraction_along = post_fraction_along - def get_extensiontype_(self): return self.extensiontype_ - def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ def validate_NonNegativeInteger(self, value): # Validate type NonNegativeInteger, a restriction on xs:nonNegativeInteger. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -23639,34 +20191,6 @@ def factory(*args_, **kwargs_): else: return Projection(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_connection(self): - return self.connections - def set_connection(self, connections): - self.connections = connections - def add_connection(self, value): - self.connections.append(value) - def insert_connection_at(self, index, value): - self.connections.insert(index, value) - def replace_connection_at(self, index, value): - self.connections[index] = value - def get_connectionWD(self): - return self.connection_wds - def set_connectionWD(self, connection_wds): - self.connection_wds = connection_wds - def add_connectionWD(self, value): - self.connection_wds.append(value) - def insert_connectionWD_at(self, index, value): - self.connection_wds.insert(index, value) - def replace_connectionWD_at(self, index, value): - self.connection_wds[index] = value - def get_synapse(self): - return self.synapse - def set_synapse(self, synapse): - self.synapse = synapse def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -23881,14 +20405,6 @@ def factory(*args_, **kwargs_): else: return SpikeGeneratorRefPoisson(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_minimumISI(self): - return self.minimum_isi - def set_minimumISI(self, minimum_isi): - self.minimum_isi = minimum_isi def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. 
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -23988,14 +20504,6 @@ def factory(*args_, **kwargs_): else: return ConcentrationModel_D(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_type(self): - return self.type - def set_type(self, type): - self.type = type def _hasContent(self): if ( super(ConcentrationModel_D, self)._hasContent() @@ -24080,10 +20588,6 @@ def factory(*args_, **kwargs_): else: return ChannelDensityNernstCa2(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix def _hasContent(self): if ( super(ChannelDensityNernstCa2, self)._hasContent() @@ -24164,14 +20668,6 @@ def factory(*args_, **kwargs_): else: return ChannelDensityVShift(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_vShift(self): - return self.v_shift - def set_vShift(self, v_shift): - self.v_shift = v_shift def validate_Nml2Quantity_voltage(self, value): # Validate type Nml2Quantity_voltage, a restriction on xs:string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -24288,28 +20784,6 @@ def factory(*args_, **kwargs_): else: return Cell(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_morphology(self): - return self.morphology - def set_morphology(self, morphology): - self.morphology = morphology - def get_biophysicalProperties(self): - return self.biophysical_properties - def set_biophysicalProperties(self, biophysical_properties): - self.biophysical_properties = biophysical_properties - def get_morphology_attr(self): - return self.morphology_attr - def set_morphology_attr(self, morphology_attr): - self.morphology_attr = morphology_attr - def get_biophysical_properties_attr(self): - return self.biophysical_properties_attr - def set_biophysical_properties_attr(self, biophysical_properties_attr): - self.biophysical_properties_attr = biophysical_properties_attr - def get_extensiontype_(self): return self.extensiontype_ - def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. 
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -24862,94 +21336,6 @@ def factory(*args_, **kwargs_): else: return PinskyRinzelCA3Cell(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_iSoma(self): - return self.i_soma - def set_iSoma(self, i_soma): - self.i_soma = i_soma - def get_iDend(self): - return self.i_dend - def set_iDend(self, i_dend): - self.i_dend = i_dend - def get_gc(self): - return self.gc - def set_gc(self, gc): - self.gc = gc - def get_gLs(self): - return self.g_ls - def set_gLs(self, g_ls): - self.g_ls = g_ls - def get_gLd(self): - return self.g_ld - def set_gLd(self, g_ld): - self.g_ld = g_ld - def get_gNa(self): - return self.g_na - def set_gNa(self, g_na): - self.g_na = g_na - def get_gKdr(self): - return self.g_kdr - def set_gKdr(self, g_kdr): - self.g_kdr = g_kdr - def get_gCa(self): - return self.g_ca - def set_gCa(self, g_ca): - self.g_ca = g_ca - def get_gKahp(self): - return self.g_kahp - def set_gKahp(self, g_kahp): - self.g_kahp = g_kahp - def get_gKC(self): - return self.g_kc - def set_gKC(self, g_kc): - self.g_kc = g_kc - def get_gNmda(self): - return self.g_nmda - def set_gNmda(self, g_nmda): - self.g_nmda = g_nmda - def get_gAmpa(self): - return self.g_ampa - def set_gAmpa(self, g_ampa): - self.g_ampa = g_ampa - def get_eNa(self): - return self.e_na - def set_eNa(self, e_na): - self.e_na = e_na - def get_eCa(self): - return self.e_ca - def set_eCa(self, e_ca): - self.e_ca = e_ca - def get_eK(self): - return self.e_k - def set_eK(self, e_k): - self.e_k = e_k - def get_eL(self): - return self.e_l - def set_eL(self, e_l): - self.e_l = e_l - def get_qd0(self): - return self.qd0 - def set_qd0(self, qd0): - self.qd0 = qd0 - def get_pp(self): - return self.pp - def set_pp(self, pp): - self.pp = pp - def get_alphac(self): - return self.alphac - def set_alphac(self, alphac): - self.alphac = alphac - def get_betac(self): - return self.betac - def set_betac(self, betac): - self.betac = betac - def get_cm(self): - return self.cm - def set_cm(self, cm): - self.cm = cm def validate_Nml2Quantity_currentDensity(self, value): # Validate type Nml2Quantity_currentDensity, a restriction on xs:string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -25268,34 +21654,6 @@ def factory(*args_, **kwargs_): else: return FitzHughNagumo1969Cell(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_a(self): - return self.a - def set_a(self, a): - self.a = a - def get_b(self): - return self.b - def set_b(self, b): - self.b = b - def get_I(self): - return self.I - def set_I(self, I): - self.I = I - def get_phi(self): - return self.phi - def set_phi(self, phi): - self.phi = phi - def get_V0(self): - return self.V0 - def set_V0(self, V0): - self.V0 = V0 - def get_W0(self): - return self.W0 - def set_W0(self, W0): - self.W0 = W0 def validate_Nml2Quantity_none(self, value): # Validate type Nml2Quantity_none, a restriction on xs:string. 
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -25435,14 +21793,6 @@ def factory(*args_, **kwargs_): else: return FitzHughNagumoCell(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_I(self): - return self.I - def set_I(self, I): - self.I = I def validate_Nml2Quantity_none(self, value): # Validate type Nml2Quantity_none, a restriction on xs:string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -25546,16 +21896,6 @@ def factory(*args_, **kwargs_): else: return BaseCellMembPotCap(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_C(self): - return self.C - def set_C(self, C): - self.C = C - def get_extensiontype_(self): return self.extensiontype_ - def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ def validate_Nml2Quantity_capacitance(self, value): # Validate type Nml2Quantity_capacitance, a restriction on xs:string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -25682,34 +22022,6 @@ def factory(*args_, **kwargs_): else: return IzhikevichCell(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_v0(self): - return self.v0 - def set_v0(self, v0): - self.v0 = v0 - def get_thresh(self): - return self.thresh - def set_thresh(self, thresh): - self.thresh = thresh - def get_a(self): - return self.a - def set_a(self, a): - self.a = a - def get_b(self): - return self.b - def set_b(self, b): - self.b = b - def get_c(self): - return self.c - def set_c(self, c): - self.c = c - def get_d(self): - return self.d - def set_d(self, d): - self.d = d def validate_Nml2Quantity_voltage(self, value): # Validate type Nml2Quantity_voltage, a restriction on xs:string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -25873,32 +22185,6 @@ def factory(*args_, **kwargs_): else: return IafCell(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_leakReversal(self): - return self.leak_reversal - def set_leakReversal(self, leak_reversal): - self.leak_reversal = leak_reversal - def get_thresh(self): - return self.thresh - def set_thresh(self, thresh): - self.thresh = thresh - def get_reset(self): - return self.reset - def set_reset(self, reset): - self.reset = reset - def get_C(self): - return self.C - def set_C(self, C): - self.C = C - def get_leakConductance(self): - return self.leak_conductance - def set_leakConductance(self, leak_conductance): - self.leak_conductance = leak_conductance - def get_extensiontype_(self): return self.extensiontype_ - def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ def validate_Nml2Quantity_voltage(self, value): # Validate type Nml2Quantity_voltage, a restriction on xs:string. 
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -26074,28 +22360,6 @@ def factory(*args_, **kwargs_): else: return IafTauCell(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_leakReversal(self): - return self.leak_reversal - def set_leakReversal(self, leak_reversal): - self.leak_reversal = leak_reversal - def get_thresh(self): - return self.thresh - def set_thresh(self, thresh): - self.thresh = thresh - def get_reset(self): - return self.reset - def set_reset(self, reset): - self.reset = reset - def get_tau(self): - return self.tau - def set_tau(self, tau): - self.tau = tau - def get_extensiontype_(self): return self.extensiontype_ - def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ def validate_Nml2Quantity_voltage(self, value): # Validate type Nml2Quantity_voltage, a restriction on xs:string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -26257,30 +22521,6 @@ def factory(*args_, **kwargs_): else: return GradedSynapse(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_conductance(self): - return self.conductance - def set_conductance(self, conductance): - self.conductance = conductance - def get_delta(self): - return self.delta - def set_delta(self, delta): - self.delta = delta - def get_Vth(self): - return self.Vth - def set_Vth(self, Vth): - self.Vth = Vth - def get_k(self): - return self.k - def set_k(self, k): - self.k = k - def get_erev(self): - return self.erev - def set_erev(self, erev): - self.erev = erev def validate_Nml2Quantity_conductance(self, value): # Validate type Nml2Quantity_conductance, a restriction on xs:string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -26437,14 +22677,6 @@ def factory(*args_, **kwargs_): else: return LinearGradedSynapse(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_conductance(self): - return self.conductance - def set_conductance(self, conductance): - self.conductance = conductance def validate_Nml2Quantity_conductance(self, value): # Validate type Nml2Quantity_conductance, a restriction on xs:string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -26544,10 +22776,6 @@ def factory(*args_, **kwargs_): else: return SilentSynapse(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix def _hasContent(self): if ( super(SilentSynapse, self)._hasContent() @@ -26631,14 +22859,6 @@ def factory(*args_, **kwargs_): else: return GapJunction(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_conductance(self): - return self.conductance - def set_conductance(self, conductance): - self.conductance = conductance def validate_Nml2Quantity_conductance(self, value): # Validate type Nml2Quantity_conductance, a restriction on xs:string. 
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -26736,12 +22956,6 @@ def factory(*args_, **kwargs_): else: return BaseCurrentBasedSynapse(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_extensiontype_(self): return self.extensiontype_ - def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ def _hasContent(self): if ( super(BaseCurrentBasedSynapse, self)._hasContent() @@ -26832,12 +23046,6 @@ def factory(*args_, **kwargs_): else: return BaseVoltageDepSynapse(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_extensiontype_(self): return self.extensiontype_ - def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ def _hasContent(self): if ( super(BaseVoltageDepSynapse, self)._hasContent() @@ -26989,104 +23197,6 @@ def factory(*args_, **kwargs_): else: return IonChannel(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_gate(self): - return self.gates - def set_gate(self, gates): - self.gates = gates - def add_gate(self, value): - self.gates.append(value) - def insert_gate_at(self, index, value): - self.gates.insert(index, value) - def replace_gate_at(self, index, value): - self.gates[index] = value - def get_gateHHrates(self): - return self.gate_hh_rates - def set_gateHHrates(self, gate_hh_rates): - self.gate_hh_rates = gate_hh_rates - def add_gateHHrates(self, value): - self.gate_hh_rates.append(value) - def insert_gateHHrates_at(self, index, value): - self.gate_hh_rates.insert(index, value) - def replace_gateHHrates_at(self, index, value): - self.gate_hh_rates[index] = value - def get_gateHHratesTau(self): - return self.gate_h_hrates_taus - def set_gateHHratesTau(self, gate_h_hrates_taus): - self.gate_h_hrates_taus = gate_h_hrates_taus - def add_gateHHratesTau(self, value): - self.gate_h_hrates_taus.append(value) - def insert_gateHHratesTau_at(self, index, value): - self.gate_h_hrates_taus.insert(index, value) - def replace_gateHHratesTau_at(self, index, value): - self.gate_h_hrates_taus[index] = value - def get_gateHHtauInf(self): - return self.gate_hh_tau_infs - def set_gateHHtauInf(self, gate_hh_tau_infs): - self.gate_hh_tau_infs = gate_hh_tau_infs - def add_gateHHtauInf(self, value): - self.gate_hh_tau_infs.append(value) - def insert_gateHHtauInf_at(self, index, value): - self.gate_hh_tau_infs.insert(index, value) - def replace_gateHHtauInf_at(self, index, value): - self.gate_hh_tau_infs[index] = value - def get_gateHHratesInf(self): - return self.gate_h_hrates_infs - def set_gateHHratesInf(self, gate_h_hrates_infs): - self.gate_h_hrates_infs = gate_h_hrates_infs - def add_gateHHratesInf(self, value): - self.gate_h_hrates_infs.append(value) - def insert_gateHHratesInf_at(self, index, value): - self.gate_h_hrates_infs.insert(index, value) - def replace_gateHHratesInf_at(self, index, value): - self.gate_h_hrates_infs[index] = value - def get_gateHHratesTauInf(self): - return self.gate_h_hrates_tau_infs - def set_gateHHratesTauInf(self, gate_h_hrates_tau_infs): - self.gate_h_hrates_tau_infs = gate_h_hrates_tau_infs - def add_gateHHratesTauInf(self, value): - self.gate_h_hrates_tau_infs.append(value) - 
def insert_gateHHratesTauInf_at(self, index, value): - self.gate_h_hrates_tau_infs.insert(index, value) - def replace_gateHHratesTauInf_at(self, index, value): - self.gate_h_hrates_tau_infs[index] = value - def get_gateHHInstantaneous(self): - return self.gate_hh_instantaneouses - def set_gateHHInstantaneous(self, gate_hh_instantaneouses): - self.gate_hh_instantaneouses = gate_hh_instantaneouses - def add_gateHHInstantaneous(self, value): - self.gate_hh_instantaneouses.append(value) - def insert_gateHHInstantaneous_at(self, index, value): - self.gate_hh_instantaneouses.insert(index, value) - def replace_gateHHInstantaneous_at(self, index, value): - self.gate_hh_instantaneouses[index] = value - def get_gateFractional(self): - return self.gate_fractionals - def set_gateFractional(self, gate_fractionals): - self.gate_fractionals = gate_fractionals - def add_gateFractional(self, value): - self.gate_fractionals.append(value) - def insert_gateFractional_at(self, index, value): - self.gate_fractionals.insert(index, value) - def replace_gateFractional_at(self, index, value): - self.gate_fractionals[index] = value - def get_species(self): - return self.species - def set_species(self, species): - self.species = species - def get_type(self): - return self.type - def set_type(self, type): - self.type = type - def get_conductance(self): - return self.conductance - def set_conductance(self, conductance): - self.conductance = conductance - def get_extensiontype_(self): return self.extensiontype_ - def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -27310,10 +23420,6 @@ def factory(*args_, **kwargs_): else: return AlphaCurrSynapse(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix def _hasContent(self): if ( super(AlphaCurrSynapse, self)._hasContent() @@ -27391,10 +23497,6 @@ def factory(*args_, **kwargs_): else: return ExpCurrSynapse(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix def _hasContent(self): if ( super(ExpCurrSynapse, self)._hasContent() @@ -27475,14 +23577,6 @@ def factory(*args_, **kwargs_): else: return AlphaCondSynapse(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_e_rev(self): - return self.e_rev - def set_e_rev(self, e_rev): - self.e_rev = e_rev def _hasContent(self): if ( super(AlphaCondSynapse, self)._hasContent() @@ -27571,14 +23665,6 @@ def factory(*args_, **kwargs_): else: return ExpCondSynapse(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_e_rev(self): - return self.e_rev - def set_e_rev(self, e_rev): - self.e_rev = e_rev def _hasContent(self): if ( super(ExpCondSynapse, self)._hasContent() @@ -27691,46 +23777,6 @@ def factory(*args_, **kwargs_): else: return HH_cond_exp(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_v_offset(self): - 
return self.v_offset - def set_v_offset(self, v_offset): - self.v_offset = v_offset - def get_e_rev_E(self): - return self.e_rev_E - def set_e_rev_E(self, e_rev_E): - self.e_rev_E = e_rev_E - def get_e_rev_I(self): - return self.e_rev_I - def set_e_rev_I(self, e_rev_I): - self.e_rev_I = e_rev_I - def get_e_rev_K(self): - return self.e_rev_K - def set_e_rev_K(self, e_rev_K): - self.e_rev_K = e_rev_K - def get_e_rev_Na(self): - return self.e_rev_Na - def set_e_rev_Na(self, e_rev_Na): - self.e_rev_Na = e_rev_Na - def get_e_rev_leak(self): - return self.e_rev_leak - def set_e_rev_leak(self, e_rev_leak): - self.e_rev_leak = e_rev_leak - def get_g_leak(self): - return self.g_leak - def set_g_leak(self, g_leak): - self.g_leak = g_leak - def get_gbar_K(self): - return self.gbar_K - def set_gbar_K(self, gbar_K): - self.gbar_K = gbar_K - def get_gbar_Na(self): - return self.gbar_Na - def set_gbar_Na(self, gbar_Na): - self.gbar_Na = gbar_Na def _hasContent(self): if ( super(HH_cond_exp, self)._hasContent() @@ -27896,32 +23942,6 @@ def factory(*args_, **kwargs_): else: return basePyNNIaFCell(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_tau_m(self): - return self.tau_m - def set_tau_m(self, tau_m): - self.tau_m = tau_m - def get_tau_refrac(self): - return self.tau_refrac - def set_tau_refrac(self, tau_refrac): - self.tau_refrac = tau_refrac - def get_v_reset(self): - return self.v_reset - def set_v_reset(self, v_reset): - self.v_reset = v_reset - def get_v_rest(self): - return self.v_rest - def set_v_rest(self, v_rest): - self.v_rest = v_rest - def get_v_thresh(self): - return self.v_thresh - def set_v_thresh(self, v_thresh): - self.v_thresh = v_thresh - def get_extensiontype_(self): return self.extensiontype_ - def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ def _hasContent(self): if ( super(basePyNNIaFCell, self)._hasContent() @@ -28061,20 +24081,6 @@ def factory(*args_, **kwargs_): else: return ContinuousConnection(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_preComponent(self): - return self.pre_component - def set_preComponent(self, pre_component): - self.pre_component = pre_component - def get_postComponent(self): - return self.post_component - def set_postComponent(self, post_component): - self.post_component = post_component - def get_extensiontype_(self): return self.extensiontype_ - def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -28266,16 +24272,6 @@ def factory(*args_, **kwargs_): else: return ElectricalConnection(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_synapse(self): - return self.synapse - def set_synapse(self, synapse): - self.synapse = synapse - def get_extensiontype_(self): return self.extensiontype_ - def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. 
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -28460,18 +24456,6 @@ def factory(*args_, **kwargs_): else: return ConnectionWD(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_weight(self): - return self.weight - def set_weight(self, weight): - self.weight = weight - def get_delay(self): - return self.delay - def set_delay(self, delay): - self.delay = delay def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -28663,10 +24647,6 @@ def factory(*args_, **kwargs_): else: return Connection(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix def _hasContent(self): if ( super(Connection, self)._hasContent() @@ -28815,14 +24795,6 @@ def factory(*args_, **kwargs_): else: return Cell2CaPools(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_biophysicalProperties2CaPools(self): - return self.biophysical_properties2_ca_pools - def set_biophysicalProperties2CaPools(self, biophysical_properties2_ca_pools): - self.biophysical_properties2_ca_pools = biophysical_properties2_ca_pools def _hasContent(self): if ( self.biophysical_properties2_ca_pools is not None or @@ -28942,50 +24914,6 @@ def factory(*args_, **kwargs_): else: return AdExIaFCell(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_gL(self): - return self.g_l - def set_gL(self, g_l): - self.g_l = g_l - def get_EL(self): - return self.EL - def set_EL(self, EL): - self.EL = EL - def get_reset(self): - return self.reset - def set_reset(self, reset): - self.reset = reset - def get_VT(self): - return self.VT - def set_VT(self, VT): - self.VT = VT - def get_thresh(self): - return self.thresh - def set_thresh(self, thresh): - self.thresh = thresh - def get_delT(self): - return self.del_t - def set_delT(self, del_t): - self.del_t = del_t - def get_tauw(self): - return self.tauw - def set_tauw(self, tauw): - self.tauw = tauw - def get_refract(self): - return self.refract - def set_refract(self, refract): - self.refract = refract - def get_a(self): - return self.a - def set_a(self, a): - self.a = a - def get_b(self): - return self.b - def set_b(self, b): - self.b = b def validate_Nml2Quantity_conductance(self, value): # Validate type Nml2Quantity_conductance, a restriction on xs:string. 
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -29214,46 +25142,6 @@ def factory(*args_, **kwargs_): else: return Izhikevich2007Cell(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_v0(self): - return self.v0 - def set_v0(self, v0): - self.v0 = v0 - def get_k(self): - return self.k - def set_k(self, k): - self.k = k - def get_vr(self): - return self.vr - def set_vr(self, vr): - self.vr = vr - def get_vt(self): - return self.vt - def set_vt(self, vt): - self.vt = vt - def get_vpeak(self): - return self.vpeak - def set_vpeak(self, vpeak): - self.vpeak = vpeak - def get_a(self): - return self.a - def set_a(self, a): - self.a = a - def get_b(self): - return self.b - def set_b(self, b): - self.b = b - def get_c(self): - return self.c - def set_c(self, c): - self.c = c - def get_d(self): - return self.d - def set_d(self, d): - self.d = d def validate_Nml2Quantity_voltage(self, value): # Validate type Nml2Quantity_voltage, a restriction on xs:string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -29461,14 +25349,6 @@ def factory(*args_, **kwargs_): else: return IafRefCell(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_refract(self): - return self.refract - def set_refract(self, refract): - self.refract = refract def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -29568,14 +25448,6 @@ def factory(*args_, **kwargs_): else: return IafTauRefCell(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_refract(self): - return self.refract - def set_refract(self, refract): - self.refract = refract def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -29684,26 +25556,6 @@ def factory(*args_, **kwargs_): else: return DoubleSynapse(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_synapse1(self): - return self.synapse1 - def set_synapse1(self, synapse1): - self.synapse1 = synapse1 - def get_synapse2(self): - return self.synapse2 - def set_synapse2(self, synapse2): - self.synapse2 = synapse2 - def get_synapse1Path(self): - return self.synapse1_path - def set_synapse1Path(self, synapse1_path): - self.synapse1_path = synapse1_path - def get_synapse2Path(self): - return self.synapse2_path - def set_synapse2Path(self, synapse2_path): - self.synapse2_path = synapse2_path def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. 
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -29828,18 +25680,6 @@ def factory(*args_, **kwargs_): else: return AlphaCurrentSynapse(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_tau(self): - return self.tau - def set_tau(self, tau): - self.tau = tau - def get_ibase(self): - return self.ibase - def set_ibase(self, ibase): - self.ibase = ibase def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -29965,24 +25805,6 @@ def factory(*args_, **kwargs_): else: return BaseConductanceBasedSynapseTwo(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_gbase1(self): - return self.gbase1 - def set_gbase1(self, gbase1): - self.gbase1 = gbase1 - def get_gbase2(self): - return self.gbase2 - def set_gbase2(self, gbase2): - self.gbase2 = gbase2 - def get_erev(self): - return self.erev - def set_erev(self, erev): - self.erev = erev - def get_extensiontype_(self): return self.extensiontype_ - def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ def validate_Nml2Quantity_conductance(self, value): # Validate type Nml2Quantity_conductance, a restriction on xs:string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -30125,20 +25947,6 @@ def factory(*args_, **kwargs_): else: return BaseConductanceBasedSynapse(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_gbase(self): - return self.gbase - def set_gbase(self, gbase): - self.gbase = gbase - def get_erev(self): - return self.erev - def set_erev(self, erev): - self.erev = erev - def get_extensiontype_(self): return self.extensiontype_ - def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ def validate_Nml2Quantity_conductance(self, value): # Validate type Nml2Quantity_conductance, a restriction on xs:string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -30272,14 +26080,6 @@ def factory(*args_, **kwargs_): else: return IonChannelVShift(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_vShift(self): - return self.v_shift - def set_vShift(self, v_shift): - self.v_shift = v_shift def validate_Nml2Quantity_voltage(self, value): # Validate type Nml2Quantity_voltage, a restriction on xs:string. 
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -30380,10 +26180,6 @@ def factory(*args_, **kwargs_): else: return IonChannelHH(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix def _hasContent(self): if ( super(IonChannelHH, self)._hasContent() @@ -30461,10 +26257,6 @@ def factory(*args_, **kwargs_): else: return IF_curr_exp(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix def _hasContent(self): if ( super(IF_curr_exp, self)._hasContent() @@ -30542,10 +26334,6 @@ def factory(*args_, **kwargs_): else: return IF_curr_alpha(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix def _hasContent(self): if ( super(IF_curr_alpha, self)._hasContent() @@ -30630,20 +26418,6 @@ def factory(*args_, **kwargs_): else: return basePyNNIaFCondCell(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_e_rev_E(self): - return self.e_rev_E - def set_e_rev_E(self, e_rev_E): - self.e_rev_E = e_rev_E - def get_e_rev_I(self): - return self.e_rev_I - def set_e_rev_I(self, e_rev_I): - self.e_rev_I = e_rev_I - def get_extensiontype_(self): return self.extensiontype_ - def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ def _hasContent(self): if ( super(basePyNNIaFCondCell, self)._hasContent() @@ -30753,12 +26527,6 @@ def factory(*args_, **kwargs_): else: return ContinuousConnectionInstance(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_extensiontype_(self): return self.extensiontype_ - def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ def _hasContent(self): if ( super(ContinuousConnectionInstance, self)._hasContent() @@ -30865,12 +26633,6 @@ def factory(*args_, **kwargs_): else: return ElectricalConnectionInstance(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_extensiontype_(self): return self.extensiontype_ - def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ def _hasContent(self): if ( super(ElectricalConnectionInstance, self)._hasContent() @@ -30981,22 +26743,6 @@ def factory(*args_, **kwargs_): else: return ExpThreeSynapse(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_tauDecay1(self): - return self.tau_decay1 - def set_tauDecay1(self, tau_decay1): - self.tau_decay1 = tau_decay1 - def get_tauDecay2(self): - return self.tau_decay2 - def set_tauDecay2(self, tau_decay2): - self.tau_decay2 = tau_decay2 - def get_tauRise(self): - return self.tau_rise - def set_tauRise(self, tau_rise): - self.tau_rise = tau_rise def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. 
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -31116,20 +26862,6 @@ def factory(*args_, **kwargs_): else: return ExpTwoSynapse(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_tauDecay(self): - return self.tau_decay - def set_tauDecay(self, tau_decay): - self.tau_decay = tau_decay - def get_tauRise(self): - return self.tau_rise - def set_tauRise(self, tau_rise): - self.tau_rise = tau_rise - def get_extensiontype_(self): return self.extensiontype_ - def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -31249,14 +26981,6 @@ def factory(*args_, **kwargs_): else: return ExpOneSynapse(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_tauDecay(self): - return self.tau_decay - def set_tauDecay(self, tau_decay): - self.tau_decay = tau_decay def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -31356,14 +27080,6 @@ def factory(*args_, **kwargs_): else: return AlphaSynapse(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_tau(self): - return self.tau - def set_tau(self, tau): - self.tau = tau def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. 
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: @@ -31476,32 +27192,6 @@ def factory(*args_, **kwargs_): else: return EIF_cond_exp_isfa_ista(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_a(self): - return self.a - def set_a(self, a): - self.a = a - def get_b(self): - return self.b - def set_b(self, b): - self.b = b - def get_delta_T(self): - return self.delta_T - def set_delta_T(self, delta_T): - self.delta_T = delta_T - def get_tau_w(self): - return self.tau_w - def set_tau_w(self, tau_w): - self.tau_w = tau_w - def get_v_spike(self): - return self.v_spike - def set_v_spike(self, v_spike): - self.v_spike = v_spike - def get_extensiontype_(self): return self.extensiontype_ - def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ def _hasContent(self): if ( super(EIF_cond_exp_isfa_ista, self)._hasContent() @@ -31631,10 +27321,6 @@ def factory(*args_, **kwargs_): else: return IF_cond_exp(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix def _hasContent(self): if ( super(IF_cond_exp, self)._hasContent() @@ -31712,10 +27398,6 @@ def factory(*args_, **kwargs_): else: return IF_cond_alpha(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix def _hasContent(self): if ( super(IF_cond_alpha, self)._hasContent() @@ -31799,14 +27481,6 @@ def factory(*args_, **kwargs_): else: return ContinuousConnectionInstanceW(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_weight(self): - return self.weight - def set_weight(self, weight): - self.weight = weight def _hasContent(self): if ( super(ContinuousConnectionInstanceW, self)._hasContent() @@ -31912,14 +27586,6 @@ def factory(*args_, **kwargs_): else: return ElectricalConnectionInstanceW(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_weight(self): - return self.weight - def set_weight(self, weight): - self.weight = weight def _hasContent(self): if ( super(ElectricalConnectionInstanceW, self)._hasContent() @@ -32028,18 +27694,6 @@ def factory(*args_, **kwargs_): else: return BlockingPlasticSynapse(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix - def get_plasticityMechanism(self): - return self.plasticity_mechanism - def set_plasticityMechanism(self, plasticity_mechanism): - self.plasticity_mechanism = plasticity_mechanism - def get_blockMechanism(self): - return self.block_mechanism - def set_blockMechanism(self, block_mechanism): - self.block_mechanism = block_mechanism def _hasContent(self): if ( self.plasticity_mechanism is not None or @@ -32138,10 +27792,6 @@ def factory(*args_, **kwargs_): else: return EIF_cond_alpha_isfa_ista(*args_, **kwargs_) factory = staticmethod(factory) - def get_ns_prefix_(self): - return self.ns_prefix_ - def set_ns_prefix_(self, ns_prefix): - self.ns_prefix_ = ns_prefix def _hasContent(self): if ( 
super(EIF_cond_alpha_isfa_ista, self)._hasContent()

From 645a3a050f29cdf72a8f6eba055ec7a39f3a3652 Mon Sep 17 00:00:00 2001
From: Padraig Gleeson
Date: Fri, 17 Sep 2021 10:30:39 +0100
Subject: [PATCH 041/136] Initial notebook for HDF5 serialisation

---
 notebooks/HDF5Serialisation.ipynb | 114 ++++++++++++++++++++++++++++++
 1 file changed, 114 insertions(+)
 create mode 100644 notebooks/HDF5Serialisation.ipynb

diff --git a/notebooks/HDF5Serialisation.ipynb b/notebooks/HDF5Serialisation.ipynb
new file mode 100644
index 00000000..079aea46
--- /dev/null
+++ b/notebooks/HDF5Serialisation.ipynb
@@ -0,0 +1,114 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Use of NeuroML HDF5 serialisation format\n",
+    "\n",
+    "\n",
+    "The main serialisation format for NeuroML 2 is XML, but a serialisation form based on HDF5 is also supported.\n",
+    "\n",
+    "The main advantages of this are:\n",
+    " \n",
+    "- Models can be built with libNeuroML and saved automatically in this format ([code](https://github.com/NeuralEnsemble/libNeuroML/tree/master/neuroml/hdf5))\n",
+    "- An identical format is used in the Java based libraries ([code](https://github.com/NeuroML/org.neuroml.model/tree/master/src/main/java/org/neuroml/model/util/hdf5))\n",
+    "\n",
+    "Note though:\n",
+    "\n",
+    "- Documentation at this time is minimal\n",
+    "- The serialisation concentrates on efficient storage of network properties (cell positions, connections), rather than morphologies (this is not an issue if populations with multiple identical cells are used)\n",
+    "\n"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Writing to the format\n",
+    "\n",
+    "To write a model constructed in/loaded into libNeuroML to HDF5 format, use the NeuroMLHdf5Writer class (e.g. see [build_complete.py](https://github.com/NeuralEnsemble/libNeuroML/blob/master/neuroml/examples/build_complete.py))\n",
+    "\n",
+    "```\n",
+    "import neuroml.writers as writers\n",
+    "\n",
+    "nml_doc = ...\n",
+    "\n",
+    "writers.NeuroMLHdf5Writer.write(nml_doc, 'complete.nml.h5')\n",
+    "```\n"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Loading the HDF5 format\n",
+    "\n",
+    "You can use NeuroMLHdf5Loader:\n",
+    "\n",
+    "```\n",
+    "from neuroml import loaders\n",
+    "\n",
+    "nml_doc2 = loaders.NeuroMLHdf5Loader.load('complete.nml.h5')\n",
+    "```\n",
+    "\n",
+    "or **even better** use read_neuroml2_file, so that the included files (cells, channels) will be loaded into memory too:\n",
+    "\n",
+    "```\n",
+    "nml_doc2 = loaders.read_neuroml2_file(nml_h5_file, include_includes=True)\n",
+    "```\n",
+    "\n",
+    "See a test example [here](https://github.com/NeuralEnsemble/libNeuroML/blob/master/neuroml/test/test_hdf5_parser.py) for writing/reading the format.\n"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Loading as \"optimized\" format\n",
+    "\n",
+    "It is also possible to load the HDF5 files as \"optimized\" format. This means:\n",
+    "\n",
+    "- Instead of constructing the tree of classes in memory for instances/connections etc. it just stores the datasets for the positions and connections in memory as HDF5 datasets and uses these when info about positions/connections are requested.\n",
+    "\n",
+    "The rest of the NeuroML (cells, channels) are loaded in the normal way.\n",
+    "\n",
+    "```\n",
+    "from neuroml import loaders\n",
+    "\n",
+    "nml_doc2 = loaders.read_neuroml2_file(nml_h5_file,\n",
+    "                                      include_includes=True,\n",
+    "                                      optimized=True)\n",
+    "```"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": []
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 3",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.7.7"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 4
+}

From d2726a82816359de107b8c76d1e90cca961b3e64 Mon Sep 17 00:00:00 2001
From: "Ankur Sinha (Ankur Sinha Gmail)"
Date: Mon, 27 Sep 2021 09:17:38 +0100
Subject: [PATCH 042/136] feat: format with black

---
 neuroml/__init__.py | 4 +-
 neuroml/arraymorph.py | 131 +-
 neuroml/arraymorph_load_time_benchmark.py | 54 +-
 neuroml/benchmarks/arraymorph_benchmarks.py | 176 +-
 neuroml/examples/arraymorph_generation.py | 60 +-
 neuroml/examples/build_3D_network.py | 86 +-
 neuroml/examples/build_complete.py | 379 +-
 neuroml/examples/build_network.py | 72 +-
 neuroml/examples/build_network2.py | 199 +-
 neuroml/examples/ion_channel_generation.py | 60 +-
 neuroml/examples/json_serialization.py | 58 +-
 neuroml/examples/loading_modifying_writing.py | 54 +-
 neuroml/examples/loading_modifying_writing_large.py | 65 +-
 neuroml/examples/morphology_generation.py | 60 +-
 neuroml/examples/run_all.py | 10 +-
 neuroml/examples/single_izhikevich_reader.py | 13 +-
 neuroml/examples/single_izhikevich_writer.py | 7 +-
 neuroml/examples/write_pynn.py | 38 +-
 neuroml/examples/write_syns.py | 29 +-
 neuroml/hdf5/DefaultNetworkHandler.py | 283 +-
 neuroml/hdf5/NetworkBuilder.py | 548 +-
 neuroml/hdf5/NetworkContainer.py | 678 +-
 neuroml/hdf5/NeuroMLHdf5Parser.py | 1282 +-
 neuroml/hdf5/NeuroMLXMLParser.py | 911 +-
 neuroml/hdf5/__init__.py | 29 +-
 neuroml/loaders.py | 364 +-
 neuroml/nml/config.py | 2 +-
 neuroml/nml/generateds_config.py | 132 +-
 neuroml/nml/helper_methods.py | 321 +-
 neuroml/nml/nml.py | 41014 ++++++++++++----
 neuroml/test/misc_tests.py | 8 +-
 neuroml/test/test_arraymorph.py | 393 +-
 neuroml/test/test_cell.py | 149 +-
 neuroml/test/test_hdf5_optimized.py | 58 +-
 neuroml/test/test_hdf5_parser.py | 42 +-
 neuroml/test/test_integration.py | 34 +-
 neuroml/test/test_loaders.py | 16 +-
 neuroml/test/test_morphology.py | 9 +-
 neuroml/test/test_segment.py | 281 +-
 neuroml/test/test_writers.py | 173 +-
 neuroml/test/test_xml_parser.py | 58 +-
 neuroml/utils.py | 66 +-
 neuroml/writers.py | 166 +-
 43 files changed, 35129 insertions(+), 13443 deletions(-)

diff --git a/neuroml/__init__.py b/neuroml/__init__.py
index 06100b2a..80bde0e5 100644
--- a/neuroml/__init__.py
+++ b/neuroml/__init__.py
@@ -1,7 +1,7 @@
 from .nml.nml import *  # allows importation of all neuroml classes
 
-__version__ = '0.2.57'
-__version_info__ = tuple(int(i) for i in __version__.split('.'))
+__version__ = "0.2.57"
+__version_info__ = tuple(int(i) for i in __version__.split("."))
 
 
 current_neuroml_version = "v2.2"
diff --git a/neuroml/arraymorph.py b/neuroml/arraymorph.py
index 10d9c7d8..3e2123ee 100644
--- a/neuroml/arraymorph.py
+++
b/neuroml/arraymorph.py @@ -28,16 +28,18 @@ class ArrayMorphology(neuroml.Morphology): """ - def __init__(self, - vertices=[], - connectivity=[], - id=None, - node_types=None, - name=None, - physical_mask=None, - fractions_along=None): - - super(ArrayMorphology,self).__init__() + def __init__( + self, + vertices=[], + connectivity=[], + id=None, + node_types=None, + name=None, + physical_mask=None, + fractions_along=None, + ): + + super(ArrayMorphology, self).__init__() self.connectivity = np.array(connectivity) self.vertices = np.array(vertices) @@ -45,32 +47,30 @@ def __init__(self, self.id = id if np.any(physical_mask): - self.physical_mask=np.array(physical_mask) + self.physical_mask = np.array(physical_mask) else: - self.physical_mask=np.zeros(len(connectivity),dtype='bool') + self.physical_mask = np.zeros(len(connectivity), dtype="bool") if np.any(node_types): - self.node_types=np.array(node_types) + self.node_types = np.array(node_types) else: - self.node_types=np.zeros(len(connectivity), - dtype='int32') + self.node_types = np.zeros(len(connectivity), dtype="int32") if np.any(fractions_along): self.fractions_along = np.array(fractions_along) else: - self.fractions_along=np.zeros(len(connectivity), - dtype='int32') + self.fractions_along = np.zeros(len(connectivity), dtype="int32") - #it will need a reference to its parent? + # it will need a reference to its parent? self.segments = SegmentList(self) - assert self.valid_morphology,'invalid_morphology' + assert self.valid_morphology, "invalid_morphology" @property def valid_morphology(self): all_nodes = self.__all_nodes_satisfied all_vertices = self.__all_vertices_present - return (all_nodes and all_vertices) + return all_nodes and all_vertices @property def __all_vertices_present(self): @@ -81,7 +81,7 @@ def __all_vertices_present(self): num_vertices = len(self.vertices) - return(all_vertices_present or num_vertices == 0) + return all_vertices_present or num_vertices == 0 @property def valid_ids(self): @@ -93,14 +93,13 @@ def valid_ids(self): return valid_flag - @property def __all_nodes_satisfied(self): m = self.vertices.shape[0] n = self.connectivity.shape[0] p = self.node_types.shape[0] - all_nodes_satisfied = (m == n == p) + all_nodes_satisfied = m == n == p return all_nodes_satisfied @property @@ -121,11 +120,11 @@ def physical_indices(self): physical_indices = np.where(self.physical_mask == 0)[0] return physical_indices - def children(self,index): + def children(self, index): """Returns an array with indexes of children""" return np.where(self.connectivity == index) - def to_root(self,index): + def to_root(self, index): """ Changes the connectivity matrix so that the node at index becomes the root @@ -133,37 +132,37 @@ def to_root(self,index): old_root_index = self.root_index new_root_index = index - #do a tree traversal: + # do a tree traversal: parent_index = self.connectivity[index] - grandparent_index=self.connectivity[parent_index] - while index!=old_root_index: - self.connectivity[parent_index]=index + grandparent_index = self.connectivity[parent_index] + while index != old_root_index: + self.connectivity[parent_index] = index index = parent_index parent_index = grandparent_index grandparent_index = self.connectivity[parent_index] self.connectivity[new_root_index] = -1 - def parent_id(self,index): + def parent_id(self, index): """Return the parent index for the given index""" return self.connectivity[index] - def vertex(self,index): + def vertex(self, index): """Return vertex corresponding to index in morphology""" 
return self.vertices[index] def __len__(self): return len(self.connectivity) - def pop(self,index): + def pop(self, index): """ TODO:This is failing tests (understandably) - need to fix! Deletes a node from the morphology, its children become children of the deleted node's parent. """ - self.vertices = np.delete(self.vertices,index) - self.node_types = np.delete(self.node_types,index) - self.connectivity = np.delete(self.connectivity,index) + self.vertices = np.delete(self.vertices, index) + self.node_types = np.delete(self.node_types, index) + self.connectivity = np.delete(self.connectivity, index) k = 0 for i in self.connectivity: @@ -172,19 +171,19 @@ def pop(self,index): k += 1 pass - def to_neuroml_morphology(self,id=""): + def to_neuroml_morphology(self, id=""): morphology = neuroml.Morphology() morphology.id = id - #need to traverse the tree: - for index in range(self.num_vertices-1): + # need to traverse the tree: + for index in range(self.num_vertices - 1): seg = self.segment_from_vertex_index(index) morphology.segments.append(seg) return morphology - def segment_from_vertex_index(self,index): + def segment_from_vertex_index(self, index): parent_index = self.connectivity[index] node_x = self.vertices[index][0] @@ -197,26 +196,20 @@ def segment_from_vertex_index(self,index): parent_z = self.vertices[parent_index][2] parent_d = self.vertices[parent_index][3] - p = neuroml.Point3DWithDiam(x=node_x, - y=node_y, - z=node_z, - diameter=node_d) + p = neuroml.Point3DWithDiam(x=node_x, y=node_y, z=node_z, diameter=node_d) - d = neuroml.Point3DWithDiam(x=parent_x, - y=parent_y, - z=parent_z, - diameter=parent_d) + d = neuroml.Point3DWithDiam( + x=parent_x, y=parent_y, z=parent_z, diameter=parent_d + ) - - seg = neuroml.Segment(proximal=p, - distal=d, - id=index) + seg = neuroml.Segment(proximal=p, distal=d, id=index) if index > 1: parent = neuroml.SegmentParent(segments=parent_index) seg.parent = parent return seg + class SegmentList(object): """ This class is a proxy, it returns a segment either @@ -224,11 +217,11 @@ class SegmentList(object): it returns the relevant segment. """ - def __init__(self,arraymorph): + def __init__(self, arraymorph): self.arraymorph = arraymorph self.instantiated_segments = {} - def __vertex_index_from_segment_index__(self,index): + def __vertex_index_from_segment_index__(self, index): """ The existence of a physical mask means that segment and and vertex indices fall out of sync. 
This function returns the @@ -250,18 +243,18 @@ def __len__(self): num_vertices = self.arraymorph.num_vertices num_floating = np.sum(self.arraymorph.physical_mask) - num_segments = num_vertices - num_floating -1 + num_segments = num_vertices - num_floating - 1 if num_segments < 0: num_segments = 0 return int(num_segments) - def __iadd__(self,segment_list): + def __iadd__(self, segment_list): for segment in segment_list: self.append(segment) return self - def __getitem__(self,segment_index): + def __getitem__(self, segment_index): if segment_index in self.instantiated_segments: neuroml_segment = self.instantiated_segments[segment_index] @@ -271,10 +264,10 @@ def __getitem__(self,segment_index): self.instantiated_segments[segment_index] = neuroml_segment return neuroml_segment - def __setitem__(self,index,user_set_segment): - self.instantiated_segments[index] = user_set_segment + def __setitem__(self, index, user_set_segment): + self.instantiated_segments[index] = user_set_segment - def append(self,segment): + def append(self, segment): """ Adds a new segment @@ -294,20 +287,26 @@ def append(self,segment): dist_z = segment.distal.z distal_diam = segment.distal.diameter - prox_vertex = [prox_x,prox_y,prox_z,prox_diam] - dist_vertex = [dist_x,dist_y,dist_z,distal_diam] + prox_vertex = [prox_x, prox_y, prox_z, prox_diam] + dist_vertex = [dist_x, dist_y, dist_z, distal_diam] if len(self.arraymorph.vertices) > 0: - self.arraymorph.vertices = np.append(self.arraymorph.vertices,[dist_vertex,prox_vertex],axis = 0) + self.arraymorph.vertices = np.append( + self.arraymorph.vertices, [dist_vertex, prox_vertex], axis=0 + ) else: - self.arraymorph.vertices = np.array([dist_vertex,prox_vertex]) + self.arraymorph.vertices = np.array([dist_vertex, prox_vertex]) - self.arraymorph.connectivity = np.append(self.arraymorph.connectivity,[-1,dist_vertex_index]) + self.arraymorph.connectivity = np.append( + self.arraymorph.connectivity, [-1, dist_vertex_index] + ) if len(self.arraymorph.physical_mask) == 0: - self.arraymorph.physical_mask = np.array([0,0]) + self.arraymorph.physical_mask = np.array([0, 0]) else: - self.arraymorph.physical_mask = np.append(self.arraymorph.physical_mask,[1,0]) + self.arraymorph.physical_mask = np.append( + self.arraymorph.physical_mask, [1, 0] + ) segment_index = len(self) - 1 self.instantiated_segments[segment_index] = segment diff --git a/neuroml/arraymorph_load_time_benchmark.py b/neuroml/arraymorph_load_time_benchmark.py index 9df6d8f1..c8f36185 100644 --- a/neuroml/arraymorph_load_time_benchmark.py +++ b/neuroml/arraymorph_load_time_benchmark.py @@ -6,54 +6,56 @@ class Benchmark: def __init__(self, num_segments): self.num_segments = num_segments - + def set_up(self): - num_segments = int(1e4) #Per cell + num_segments = int(1e4) # Per cell num_vertices = num_segments + 1 - x = np.linspace(0,10,num_vertices) + x = np.linspace(0, 10, num_vertices) y = np.zeros(num_vertices) z = np.zeros(num_vertices) - d = np.linspace(1,0.01,num_vertices) + d = np.linspace(1, 0.01, num_vertices) + + vertices = np.array([x, y, z, d]).T - vertices = np.array([x,y,z,d]).T - - connectivity = range(-1,num_segments) + connectivity = range(-1, num_segments) - big_arraymorph = am.ArrayMorphology(vertices = vertices, - connectivity = connectivity) - transposed_x = x+10 - transposed_vertices = np.array([transposed_x,y,z,d]).T + big_arraymorph = am.ArrayMorphology( + vertices=vertices, connectivity=connectivity + ) + transposed_x = x + 10 + transposed_vertices = np.array([transposed_x, y, z, d]).T - 
transposed_arraymorph = am.ArrayMorphology(vertices = transposed_vertices, - connectivity = connectivity) + transposed_arraymorph = am.ArrayMorphology( + vertices=transposed_vertices, connectivity=connectivity + ) bigger_d = d + 0.5 - fatter_vertices = np.array([x,y,z,bigger_d]).T + fatter_vertices = np.array([x, y, z, bigger_d]).T - fatter_arraymorph = am.ArrayMorphology(vertices = fatter_vertices, - connectivity = connectivity) + fatter_arraymorph = am.ArrayMorphology( + vertices=fatter_vertices, connectivity=connectivity + ) - neuroml_cell = neuroml.Cell(id='cell_4') - neuroml_morphology = neuroml.Morphology(id = 'my_morph') + neuroml_cell = neuroml.Cell(id="cell_4") + neuroml_morphology = neuroml.Morphology(id="my_morph") neuroml_cell.morphology = neuroml_morphology self.transposed_arraymorph = transposed_arraymorph self.fatter_arraymorph = fatter_arraymorph self.big_arraymorph = big_arraymorph - self.cell_1 = neuroml.Cell(id='cell_1') - self.cell_2 = neuroml.Cell(id='cell_2') - self.cell_3 = neuroml.Cell(id='cell_3') - + self.cell_1 = neuroml.Cell(id="cell_1") + self.cell_2 = neuroml.Cell(id="cell_2") + self.cell_3 = neuroml.Cell(id="cell_3") + self.cell_1.morphology = transposed_arraymorph self.cell_2.morphology = fatter_arraymorph self.cell_3.morphology = big_arraymorph - - self.test_doc = neuroml.NeuroMLDocument(id='TestDocument') + + self.test_doc = neuroml.NeuroMLDocument(id="TestDocument") self.test_doc.cells.append(self.cell_1) self.test_doc.cells.append(self.cell_2) self.test_doc.cells.append(self.cell_3) - self.test_doc.cells.append(neuroml_cell) - + self.test_doc.cells.append(neuroml_cell) diff --git a/neuroml/benchmarks/arraymorph_benchmarks.py b/neuroml/benchmarks/arraymorph_benchmarks.py index 7c779a4f..c41d4221 100644 --- a/neuroml/benchmarks/arraymorph_benchmarks.py +++ b/neuroml/benchmarks/arraymorph_benchmarks.py @@ -13,14 +13,13 @@ def timeit(method): - def timed(*args, **kw): ts = time.time() result = method(*args, **kw) te = time.time() - print('%r (%r, %r) %2.2f sec' % (method.__name__, args, kw, te-ts)) - return te-ts + print("%r (%r, %r) %2.2f sec" % (method.__name__, args, kw, te - ts)) + return te - ts return timed @@ -29,29 +28,31 @@ class AMWriteBenchmark(object): """ TODO: Get rid of methods which are not used """ - def __init__(self,num_segments=1e6): + + def __init__(self, num_segments=1e6): num_segments = int(num_segments) num_vertices = int(num_segments) + 1 - x = np.linspace(0,10,num_vertices) + x = np.linspace(0, 10, num_vertices) y = np.zeros(num_vertices) z = np.zeros(num_vertices) - d = np.linspace(1,0.01,num_vertices) + d = np.linspace(1, 0.01, num_vertices) - vertices = np.array([x,y,z,d]).T + vertices = np.array([x, y, z, d]).T - connectivity = range(-1,num_segments) + connectivity = range(-1, num_segments) - big_arraymorph = am.ArrayMorphology(vertices=vertices, - connectivity=connectivity) + big_arraymorph = am.ArrayMorphology( + vertices=vertices, connectivity=connectivity + ) self.big_arraymorph = big_arraymorph - self.cell = neuroml.Cell(id='test_cell') + self.cell = neuroml.Cell(id="test_cell") self.cell.morphology = big_arraymorph - self.test_doc = neuroml.NeuroMLDocument(id='TestDocument') + self.test_doc = neuroml.NeuroMLDocument(id="TestDocument") self.test_doc.cells.append(self.cell) @@ -80,51 +81,51 @@ def run_hdf5(self): def test_write_big_arraymorph_json(self): writer_method = neuroml.writers.JSONWriter.write - fh,filename = tempfile.mkstemp() + fh, filename = tempfile.mkstemp() try: - writer_method(self.test_doc,filename) + 
writer_method(self.test_doc, filename) except: self.fail("Exception raised!") - print('JSON Number of segments:') + print("JSON Number of segments:") print(self.num_segments) - print('JSON size in bytes:') + print("JSON size in bytes:") print(self.file_size(filename)) os.close(fh) def test_write_big_arraymorph_neuroml(self): writer_method = neuroml.writers.NeuroMLWriter.write - fh,filename = tempfile.mkstemp() + fh, filename = tempfile.mkstemp() try: - writer_method(self.test_doc,filename) + writer_method(self.test_doc, filename) except: self.fail("Exception raised!") - print('NeuroML (XML) Number of segments:') + print("NeuroML (XML) Number of segments:") print(self.num_segments) - print('NeuroML (XML) size in bytes:') + print("NeuroML (XML) size in bytes:") print(self.file_size(filename)) os.close(fh) - def file_size(self,path): + def file_size(self, path): return os.path.getsize(path) def test_write_big_arraymorph_hdf5(self): writer_method = neuroml.writers.ArrayMorphWriter.write - fh,filename = tempfile.mkstemp() + fh, filename = tempfile.mkstemp() try: - writer_method(self.test_doc,filename) + writer_method(self.test_doc, filename) except: self.fail("Exception raised!") - print('HDF5 Number of segments:') + print("HDF5 Number of segments:") print(self.num_segments) - print('HDF5 size in bytes:') + print("HDF5 size in bytes:") print(self.file_size(filename)) os.close(fh) @@ -137,38 +138,44 @@ def test_write_expected(self): filename = tempfile.mkstemp()[1] writer_method = neuroml.writers.ArrayMorphWriter.write - writer_method(self.big_arraymorph,filename) + writer_method(self.big_arraymorph, filename) loader_method = neuroml.loaders.ArrayMorphLoader.load doc = loader_method(filename) array_morph = doc.morphology[0] - connectivity_equal = np.testing.assert_array_equal(array_morph.connectivity,self.big_arraymorph.connectivity) - physical_masks_equal = np.testing.assert_array_equal(array_morph.physical_mask,self.big_arraymorph.physical_mask) - vertices_equal = np.testing.assert_array_equal(array_morph.vertices,self.big_arraymorph.vertices) + connectivity_equal = np.testing.assert_array_equal( + array_morph.connectivity, self.big_arraymorph.connectivity + ) + physical_masks_equal = np.testing.assert_array_equal( + array_morph.physical_mask, self.big_arraymorph.physical_mask + ) + vertices_equal = np.testing.assert_array_equal( + array_morph.vertices, self.big_arraymorph.vertices + ) - self.assertEqual(connectivity_equal,None) # None when equal - self.assertEqual(physical_masks_equal,None) # None when equal - self.assertEqual(vertices_equal,None)# None when equal + self.assertEqual(connectivity_equal, None) # None when equal + self.assertEqual(physical_masks_equal, None) # None when equal + self.assertEqual(vertices_equal, None) # None when equal def test_write_multiple_morphologies(self): filename = tempfile.mkstemp()[1] writer_method = neuroml.writers.ArrayMorphWriter.write try: - writer_method(self.test_doc,filename) + writer_method(self.test_doc, filename) except: self.fail("Exception raised!") def test_write_multiple_morphologies(self): filename = tempfile.mkstemp()[1] writer_method = neuroml.writers.ArrayMorphWriter.write - writer_method(self.test_doc,filename) + writer_method(self.test_doc, filename) loader_method = neuroml.loaders.ArrayMorphLoader.load document = loader_method(filename) - self.assertIsInstance(document,neuroml.NeuroMLDocument) + self.assertIsInstance(document, neuroml.NeuroMLDocument) def benchmark_arraymorph_writer(): @@ -178,9 +185,9 @@ def 
benchmark_arraymorph_writer(): """ num_tests = 10 - json_results= [] - neuroml_results= [] - hdf5_results= [] + json_results = [] + neuroml_results = [] + hdf5_results = [] for i in range(num_tests): json_runtimes = [] @@ -226,67 +233,78 @@ def benchmark_arraymorph_writer(): np.savetxt("json_results.csv", json_results, delimiter=",") np.savetxt("hdf5_results.csv", hdf5_results, delimiter=",") - neuroml_runtimes_averaged = np.mean(neuroml_results,axis=0) - json_runtimes_averaged = np.mean(json_results,axis=0) - hdf5_runtimes_averaged = np.mean(hdf5_results,axis=0) + neuroml_runtimes_averaged = np.mean(neuroml_results, axis=0) + json_runtimes_averaged = np.mean(json_results, axis=0) + hdf5_runtimes_averaged = np.mean(hdf5_results, axis=0) - hdf5_errors = np.std(hdf5_results,axis=0) - json_errors = np.std(json_results,axis=0) - neuroml_errors = np.std(neuroml_results,axis=0) + hdf5_errors = np.std(hdf5_results, axis=0) + json_errors = np.std(json_results, axis=0) + neuroml_errors = np.std(neuroml_results, axis=0) neuroml_num_segments_list = np.array(neuroml_num_segments_list) json_num_segments_list = np.array(json_num_segments_list) hdf5_num_segments_list = np.array(hdf5_num_segments_list) - plt_neuroml = plt.errorbar(neuroml_num_segments_list, - neuroml_runtimes_averaged, - yerr=hdf5_errors, - marker='o', - color='k', - ecolor='k', - markerfacecolor='r', - label="series 2", - capsize=5,) + plt_neuroml = plt.errorbar( + neuroml_num_segments_list, + neuroml_runtimes_averaged, + yerr=hdf5_errors, + marker="o", + color="k", + ecolor="k", + markerfacecolor="r", + label="series 2", + capsize=5, + ) plt.title("ArrayMorph write to disk benchmark (NeuroML (XML) serialization)") plt.xlabel("Number of segments in morphology (Units of 1000 segments)") plt.ylabel("Time to write to disk (s)") - plt_hdf5 = plt.errorbar(hdf5_num_segments_list, - hdf5_runtimes_averaged, - yerr=hdf5_errors, - marker='o', - color='k', - ecolor='k', - markerfacecolor='g', - label="series 2", - capsize=5,) + plt_hdf5 = plt.errorbar( + hdf5_num_segments_list, + hdf5_runtimes_averaged, + yerr=hdf5_errors, + marker="o", + color="k", + ecolor="k", + markerfacecolor="g", + label="series 2", + capsize=5, + ) plt.title("ArrayMorph write to disk benchmark (HDF5 serialization)") plt.xlabel("Number of segments in morphology (Units of 1000 segments)") plt.ylabel("Time to write to disk (s)") -# plt.show() - - plt_json = plt.errorbar(json_num_segments_list, - json_runtimes_averaged, - yerr=json_errors, - marker='o', - color='k', - ecolor='k', - markerfacecolor='b', - label="series 2", - capsize=5,) - - plt.title("ArrayMorph write to disk benchmarks for JSON, HDF5 and NeuroML serialization formats") + # plt.show() + + plt_json = plt.errorbar( + json_num_segments_list, + json_runtimes_averaged, + yerr=json_errors, + marker="o", + color="k", + ecolor="k", + markerfacecolor="b", + label="series 2", + capsize=5, + ) + + plt.title( + "ArrayMorph write to disk benchmarks for JSON, HDF5 and NeuroML serialization formats" + ) plt.xlabel("Number of segments in morphology") plt.ylabel("Time to write to disk (s)") - plt.legend([plt_json, plt_hdf5,plt_neuroml], ["JSON serialization", "HDF5 serialization","NeuroML serialization"]) + plt.legend( + [plt_json, plt_hdf5, plt_neuroml], + ["JSON serialization", "HDF5 serialization", "NeuroML serialization"], + ) - plt.yscale('log') - plt.xscale('log') + plt.yscale("log") + plt.xscale("log") plt.show() -#prototype: +# prototype: if __name__ == "__main__": benchmark_arraymorph_writer() diff --git 
a/neuroml/examples/arraymorph_generation.py b/neuroml/examples/arraymorph_generation.py index 3506be4f..af281b6f 100644 --- a/neuroml/examples/arraymorph_generation.py +++ b/neuroml/examples/arraymorph_generation.py @@ -11,41 +11,43 @@ import neuroml.writers as writers import neuroml.arraymorph as am -p = neuroml.Point3DWithDiam(x=0,y=0,z=0,diameter=50) -d = neuroml.Point3DWithDiam(x=50,y=0,z=0,diameter=50) +p = neuroml.Point3DWithDiam(x=0, y=0, z=0, diameter=50) +d = neuroml.Point3DWithDiam(x=50, y=0, z=0, diameter=50) soma = neuroml.Segment(proximal=p, distal=d) -soma.name = 'Soma' +soma.name = "Soma" soma.id = 0 -#now make an axon with 100 compartments: +# now make an axon with 100 compartments: parent = neuroml.SegmentParent(segments=soma.id) parent_segment = soma axon_segments = [] seg_id = 1 for i in range(100): - p = neuroml.Point3DWithDiam(x=parent_segment.distal.x, - y=parent_segment.distal.y, - z=parent_segment.distal.z, - diameter=0.1) - - d = neuroml.Point3DWithDiam(x=parent_segment.distal.x+10, - y=parent_segment.distal.y, - z=parent_segment.distal.z, - diameter=0.1) - - axon_segment = neuroml.Segment(proximal = p, - distal = d, - parent = parent) + p = neuroml.Point3DWithDiam( + x=parent_segment.distal.x, + y=parent_segment.distal.y, + z=parent_segment.distal.z, + diameter=0.1, + ) + + d = neuroml.Point3DWithDiam( + x=parent_segment.distal.x + 10, + y=parent_segment.distal.y, + z=parent_segment.distal.z, + diameter=0.1, + ) + + axon_segment = neuroml.Segment(proximal=p, distal=d, parent=parent) axon_segment.id = seg_id - - axon_segment.name = 'axon_segment_' + str(axon_segment.id) - #now reset everything: + axon_segment.name = "axon_segment_" + str(axon_segment.id) + + # now reset everything: parent = neuroml.SegmentParent(segments=axon_segment.id) parent_segment = axon_segment - seg_id += 1 + seg_id += 1 axon_segments.append(axon_segment) @@ -55,25 +57,25 @@ test_morphology.id = "TestMorphology" cell = neuroml.Cell() -cell.name = 'TestCell' -cell.id = 'TestCell' +cell.name = "TestCell" +cell.id = "TestCell" cell.morphology = test_morphology doc = neuroml.NeuroMLDocument() -#doc.name = "Test neuroML document" +# doc.name = "Test neuroML document" doc.cells.append(cell) doc.id = "TestNeuroMLDocument" -nml_file = 'tmp/arraymorph.nml' +nml_file = "tmp/arraymorph.nml" -writers.NeuroMLWriter.write(doc,nml_file) +writers.NeuroMLWriter.write(doc, nml_file) -print("Written morphology file to: "+nml_file) +print("Written morphology file to: " + nml_file) -###### Validate the NeuroML ###### +###### Validate the NeuroML ###### from neuroml.utils import validate_neuroml2 -validate_neuroml2(nml_file) \ No newline at end of file +validate_neuroml2(nml_file) diff --git a/neuroml/examples/build_3D_network.py b/neuroml/examples/build_3D_network.py index a6b16f24..4da0fff9 100644 --- a/neuroml/examples/build_3D_network.py +++ b/neuroml/examples/build_3D_network.py @@ -28,22 +28,23 @@ dend_len = 10 dend_num = 10 + def generateRandomMorphology(): morphology = Morphology() - p = Point3DWithDiam(x=0,y=0,z=0,diameter=soma_diam) - d = Point3DWithDiam(x=soma_len,y=0,z=0,diameter=soma_diam) - soma = Segment(proximal=p, distal=d, name = 'Soma', id = 0) + p = Point3DWithDiam(x=0, y=0, z=0, diameter=soma_diam) + d = Point3DWithDiam(x=soma_len, y=0, z=0, diameter=soma_diam) + soma = Segment(proximal=p, distal=d, name="Soma", id=0) morphology.segments.append(soma) parent_seg = soma - for dend_id in range(0,dend_num): + for dend_id in range(0, dend_num): - p = 
Point3DWithDiam(x=d.x,y=d.y,z=d.z,diameter=dend_diam) - d = Point3DWithDiam(x=p.x,y=p.y+dend_len,z=p.z,diameter=dend_diam) - dend = Segment(proximal=p, distal=d, name = 'Dend_%i'%dend_id, id = 1+dend_id) + p = Point3DWithDiam(x=d.x, y=d.y, z=d.z, diameter=dend_diam) + d = Point3DWithDiam(x=p.x, y=p.y + dend_len, z=p.z, diameter=dend_diam) + dend = Segment(proximal=p, distal=d, name="Dend_%i" % dend_id, id=1 + dend_id) dend.parent = SegmentParent(segments=parent_seg.id) parent_seg = dend @@ -53,80 +54,89 @@ def generateRandomMorphology(): return morphology + def run(): cell_num = 10 x_size = 500 y_size = 500 z_size = 500 - + nml_doc = NeuroMLDocument(id="Net3DExample") syn0 = ExpOneSynapse(id="syn0", gbase="65nS", erev="0mV", tau_decay="3ms") nml_doc.exp_one_synapses.append(syn0) - + net = Network(id="Net3D") nml_doc.networks.append(net) - proj_count = 0 - #conn_count = 0 + # conn_count = 0 - for cell_id in range(0,cell_num): + for cell_id in range(0, cell_num): - cell = Cell(id="Cell_%i"%cell_id) + cell = Cell(id="Cell_%i" % cell_id) cell.morphology = generateRandomMorphology() - + nml_doc.cells.append(cell) - pop = Population(id="Pop_%i"%cell_id, component=cell.id, type="populationList") + pop = Population( + id="Pop_%i" % cell_id, component=cell.id, type="populationList" + ) net.populations.append(pop) pop.properties.append(Property(tag="color", value="1 0 0")) inst = Instance(id="0") pop.instances.append(inst) - inst.location = Location(x=str(x_size*random()), y=str(y_size*random()), z=str(z_size*random())) - + inst.location = Location( + x=str(x_size * random()), y=str(y_size * random()), z=str(z_size * random()) + ) + prob_connection = 0.5 - for post in range(0,cell_num): + for post in range(0, cell_num): if post is not cell_id and random() <= prob_connection: - from_pop = "Pop_%i"%cell_id - to_pop = "Pop_%i"%post + from_pop = "Pop_%i" % cell_id + to_pop = "Pop_%i" % post pre_seg_id = 0 post_seg_id = 1 - - projection = Projection(id="Proj_%i"%proj_count, presynaptic_population=from_pop, postsynaptic_population=to_pop, synapse=syn0.id) + projection = Projection( + id="Proj_%i" % proj_count, + presynaptic_population=from_pop, + postsynaptic_population=to_pop, + synapse=syn0.id, + ) net.projections.append(projection) - connection = Connection(id=proj_count, \ - pre_cell_id="%s[%i]"%(from_pop,0), \ - pre_segment_id=pre_seg_id, \ - pre_fraction_along=random(), - post_cell_id="%s[%i]"%(to_pop,0), \ - post_segment_id=post_seg_id, - post_fraction_along=random()) + connection = Connection( + id=proj_count, + pre_cell_id="%s[%i]" % (from_pop, 0), + pre_segment_id=pre_seg_id, + pre_fraction_along=random(), + post_cell_id="%s[%i]" % (to_pop, 0), + post_segment_id=post_seg_id, + post_fraction_along=random(), + ) projection.connections.append(connection) proj_count += 1 - #net.synaptic_connections.append(SynapticConnection(from_="%s[%i]"%(from_pop,0), to="%s[%i]"%(to_pop,0))) - - - ####### Write to file ###### - - nml_file = 'tmp/net3d.nml' + # net.synaptic_connections.append(SynapticConnection(from_="%s[%i]"%(from_pop,0), to="%s[%i]"%(to_pop,0))) + + ####### Write to file ###### + + nml_file = "tmp/net3d.nml" writers.NeuroMLWriter.write(nml_doc, nml_file) - - print("Written network file to: "+nml_file) + print("Written network file to: " + nml_file) - ###### Validate the NeuroML ###### + ###### Validate the NeuroML ###### from neuroml.utils import validate_neuroml2 validate_neuroml2(nml_file) + run() diff --git a/neuroml/examples/build_complete.py b/neuroml/examples/build_complete.py index 
95b0b38d..26bde1ea 100644 --- a/neuroml/examples/build_complete.py +++ b/neuroml/examples/build_complete.py @@ -48,45 +48,46 @@ nml_doc.notes = "Lots of notes...." -IafCell0 = IafCell(id="iaf0", - C="1.0 nF", - thresh = "-50mV", - reset="-65mV", - leak_conductance="10 nS", - leak_reversal="-65mV") +IafCell0 = IafCell( + id="iaf0", + C="1.0 nF", + thresh="-50mV", + reset="-65mV", + leak_conductance="10 nS", + leak_reversal="-65mV", +) nml_doc.iaf_cells.append(IafCell0) -IafCell1 = IafCell(id="iaf1", - C="1.0 nF", - thresh = "-50mV", - reset="-65mV", - leak_conductance="20 nS", - leak_reversal="-65mV") +IafCell1 = IafCell( + id="iaf1", + C="1.0 nF", + thresh="-50mV", + reset="-65mV", + leak_conductance="20 nS", + leak_reversal="-65mV", +) nml_doc.iaf_cells.append(IafCell1) -iz0 = IzhikevichCell(id="iz0", v0="-70mV", thresh="30mV", a="0.02", b="0.2", c="-65.0", d="6") +iz0 = IzhikevichCell( + id="iz0", v0="-70mV", thresh="30mV", a="0.02", b="0.2", c="-65.0", d="6" +) nml_doc.izhikevich_cells.append(iz0) -syn0 = ExpOneSynapse(id="syn0", - gbase="14nS", - erev="0mV", - tau_decay="3ms") +syn0 = ExpOneSynapse(id="syn0", gbase="14nS", erev="0mV", tau_decay="3ms") nml_doc.exp_one_synapses.append(syn0) -syn1 = ExpTwoSynapse(id="syn1", - gbase="2nS", - erev="0mV", - tau_rise="1ms", - tau_decay="3ms") +syn1 = ExpTwoSynapse( + id="syn1", gbase="2nS", erev="0mV", tau_rise="1ms", tau_decay="3ms" +) nml_doc.exp_two_synapses.append(syn1) -gj = GapJunction(id="gj1",conductance="10pS") +gj = GapJunction(id="gj1", conductance="10pS") nml_doc.gap_junctions.append(gj) @@ -95,13 +96,14 @@ nml_doc.silent_synapses.append(sil_syn) -grad_syn = GradedSynapse(id="gs1",conductance="0.5pS",delta="5mV",Vth="-55mV",k="0.025per_ms",erev="0mV") +grad_syn = GradedSynapse( + id="gs1", conductance="0.5pS", delta="5mV", Vth="-55mV", k="0.025per_ms", erev="0mV" +) nml_doc.graded_synapses.append(grad_syn) -pfs = PoissonFiringSynapse(id='pfs', - average_rate='150Hz', - synapse=syn0.id, - spike_target="./%s"%syn0.id) +pfs = PoissonFiringSynapse( + id="pfs", average_rate="150Hz", synapse=syn0.id, spike_target="./%s" % syn0.id +) nml_doc.poisson_firing_synapses.append(pfs) @@ -111,43 +113,43 @@ nml_doc.networks.append(net) -size0 = int(5*scale) -pop0 = Population(id="IafPop0", - component=IafCell0.id, - size=size0) +size0 = int(5 * scale) +pop0 = Population(id="IafPop0", component=IafCell0.id, size=size0) net.populations.append(pop0) -size1 = int(5*scale) -pop1 = Population(id="IafPop1", - component=IafCell1.id, - size=size1) +size1 = int(5 * scale) +pop1 = Population(id="IafPop1", component=IafCell1.id, size=size1) net.populations.append(pop1) -size2 = int(5*scale) -pop2 = Population(id="IzhPop", - component=iz0.id, - size=size2) +size2 = int(5 * scale) +pop2 = Population(id="IzhPop", component=iz0.id, size=size2) net.populations.append(pop2) -cell_num = int(4*scale) -pop = Population(id="Pop_x", component=IafCell0.id, type="populationList",size=cell_num) +cell_num = int(4 * scale) +pop = Population( + id="Pop_x", component=IafCell0.id, type="populationList", size=cell_num +) net.populations.append(pop) pop.properties.append(Property(tag="color", value="1 0 0")) x_size = 500 y_size = 500 z_size = 500 - + for i in range(cell_num): inst = Instance(id=i) pop.instances.append(inst) - inst.location = Location(x=str(x_size*random.random()), y=str(y_size*random.random()), z=str(z_size*random.random())) + inst.location = Location( + x=str(x_size * random.random()), + y=str(y_size * random.random()), + z=str(z_size * random.random()), + ) 
prob_connection = 0.5 proj_count = 0 @@ -155,114 +157,142 @@ from_pop = pop.id to_pop = pop.id -projection = Projection(id="Proj", presynaptic_population=from_pop, postsynaptic_population=to_pop, synapse=syn0.id) -electricalProjection = ElectricalProjection(id="ElectProj", presynaptic_population=from_pop, postsynaptic_population=to_pop) -electricalProjectionW = ElectricalProjection(id="ElectProjW", presynaptic_population=from_pop, postsynaptic_population=to_pop) +projection = Projection( + id="Proj", + presynaptic_population=from_pop, + postsynaptic_population=to_pop, + synapse=syn0.id, +) +electricalProjection = ElectricalProjection( + id="ElectProj", presynaptic_population=from_pop, postsynaptic_population=to_pop +) +electricalProjectionW = ElectricalProjection( + id="ElectProjW", presynaptic_population=from_pop, postsynaptic_population=to_pop +) net.projections.append(projection) net.electrical_projections.append(electricalProjection) net.electrical_projections.append(electricalProjectionW) -input_list = InputList(id='il', - component=pfs.id, - populations=from_pop) +input_list = InputList(id="il", component=pfs.id, populations=from_pop) net.input_lists.append(input_list) -input_list_w = InputList(id='ilw', - component=pfs.id, - populations=from_pop) +input_list_w = InputList(id="ilw", component=pfs.id, populations=from_pop) net.input_lists.append(input_list_w) -for pre_index in range(0,cell_num): - - for post_index in range(0,cell_num): +for pre_index in range(0, cell_num): + + for post_index in range(0, cell_num): if pre_index != post_index and random.random() <= prob_connection: pre_seg_id = 0 post_seg_id = 0 - connection = ConnectionWD(id=proj_count, \ - pre_cell_id="../%s/%i/%s"%(from_pop,pre_index,IafCell0.id), \ - pre_segment_id=pre_seg_id, \ - pre_fraction_along=random.random(), - post_cell_id="../%s/%i/%s"%(to_pop,post_index,IafCell0.id), \ - post_segment_id=post_seg_id, - post_fraction_along=random.random(), - weight=random.random(), - delay='%sms'%(random.random()*10)) + connection = ConnectionWD( + id=proj_count, + pre_cell_id="../%s/%i/%s" % (from_pop, pre_index, IafCell0.id), + pre_segment_id=pre_seg_id, + pre_fraction_along=random.random(), + post_cell_id="../%s/%i/%s" % (to_pop, post_index, IafCell0.id), + post_segment_id=post_seg_id, + post_fraction_along=random.random(), + weight=random.random(), + delay="%sms" % (random.random() * 10), + ) projection.connection_wds.append(connection) - - electricalConnection = ElectricalConnectionInstance(id=proj_count, \ - pre_cell="../%s/%i/%s"%(from_pop,pre_index,IafCell0.id), \ - pre_segment=pre_seg_id, \ - pre_fraction_along=random.random(), - post_cell="../%s/%i/%s"%(to_pop,post_index,IafCell0.id), \ - post_segment=post_seg_id, - post_fraction_along=random.random(), - synapse=gj.id) - - electricalProjection.electrical_connection_instances.append(electricalConnection) - - electricalConnectionW = ElectricalConnectionInstanceW(id=proj_count, \ - pre_cell="../%s/%i/%s"%(from_pop,pre_index,IafCell0.id), \ - pre_segment=pre_seg_id, \ - pre_fraction_along=random.random(), - post_cell="../%s/%i/%s"%(to_pop,post_index,IafCell0.id), \ - post_segment=post_seg_id, - post_fraction_along=random.random(), - synapse=gj.id, - weight=random.random()) - - electricalProjectionW.electrical_connection_instance_ws.append(electricalConnectionW) - + + electricalConnection = ElectricalConnectionInstance( + id=proj_count, + pre_cell="../%s/%i/%s" % (from_pop, pre_index, IafCell0.id), + pre_segment=pre_seg_id, + pre_fraction_along=random.random(), + 
post_cell="../%s/%i/%s" % (to_pop, post_index, IafCell0.id), + post_segment=post_seg_id, + post_fraction_along=random.random(), + synapse=gj.id, + ) + + electricalProjection.electrical_connection_instances.append( + electricalConnection + ) + + electricalConnectionW = ElectricalConnectionInstanceW( + id=proj_count, + pre_cell="../%s/%i/%s" % (from_pop, pre_index, IafCell0.id), + pre_segment=pre_seg_id, + pre_fraction_along=random.random(), + post_cell="../%s/%i/%s" % (to_pop, post_index, IafCell0.id), + post_segment=post_seg_id, + post_fraction_along=random.random(), + synapse=gj.id, + weight=random.random(), + ) + + electricalProjectionW.electrical_connection_instance_ws.append( + electricalConnectionW + ) + proj_count += 1 - - input = Input(id=pre_index, - target="../%s/%i/%s"%(from_pop, pre_index, pop.component), - destination="synapses") - input_list.input.append(input) - - input_w = InputW(id=pre_index, - target="../%s/%i/%s"%(from_pop, pre_index, pop.component), - destination="synapses", - weight=10) - - input_list_w.input_ws.append(input_w) - + + input = Input( + id=pre_index, + target="../%s/%i/%s" % (from_pop, pre_index, pop.component), + destination="synapses", + ) + input_list.input.append(input) + + input_w = InputW( + id=pre_index, + target="../%s/%i/%s" % (from_pop, pre_index, pop.component), + destination="synapses", + weight=10, + ) + + input_list_w.input_ws.append(input_w) + proj_count = 0 from_pop = pop0.id to_pop = pop1.id -electricalProjection = ElectricalProjection(id="ElectProj0", presynaptic_population=from_pop, postsynaptic_population=to_pop) +electricalProjection = ElectricalProjection( + id="ElectProj0", presynaptic_population=from_pop, postsynaptic_population=to_pop +) net.electrical_projections.append(electricalProjection) -for pre_index in range(0,size0): - - for post_index in range(0,size1): +for pre_index in range(0, size0): + + for post_index in range(0, size1): if pre_index != post_index and random.random() <= prob_connection: pre_seg_id = 0 post_seg_id = 0 - - electricalConnection = ElectricalConnection(id=proj_count, \ - pre_cell="%s"%(pre_index), \ - pre_segment=pre_seg_id, \ - pre_fraction_along=random.random(), - post_cell="%s"%(post_index), \ - post_segment=post_seg_id, - post_fraction_along=random.random(), - synapse=gj.id) - + + electricalConnection = ElectricalConnection( + id=proj_count, + pre_cell="%s" % (pre_index), + pre_segment=pre_seg_id, + pre_fraction_along=random.random(), + post_cell="%s" % (post_index), + post_segment=post_seg_id, + post_fraction_along=random.random(), + synapse=gj.id, + ) + electricalProjection.electrical_connections.append(electricalConnection) - + proj_count += 1 -projection0 = Projection(id="ProjEmpty", presynaptic_population=from_pop, postsynaptic_population=to_pop, synapse=syn0.id) +projection0 = Projection( + id="ProjEmpty", + presynaptic_population=from_pop, + postsynaptic_population=to_pop, + synapse=syn0.id, +) net.projections.append(projection0) @@ -270,16 +300,23 @@ from_pop = pop0.id to_pop = pop1.id -projection = Projection(id="ProjConnection", presynaptic_population=from_pop, postsynaptic_population=to_pop, synapse=syn1.id) +projection = Projection( + id="ProjConnection", + presynaptic_population=from_pop, + postsynaptic_population=to_pop, + synapse=syn1.id, +) net.projections.append(projection) -connection = Connection(id=0, \ - pre_cell_id="../%s[%i]"%(from_pop,size0-1), \ - pre_segment_id=pre_seg_id, \ - pre_fraction_along=random.random(), - post_cell_id="../%s[%i]"%(to_pop,size0-1), \ - 
post_segment_id=post_seg_id, - post_fraction_along=random.random()) +connection = Connection( + id=0, + pre_cell_id="../%s[%i]" % (from_pop, size0 - 1), + pre_segment_id=pre_seg_id, + pre_fraction_along=random.random(), + post_cell_id="../%s[%i]" % (to_pop, size0 - 1), + post_segment_id=post_seg_id, + post_fraction_along=random.random(), +) projection.connections.append(connection) @@ -287,66 +324,80 @@ from_pop = pop0.id to_pop = pop1.id -continuous_projection = ContinuousProjection(id="ProjCC", presynaptic_population=from_pop, postsynaptic_population=to_pop) +continuous_projection = ContinuousProjection( + id="ProjCC", presynaptic_population=from_pop, postsynaptic_population=to_pop +) net.continuous_projections.append(continuous_projection) -continuous_connection = ContinuousConnection(id=0, \ - pre_cell="0", \ - post_cell="0", \ - pre_component=sil_syn.id, \ - post_component=grad_syn.id) +continuous_connection = ContinuousConnection( + id=0, + pre_cell="0", + post_cell="0", + pre_component=sil_syn.id, + post_component=grad_syn.id, +) continuous_projection.continuous_connections.append(continuous_connection) from_pop = pop.id to_pop = pop1.id -continuous_projection_i = ContinuousProjection(id="ProjCCI", presynaptic_population=from_pop, postsynaptic_population=to_pop) +continuous_projection_i = ContinuousProjection( + id="ProjCCI", presynaptic_population=from_pop, postsynaptic_population=to_pop +) net.continuous_projections.append(continuous_projection_i) -continuous_connection_i = ContinuousConnectionInstance(id=0, \ - pre_cell="../%s/%i/%s"%(from_pop,0,pop.component), \ - pre_segment=pre_seg_id, \ - pre_fraction_along=random.random(), - post_cell="../%s[%i]"%(to_pop,0), \ - post_segment=post_seg_id, - post_fraction_along=random.random(), - pre_component=sil_syn.id, \ - post_component=grad_syn.id) +continuous_connection_i = ContinuousConnectionInstance( + id=0, + pre_cell="../%s/%i/%s" % (from_pop, 0, pop.component), + pre_segment=pre_seg_id, + pre_fraction_along=random.random(), + post_cell="../%s[%i]" % (to_pop, 0), + post_segment=post_seg_id, + post_fraction_along=random.random(), + pre_component=sil_syn.id, + post_component=grad_syn.id, +) continuous_projection_i.continuous_connection_instances.append(continuous_connection_i) -continuous_projection_iw = ContinuousProjection(id="ProjCCIW", presynaptic_population=from_pop, postsynaptic_population=to_pop) +continuous_projection_iw = ContinuousProjection( + id="ProjCCIW", presynaptic_population=from_pop, postsynaptic_population=to_pop +) net.continuous_projections.append(continuous_projection_iw) -continuous_connection_iw = ContinuousConnectionInstanceW(id=0, \ - pre_cell="../%s/%i/%s"%(from_pop,0,pop.component), \ - pre_segment=pre_seg_id, \ - pre_fraction_along=random.random(), - post_cell="../%s[%i]"%(to_pop,0), \ - post_segment=post_seg_id, - post_fraction_along=random.random(), - pre_component=sil_syn.id, \ - post_component=grad_syn.id, \ - weight=5) - -continuous_projection_iw.continuous_connection_instance_ws.append(continuous_connection_iw) - - -nml_file = 'test_files/complete.nml' +continuous_connection_iw = ContinuousConnectionInstanceW( + id=0, + pre_cell="../%s/%i/%s" % (from_pop, 0, pop.component), + pre_segment=pre_seg_id, + pre_fraction_along=random.random(), + post_cell="../%s[%i]" % (to_pop, 0), + post_segment=post_seg_id, + post_fraction_along=random.random(), + pre_component=sil_syn.id, + post_component=grad_syn.id, + weight=5, +) + +continuous_projection_iw.continuous_connection_instance_ws.append( + 
continuous_connection_iw +) + + +nml_file = "test_files/complete.nml" writers.NeuroMLWriter.write(nml_doc, nml_file) -summary0 = nml_doc.summary() -print("Created:\n"+summary0) -print("Written network file to: "+nml_file) +summary0 = nml_doc.summary() +print("Created:\n" + summary0) +print("Written network file to: " + nml_file) -###### Validate the NeuroML ###### +###### Validate the NeuroML ###### from neuroml.utils import validate_neuroml2 validate_neuroml2(nml_file) -nml_h5_file = 'test_files/complete.nml.h5' +nml_h5_file = "test_files/complete.nml.h5" writers.NeuroMLHdf5Writer.write(nml_doc, nml_h5_file) -print("Written H5 network file to: "+nml_h5_file) +print("Written H5 network file to: " + nml_h5_file) diff --git a/neuroml/examples/build_network.py b/neuroml/examples/build_network.py index 18acc070..cf127c52 100644 --- a/neuroml/examples/build_network.py +++ b/neuroml/examples/build_network.py @@ -19,28 +19,29 @@ nml_doc = NeuroMLDocument(id="IafNet") -IafCell0 = IafCell(id="iaf0", - C="1.0 nF", - thresh = "-50mV", - reset="-65mV", - leak_conductance="10 nS", - leak_reversal="-65mV") +IafCell0 = IafCell( + id="iaf0", + C="1.0 nF", + thresh="-50mV", + reset="-65mV", + leak_conductance="10 nS", + leak_reversal="-65mV", +) nml_doc.iaf_cells.append(IafCell0) -IafCell1 = IafCell(id="iaf1", - C="1.0 nF", - thresh = "-50mV", - reset="-65mV", - leak_conductance="20 nS", - leak_reversal="-65mV") +IafCell1 = IafCell( + id="iaf1", + C="1.0 nF", + thresh="-50mV", + reset="-65mV", + leak_conductance="20 nS", + leak_reversal="-65mV", +) nml_doc.iaf_cells.append(IafCell1) -syn0 = ExpOneSynapse(id="syn0", - gbase="65nS", - erev="0mV", - tau_decay="3ms") +syn0 = ExpOneSynapse(id="syn0", gbase="65nS", erev="0mV", tau_decay="3ms") nml_doc.exp_one_synapses.append(syn0) @@ -49,51 +50,50 @@ nml_doc.networks.append(net) size0 = 5 -pop0 = Population(id="IafPop0", - component=IafCell0.id, - size=size0) +pop0 = Population(id="IafPop0", component=IafCell0.id, size=size0) net.populations.append(pop0) size1 = 5 -pop1 = Population(id="IafPop1", - component=IafCell0.id, - size=size1) +pop1 = Population(id="IafPop1", component=IafCell0.id, size=size1) net.populations.append(pop1) prob_connection = 0.5 -for pre in range(0,size0): +for pre in range(0, size0): - pg = PulseGenerator(id="pulseGen_%i"%pre, - delay="0ms", - duration="100ms", - amplitude="%f nA"%(0.1*random())) + pg = PulseGenerator( + id="pulseGen_%i" % pre, + delay="0ms", + duration="100ms", + amplitude="%f nA" % (0.1 * random()), + ) nml_doc.pulse_generators.append(pg) - exp_input = ExplicitInput(target="%s[%i]"%(pop0.id,pre), - input=pg.id) + exp_input = ExplicitInput(target="%s[%i]" % (pop0.id, pre), input=pg.id) net.explicit_inputs.append(exp_input) - for post in range(0,size1): + for post in range(0, size1): # fromxx is used since from is Python keyword if random() <= prob_connection: - syn = SynapticConnection(from_="%s[%i]"%(pop0.id,pre), - synapse=syn0.id, - to="%s[%i]"%(pop1.id,post)) + syn = SynapticConnection( + from_="%s[%i]" % (pop0.id, pre), + synapse=syn0.id, + to="%s[%i]" % (pop1.id, post), + ) net.synaptic_connections.append(syn) -nml_file = 'tmp/testnet.nml' +nml_file = "tmp/testnet.nml" writers.NeuroMLWriter.write(nml_doc, nml_file) -print("Written network file to: "+nml_file) +print("Written network file to: " + nml_file) -###### Validate the NeuroML ###### +###### Validate the NeuroML ###### from neuroml.utils import validate_neuroml2 diff --git a/neuroml/examples/build_network2.py b/neuroml/examples/build_network2.py index 
042f0961..e0969217 100644 --- a/neuroml/examples/build_network2.py +++ b/neuroml/examples/build_network2.py @@ -35,42 +35,41 @@ nml_doc.notes = "Root notes" -IafCell0 = IafCell(id="iaf0", - C="1.0 nF", - thresh = "-50mV", - reset="-65mV", - leak_conductance="10 nS", - leak_reversal="-65mV") +IafCell0 = IafCell( + id="iaf0", + C="1.0 nF", + thresh="-50mV", + reset="-65mV", + leak_conductance="10 nS", + leak_reversal="-65mV", +) nml_doc.iaf_cells.append(IafCell0) -IafCell1 = IafCell(id="iaf1", - C="1.0 nF", - thresh = "-50mV", - reset="-65mV", - leak_conductance="20 nS", - leak_reversal="-65mV") +IafCell1 = IafCell( + id="iaf1", + C="1.0 nF", + thresh="-50mV", + reset="-65mV", + leak_conductance="20 nS", + leak_reversal="-65mV", +) nml_doc.iaf_cells.append(IafCell1) -syn0 = ExpOneSynapse(id="syn0", - gbase="65nS", - erev="0mV", - tau_decay="3ms") +syn0 = ExpOneSynapse(id="syn0", gbase="65nS", erev="0mV", tau_decay="3ms") nml_doc.exp_one_synapses.append(syn0) -gj = GapJunction(id="gj1",conductance="10pS") +gj = GapJunction(id="gj1", conductance="10pS") nml_doc.gap_junctions.append(gj) - -pfs = PoissonFiringSynapse(id='pfs', - average_rate='50Hz', - synapse=syn0.id, - spike_target="./%s"%syn0.id) +pfs = PoissonFiringSynapse( + id="pfs", average_rate="50Hz", synapse=syn0.id, spike_target="./%s" % syn0.id +) nml_doc.poisson_firing_synapses.append(pfs) @@ -80,35 +79,37 @@ nml_doc.networks.append(net) -size0 = int(5*scale) -pop0 = Population(id="IafPop0", - component=IafCell0.id, - size=size0) +size0 = int(5 * scale) +pop0 = Population(id="IafPop0", component=IafCell0.id, size=size0) net.populations.append(pop0) -size1 = int(5*scale) -pop1 = Population(id="IafPop1", - component=IafCell1.id, - size=size1) +size1 = int(5 * scale) +pop1 = Population(id="IafPop1", component=IafCell1.id, size=size1) net.populations.append(pop1) -cell_num = int(4*scale) -pop = Population(id="Pop_x", component=IafCell0.id, type="populationList",size=cell_num) +cell_num = int(4 * scale) +pop = Population( + id="Pop_x", component=IafCell0.id, type="populationList", size=cell_num +) net.populations.append(pop) pop.properties.append(Property(tag="color", value="1 0 0")) x_size = 500 y_size = 500 z_size = 500 - + for i in range(cell_num): inst = Instance(id=i) pop.instances.append(inst) - inst.location = Location(x=str(x_size*random.random()), y=str(y_size*random.random()), z=str(z_size*random.random())) + inst.location = Location( + x=str(x_size * random.random()), + y=str(y_size * random.random()), + z=str(z_size * random.random()), + ) prob_connection = 0.5 proj_count = 0 @@ -116,103 +117,119 @@ from_pop = "Pop_x" to_pop = "Pop_x" -projection = Projection(id="Proj", presynaptic_population=from_pop, postsynaptic_population=to_pop, synapse=syn0.id) -electricalProjection = ElectricalProjection(id="ElectProj", presynaptic_population=from_pop, postsynaptic_population=to_pop) +projection = Projection( + id="Proj", + presynaptic_population=from_pop, + postsynaptic_population=to_pop, + synapse=syn0.id, +) +electricalProjection = ElectricalProjection( + id="ElectProj", presynaptic_population=from_pop, postsynaptic_population=to_pop +) net.projections.append(projection) net.electrical_projections.append(electricalProjection) -input_list = InputList(id='il', - component=pfs.id, - populations=from_pop) +input_list = InputList(id="il", component=pfs.id, populations=from_pop) net.input_lists.append(input_list) -for pre_index in range(0,cell_num): - - for post_index in range(0,cell_num): +for pre_index in range(0, cell_num): + + for 
post_index in range(0, cell_num): if pre_index != post_index and random.random() <= prob_connection: pre_seg_id = 0 post_seg_id = 0 - connection = ConnectionWD(id=proj_count, \ - pre_cell_id="../%s/%i/%s"%(from_pop,pre_index,IafCell0.id), \ - pre_segment_id=pre_seg_id, \ - pre_fraction_along=random.random(), - post_cell_id="../%s/%i/%s"%(to_pop,post_index,IafCell0.id), \ - post_segment_id=post_seg_id, - post_fraction_along=random.random(), - weight=random.random(), - delay='%sms'%(random.random()*10)) + connection = ConnectionWD( + id=proj_count, + pre_cell_id="../%s/%i/%s" % (from_pop, pre_index, IafCell0.id), + pre_segment_id=pre_seg_id, + pre_fraction_along=random.random(), + post_cell_id="../%s/%i/%s" % (to_pop, post_index, IafCell0.id), + post_segment_id=post_seg_id, + post_fraction_along=random.random(), + weight=random.random(), + delay="%sms" % (random.random() * 10), + ) projection.connection_wds.append(connection) - - electricalConnection = ElectricalConnectionInstance(id=proj_count, \ - pre_cell="../%s/%i/%s"%(from_pop,pre_index,IafCell0.id), \ - pre_segment=pre_seg_id, \ - pre_fraction_along=random.random(), - post_cell="../%s/%i/%s"%(to_pop,post_index,IafCell0.id), \ - post_segment=post_seg_id, - post_fraction_along=random.random(), - synapse=gj.id) - - electricalProjection.electrical_connection_instances.append(electricalConnection) - + + electricalConnection = ElectricalConnectionInstance( + id=proj_count, + pre_cell="../%s/%i/%s" % (from_pop, pre_index, IafCell0.id), + pre_segment=pre_seg_id, + pre_fraction_along=random.random(), + post_cell="../%s/%i/%s" % (to_pop, post_index, IafCell0.id), + post_segment=post_seg_id, + post_fraction_along=random.random(), + synapse=gj.id, + ) + + electricalProjection.electrical_connection_instances.append( + electricalConnection + ) + proj_count += 1 - - input = Input(id=pre_index, - target="../%s/%i/%s"%(from_pop, pre_index, pop.component), - destination="synapses") - input_list.input.append(input) - + + input = Input( + id=pre_index, + target="../%s/%i/%s" % (from_pop, pre_index, pop.component), + destination="synapses", + ) + input_list.input.append(input) + proj_count = 0 from_pop = pop0.id to_pop = pop1.id -electricalProjection = ElectricalProjection(id="ElectProj0", presynaptic_population=from_pop, postsynaptic_population=to_pop) +electricalProjection = ElectricalProjection( + id="ElectProj0", presynaptic_population=from_pop, postsynaptic_population=to_pop +) net.electrical_projections.append(electricalProjection) -for pre_index in range(0,size0): - - for post_index in range(0,size1): +for pre_index in range(0, size0): + + for post_index in range(0, size1): if pre_index != post_index and random.random() <= prob_connection: pre_seg_id = 0 post_seg_id = 0 - - electricalConnection = ElectricalConnection(id=proj_count, \ - pre_cell="%s"%(pre_index), \ - pre_segment=pre_seg_id, \ - pre_fraction_along=random.random(), - post_cell="%s"%(post_index), \ - post_segment=post_seg_id, - post_fraction_along=random.random(), - synapse=gj.id) - + + electricalConnection = ElectricalConnection( + id=proj_count, + pre_cell="%s" % (pre_index), + pre_segment=pre_seg_id, + pre_fraction_along=random.random(), + post_cell="%s" % (post_index), + post_segment=post_seg_id, + post_fraction_along=random.random(), + synapse=gj.id, + ) + electricalProjection.electrical_connections.append(electricalConnection) - + proj_count += 1 -nml_file = 'test_files/testh5.nml' +nml_file = "test_files/testh5.nml" writers.NeuroMLWriter.write(nml_doc, nml_file) 
-print("Created:\n"+nml_doc.summary()) -print("Written network file to: "+nml_file) +print("Created:\n" + nml_doc.summary()) +print("Written network file to: " + nml_file) -nml_h5_file = 'tmp/testh5.nml.h5' +nml_h5_file = "tmp/testh5.nml.h5" writers.NeuroMLHdf5Writer.write(nml_doc, nml_h5_file) -print("Written H5 network file to: "+nml_h5_file) +print("Written H5 network file to: " + nml_h5_file) -###### Validate the NeuroML ###### +###### Validate the NeuroML ###### from neuroml.utils import validate_neuroml2 validate_neuroml2(nml_file) - diff --git a/neuroml/examples/ion_channel_generation.py b/neuroml/examples/ion_channel_generation.py index 70e809e5..229c067e 100644 --- a/neuroml/examples/ion_channel_generation.py +++ b/neuroml/examples/ion_channel_generation.py @@ -5,33 +5,31 @@ import neuroml import neuroml.writers as writers -chan = neuroml.IonChannelHH(id='na', - conductance='10pS', - species='na', - notes="This is an example voltage-gated Na channel") - -m_gate = neuroml.GateHHRates(id='m',instances='3') -h_gate = neuroml.GateHHRates(id='h',instances='1') - -m_gate.forward_rate = neuroml.HHRate(type="HHExpRate", - rate="0.07per_ms", - midpoint="-65mV", - scale="-20mV") - -m_gate.reverse_rate = neuroml.HHRate(type="HHSigmoidRate", - rate="1per_ms", - midpoint="-35mV", - scale="10mV") - -h_gate.forward_rate = neuroml.HHRate(type="HHExpLinearRate", - rate="0.1per_ms", - midpoint="-55mV", - scale="10mV") - -h_gate.reverse_rate = neuroml.HHRate(type="HHExpRate", - rate="0.125per_ms", - midpoint="-65mV", - scale="-80mV") +chan = neuroml.IonChannelHH( + id="na", + conductance="10pS", + species="na", + notes="This is an example voltage-gated Na channel", +) + +m_gate = neuroml.GateHHRates(id="m", instances="3") +h_gate = neuroml.GateHHRates(id="h", instances="1") + +m_gate.forward_rate = neuroml.HHRate( + type="HHExpRate", rate="0.07per_ms", midpoint="-65mV", scale="-20mV" +) + +m_gate.reverse_rate = neuroml.HHRate( + type="HHSigmoidRate", rate="1per_ms", midpoint="-35mV", scale="10mV" +) + +h_gate.forward_rate = neuroml.HHRate( + type="HHExpLinearRate", rate="0.1per_ms", midpoint="-55mV", scale="10mV" +) + +h_gate.reverse_rate = neuroml.HHRate( + type="HHExpRate", rate="0.125per_ms", midpoint="-65mV", scale="-80mV" +) chan.gate_hh_rates.append(m_gate) chan.gate_hh_rates.append(h_gate) @@ -41,13 +39,13 @@ doc.id = "ChannelMLDemo" -nml_file = './tmp/ionChannelTest.xml' -writers.NeuroMLWriter.write(doc,nml_file) +nml_file = "./tmp/ionChannelTest.xml" +writers.NeuroMLWriter.write(doc, nml_file) -print("Written channel file to: "+nml_file) +print("Written channel file to: " + nml_file) -###### Validate the NeuroML ###### +###### Validate the NeuroML ###### from neuroml.utils import validate_neuroml2 diff --git a/neuroml/examples/json_serialization.py b/neuroml/examples/json_serialization.py index e5b8be1c..381ef395 100644 --- a/neuroml/examples/json_serialization.py +++ b/neuroml/examples/json_serialization.py @@ -8,49 +8,51 @@ import neuroml.loaders as loaders import neuroml.writers as writers -fn = './test_files/Purk2M9s.nml' +fn = "./test_files/Purk2M9s.nml" doc = loaders.NeuroMLLoader.load(fn) -print("Loaded morphology file from: "+fn) +print("Loaded morphology file from: " + fn) -#get the parent segment: +# get the parent segment: parent_segment = doc.cells[0].morphology.segments[0] parent = neuroml.SegmentParent(segments=parent_segment.id) -#make an axon: -seg_id = 5000 # need a way to get a unique id from a morphology +# make an axon: +seg_id = 5000 # need a way to get a unique id from 
a morphology axon_segments = [] for i in range(10): - p = neuroml.Point3DWithDiam(x=parent_segment.distal.x, - y=parent_segment.distal.y, - z=parent_segment.distal.z, - diameter=0.1) - - d = neuroml.Point3DWithDiam(x=parent_segment.distal.x+10, - y=parent_segment.distal.y, - z=parent_segment.distal.z, - diameter=0.1) - - axon_segment = neuroml.Segment(proximal = p, - distal = d, - parent = parent) + p = neuroml.Point3DWithDiam( + x=parent_segment.distal.x, + y=parent_segment.distal.y, + z=parent_segment.distal.z, + diameter=0.1, + ) + + d = neuroml.Point3DWithDiam( + x=parent_segment.distal.x + 10, + y=parent_segment.distal.y, + z=parent_segment.distal.z, + diameter=0.1, + ) + + axon_segment = neuroml.Segment(proximal=p, distal=d, parent=parent) axon_segment.id = seg_id - - axon_segment.name = 'axon_segment_' + str(axon_segment.id) - #now reset everything: + axon_segment.name = "axon_segment_" + str(axon_segment.id) + + # now reset everything: parent = neuroml.SegmentParent(segments=axon_segment.id) parent_segment = axon_segment - seg_id += 1 + seg_id += 1 axon_segments.append(axon_segment) doc.cells[0].morphology.segments += axon_segments -json_file = './tmp/modified_morphology.json' +json_file = "./tmp/modified_morphology.json" -writers.JSONWriter.write(doc,json_file) +writers.JSONWriter.write(doc, json_file) print("Saved modified morphology in JSON format to: " + json_file) @@ -59,13 +61,13 @@ neuroml_document_from_json = loaders.JSONLoader.load(json_file) -print("Re-loaded neuroml document in JSON format to NeuroMLDocument object") +print("Re-loaded neuroml document in JSON format to NeuroMLDocument object") -nml_file = './tmp/modified_morphology_from_json.nml' +nml_file = "./tmp/modified_morphology_from_json.nml" -writers.NeuroMLWriter.write(neuroml_document_from_json,nml_file) +writers.NeuroMLWriter.write(neuroml_document_from_json, nml_file) -###### Validate the NeuroML ###### +###### Validate the NeuroML ###### from neuroml.utils import validate_neuroml2 diff --git a/neuroml/examples/loading_modifying_writing.py b/neuroml/examples/loading_modifying_writing.py index a8221f36..23ebe7b5 100644 --- a/neuroml/examples/loading_modifying_writing.py +++ b/neuroml/examples/loading_modifying_writing.py @@ -7,54 +7,56 @@ import neuroml.loaders as loaders import neuroml.writers as writers -fn = './test_files/Purk2M9s.nml' +fn = "./test_files/Purk2M9s.nml" doc = loaders.NeuroMLLoader.load(fn) -print("Loaded morphology file from: "+fn) +print("Loaded morphology file from: " + fn) -#get the parent segment: +# get the parent segment: parent_segment = doc.cells[0].morphology.segments[0] parent = neuroml.SegmentParent(segments=parent_segment.id) -#make an axon: -seg_id = 5000 # need a way to get a unique id from a morphology +# make an axon: +seg_id = 5000 # need a way to get a unique id from a morphology axon_segments = [] for i in range(10): - p = neuroml.Point3DWithDiam(x=parent_segment.distal.x, - y=parent_segment.distal.y, - z=parent_segment.distal.z, - diameter=0.1) - - d = neuroml.Point3DWithDiam(x=parent_segment.distal.x+10, - y=parent_segment.distal.y, - z=parent_segment.distal.z, - diameter=0.1) - - axon_segment = neuroml.Segment(proximal = p, - distal = d, - parent = parent) + p = neuroml.Point3DWithDiam( + x=parent_segment.distal.x, + y=parent_segment.distal.y, + z=parent_segment.distal.z, + diameter=0.1, + ) + + d = neuroml.Point3DWithDiam( + x=parent_segment.distal.x + 10, + y=parent_segment.distal.y, + z=parent_segment.distal.z, + diameter=0.1, + ) + + axon_segment = 
neuroml.Segment(proximal=p, distal=d, parent=parent) axon_segment.id = seg_id - - axon_segment.name = 'axon_segment_' + str(axon_segment.id) - #now reset everything: + axon_segment.name = "axon_segment_" + str(axon_segment.id) + + # now reset everything: parent = neuroml.SegmentParent(segments=axon_segment.id) parent_segment = axon_segment - seg_id += 1 + seg_id += 1 axon_segments.append(axon_segment) doc.cells[0].morphology.segments += axon_segments -nml_file = './tmp/modified_morphology.nml' +nml_file = "./tmp/modified_morphology.nml" -writers.NeuroMLWriter.write(doc,nml_file) +writers.NeuroMLWriter.write(doc, nml_file) -print("Saved modified morphology file to: "+nml_file) +print("Saved modified morphology file to: " + nml_file) -###### Validate the NeuroML ###### +###### Validate the NeuroML ###### from neuroml.utils import validate_neuroml2 diff --git a/neuroml/examples/loading_modifying_writing_large.py b/neuroml/examples/loading_modifying_writing_large.py index 113ab312..ee806cd7 100644 --- a/neuroml/examples/loading_modifying_writing_large.py +++ b/neuroml/examples/loading_modifying_writing_large.py @@ -9,12 +9,12 @@ from guppy import hpy -h = hpy() +h = hpy() -fn = './test_files/EC2-609291-4.CNG.nml' -#fn = './test_files/Purk2M9s.nml' +fn = "./test_files/EC2-609291-4.CNG.nml" +# fn = './test_files/Purk2M9s.nml' cells = [] @@ -24,58 +24,55 @@ doc = loaders.NeuroMLLoader.load(fn) cells.append(doc.cells[0]) - print("Loaded morphology file from: "+fn) + print("Loaded morphology file from: " + fn) - #get the parent segment: + # get the parent segment: parent_segment = doc.cells[0].morphology.segments[0] - print("Cell %i has %i segments"%(n,len(doc.cells[0].morphology.segments))) + print("Cell %i has %i segments" % (n, len(doc.cells[0].morphology.segments))) print(h.heap()) - - parent = neuroml.SegmentParent(segments=parent_segment.id) - #make an axon: - seg_id = 5000 # need a way to get a unique id from a morphology + # make an axon: + seg_id = 5000 # need a way to get a unique id from a morphology axon_segments = [] for i in range(10): - p = neuroml.Point3DWithDiam(x=parent_segment.distal.x, - y=parent_segment.distal.y, - z=parent_segment.distal.z, - diameter=0.1) - - d = neuroml.Point3DWithDiam(x=parent_segment.distal.x+10, - y=parent_segment.distal.y, - z=parent_segment.distal.z, - diameter=0.1) - - axon_segment = neuroml.Segment(proximal = p, - distal = d, - parent = parent) + p = neuroml.Point3DWithDiam( + x=parent_segment.distal.x, + y=parent_segment.distal.y, + z=parent_segment.distal.z, + diameter=0.1, + ) + + d = neuroml.Point3DWithDiam( + x=parent_segment.distal.x + 10, + y=parent_segment.distal.y, + z=parent_segment.distal.z, + diameter=0.1, + ) + + axon_segment = neuroml.Segment(proximal=p, distal=d, parent=parent) axon_segment.id = seg_id - - axon_segment.name = 'axon_segment_' + str(axon_segment.id) - #now reset everything: + axon_segment.name = "axon_segment_" + str(axon_segment.id) + + # now reset everything: parent = neuroml.SegmentParent(segments=axon_segment.id) parent_segment = axon_segment - seg_id += 1 + seg_id += 1 axon_segments.append(axon_segment) doc.cells[0].morphology.segments += axon_segments - - -fn = './tmp/modified_morphology2.nml' -writers.NeuroMLWriter.write(doc,fn) -print("Saved modified morphology file to: "+fn) +fn = "./tmp/modified_morphology2.nml" +writers.NeuroMLWriter.write(doc, fn) -print("Done") - +print("Saved modified morphology file to: " + fn) +print("Done") diff --git a/neuroml/examples/morphology_generation.py 
b/neuroml/examples/morphology_generation.py index c4e2dff9..aa2cd19c 100644 --- a/neuroml/examples/morphology_generation.py +++ b/neuroml/examples/morphology_generation.py @@ -6,10 +6,10 @@ import neuroml import neuroml.writers as writers -p = neuroml.Point3DWithDiam(x=0,y=0,z=0,diameter=50) -d = neuroml.Point3DWithDiam(x=50,y=0,z=0,diameter=50) +p = neuroml.Point3DWithDiam(x=0, y=0, z=0, diameter=50) +d = neuroml.Point3DWithDiam(x=50, y=0, z=0, diameter=50) soma = neuroml.Segment(proximal=p, distal=d) -soma.name = 'Soma' +soma.name = "Soma" soma.id = 0 # Make an axon with 100 compartments: @@ -20,28 +20,30 @@ seg_id = 1 for i in range(100): - p = neuroml.Point3DWithDiam(x=parent_segment.distal.x, - y=parent_segment.distal.y, - z=parent_segment.distal.z, - diameter=0.1) - - d = neuroml.Point3DWithDiam(x=parent_segment.distal.x+10, - y=parent_segment.distal.y, - z=parent_segment.distal.z, - diameter=0.1) - - axon_segment = neuroml.Segment(proximal = p, - distal = d, - parent = parent) + p = neuroml.Point3DWithDiam( + x=parent_segment.distal.x, + y=parent_segment.distal.y, + z=parent_segment.distal.z, + diameter=0.1, + ) + + d = neuroml.Point3DWithDiam( + x=parent_segment.distal.x + 10, + y=parent_segment.distal.y, + z=parent_segment.distal.z, + diameter=0.1, + ) + + axon_segment = neuroml.Segment(proximal=p, distal=d, parent=parent) axon_segment.id = seg_id - - axon_segment.name = 'axon_segment_' + str(axon_segment.id) - #now reset everything: + axon_segment.name = "axon_segment_" + str(axon_segment.id) + + # now reset everything: parent = neuroml.SegmentParent(segments=axon_segment.id) parent_segment = axon_segment - seg_id += 1 + seg_id += 1 axon_segments.append(axon_segment) @@ -51,22 +53,22 @@ test_morphology.id = "TestMorphology" cell = neuroml.Cell() -cell.name = 'TestCell' -cell.id = 'TestCell' +cell.name = "TestCell" +cell.id = "TestCell" cell.morphology = test_morphology -doc = neuroml.NeuroMLDocument(id = "TestNeuroMLDocument") +doc = neuroml.NeuroMLDocument(id="TestNeuroMLDocument") doc.cells.append(cell) -nml_file = 'tmp/testmorphwrite.nml' - -writers.NeuroMLWriter.write(doc,nml_file) - -print("Written morphology file to: "+nml_file) +nml_file = "tmp/testmorphwrite.nml" + +writers.NeuroMLWriter.write(doc, nml_file) + +print("Written morphology file to: " + nml_file) -###### Validate the NeuroML ###### +###### Validate the NeuroML ###### from neuroml.utils import validate_neuroml2 diff --git a/neuroml/examples/run_all.py b/neuroml/examples/run_all.py index 6f32acb7..69eff236 100644 --- a/neuroml/examples/run_all.py +++ b/neuroml/examples/run_all.py @@ -1,9 +1,11 @@ print("Running all examples...") + def run_example(ex_file): - print("-------------------------------------\nRunning %s"%ex_file) - exec("import %s"%ex_file[:-3]) - print("-------------------------------------\nCompleted: %s"%ex_file) + print("-------------------------------------\nRunning %s" % ex_file) + exec("import %s" % ex_file[:-3]) + print("-------------------------------------\nCompleted: %s" % ex_file) + run_example("arraymorph_generation.py") run_example("build_3D_network.py") @@ -14,7 +16,7 @@ def run_example(ex_file): run_example("json_serialization.py") run_example("loading_modifying_writing.py") # This next one takes a while to run but all of its functionality is covered in loading_modifying_writing.py -#run_example("loading_modifying_writing_large.py") +# run_example("loading_modifying_writing_large.py") run_example("morphology_generation.py") run_example("write_pynn.py") run_example("write_syns.py") 
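The example scripts touched above all share the same basic libNeuroML workflow: build a neuroml.NeuroMLDocument, append component objects (cells, synapses, populations) to it, write it out with one of the writers, and validate the result with validate_neuroml2. A minimal sketch of that pattern, assembled from the calls seen in these diffs, is shown below; the document id "ExampleDoc" and the output path "tmp/example.nml" are illustrative placeholders, not values used by the examples themselves.

import neuroml
import neuroml.writers as writers
from neuroml.utils import validate_neuroml2

# Build a document and add a single integrate-and-fire cell to it
doc = neuroml.NeuroMLDocument(id="ExampleDoc")
iaf = neuroml.IafCell(
    id="iaf",
    C="1.0 nF",
    thresh="-50mV",
    reset="-65mV",
    leak_conductance="10 nS",
    leak_reversal="-65mV",
)
doc.iaf_cells.append(iaf)

# Serialize to XML and check the file against the NeuroML 2 schema
nml_file = "tmp/example.nml"
writers.NeuroMLWriter.write(doc, nml_file)
validate_neuroml2(nml_file)
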
diff --git a/neuroml/examples/single_izhikevich_reader.py b/neuroml/examples/single_izhikevich_reader.py index 046c0ce0..0525d3f4 100644 --- a/neuroml/examples/single_izhikevich_reader.py +++ b/neuroml/examples/single_izhikevich_reader.py @@ -1,8 +1,9 @@ -#from neuroml import NeuroMLDocument +# from neuroml import NeuroMLDocument from neuroml import IzhikevichCell from neuroml.loaders import NeuroMLLoader from neuroml.utils import validate_neuroml2 + def load_izhikevich(filename="./test_files/SingleIzhikevich.nml"): nml_filename = filename validate_neuroml2(nml_filename) @@ -11,7 +12,15 @@ def load_izhikevich(filename="./test_files/SingleIzhikevich.nml"): iz_cells = nml_doc.izhikevich_cells for i, iz in enumerate(iz_cells): if isinstance(iz, IzhikevichCell): - neuron_string = "%d %s %s %s %s %s (%s)" % (i, iz.v0, iz.a, iz.b, iz.c, iz.d, iz.id) + neuron_string = "%d %s %s %s %s %s (%s)" % ( + i, + iz.v0, + iz.a, + iz.b, + iz.c, + iz.d, + iz.id, + ) print(neuron_string) else: print("Error: Cell %d is not an IzhikevichCell" % i) diff --git a/neuroml/examples/single_izhikevich_writer.py b/neuroml/examples/single_izhikevich_writer.py index 095756cf..e49afc99 100644 --- a/neuroml/examples/single_izhikevich_writer.py +++ b/neuroml/examples/single_izhikevich_writer.py @@ -8,7 +8,9 @@ def write_izhikevich(filename="./tmp/SingleIzhikevich_test.nml"): nml_doc = NeuroMLDocument(id="SingleIzhikevich") nml_filename = filename - iz0 = IzhikevichCell(id="iz0", v0="-70mV", thresh="30mV", a="0.02", b="0.2", c="-65.0", d="6") + iz0 = IzhikevichCell( + id="iz0", v0="-70mV", thresh="30mV", a="0.02", b="0.2", c="-65.0", d="6" + ) nml_doc.izhikevich_cells.append(iz0) @@ -16,7 +18,4 @@ def write_izhikevich(filename="./tmp/SingleIzhikevich_test.nml"): validate_neuroml2(nml_filename) - write_izhikevich() - - diff --git a/neuroml/examples/write_pynn.py b/neuroml/examples/write_pynn.py index 7acae3ce..00ee9224 100644 --- a/neuroml/examples/write_pynn.py +++ b/neuroml/examples/write_pynn.py @@ -15,21 +15,49 @@ nml_doc = NeuroMLDocument(id="IafNet") -pynn0 = IF_curr_alpha(id="IF_curr_alpha_pop_IF_curr_alpha", cm="1.0", i_offset="0.9", tau_m="20.0", tau_refrac="10.0", tau_syn_E="0.5", tau_syn_I="0.5", v_init="-65", v_reset="-62.0", v_rest="-65.0", v_thresh="-52.0") +pynn0 = IF_curr_alpha( + id="IF_curr_alpha_pop_IF_curr_alpha", + cm="1.0", + i_offset="0.9", + tau_m="20.0", + tau_refrac="10.0", + tau_syn_E="0.5", + tau_syn_I="0.5", + v_init="-65", + v_reset="-62.0", + v_rest="-65.0", + v_thresh="-52.0", +) nml_doc.IF_curr_alpha.append(pynn0) -pynn1 = HH_cond_exp(id="HH_cond_exp_pop_HH_cond_exp", cm="0.2", e_rev_E="0.0", e_rev_I="-80.0", e_rev_K="-90.0", e_rev_Na="50.0", e_rev_leak="-65.0", g_leak="0.01", gbar_K="6.0", gbar_Na="20.0", i_offset="0.2", tau_syn_E="0.2", tau_syn_I="2.0", v_init="-65", v_offset="-63.0") +pynn1 = HH_cond_exp( + id="HH_cond_exp_pop_HH_cond_exp", + cm="0.2", + e_rev_E="0.0", + e_rev_I="-80.0", + e_rev_K="-90.0", + e_rev_Na="50.0", + e_rev_leak="-65.0", + g_leak="0.01", + gbar_K="6.0", + gbar_Na="20.0", + i_offset="0.2", + tau_syn_E="0.2", + tau_syn_I="2.0", + v_init="-65", + v_offset="-63.0", +) nml_doc.HH_cond_exp.append(pynn1) pynnSynn0 = ExpCondSynapse(id="ps1", tau_syn="5", e_rev="0") nml_doc.exp_cond_synapses.append(pynnSynn0) -nml_file = 'tmp/pynn_network.xml' +nml_file = "tmp/pynn_network.xml" writers.NeuroMLWriter.write(nml_doc, nml_file) -print("Saved to: "+nml_file) +print("Saved to: " + nml_file) -###### Validate the NeuroML ###### +###### Validate the NeuroML ###### from neuroml.utils 
import validate_neuroml2 diff --git a/neuroml/examples/write_syns.py b/neuroml/examples/write_syns.py index 8938f17e..af806476 100644 --- a/neuroml/examples/write_syns.py +++ b/neuroml/examples/write_syns.py @@ -15,27 +15,36 @@ expOneSyn0 = ExpOneSynapse(id="ampa", tau_decay="5ms", gbase="1nS", erev="0mV") nml_doc.exp_one_synapses.append(expOneSyn0) -expTwoSyn0 = ExpTwoSynapse(id="gaba", tau_decay="12ms", tau_rise="3ms", gbase="1nS", erev="-70mV") +expTwoSyn0 = ExpTwoSynapse( + id="gaba", tau_decay="12ms", tau_rise="3ms", gbase="1nS", erev="-70mV" +) nml_doc.exp_two_synapses.append(expTwoSyn0) -bpSyn = BlockingPlasticSynapse(id="blockStpSynDep", gbase="1nS", erev="0mV", tau_rise="0.1ms", tau_decay="2ms") +bpSyn = BlockingPlasticSynapse( + id="blockStpSynDep", gbase="1nS", erev="0mV", tau_rise="0.1ms", tau_decay="2ms" +) bpSyn.notes = "This is a note" -bpSyn.plasticity_mechanism = PlasticityMechanism(type="tsodyksMarkramDepMechanism", init_release_prob="0.5", tau_rec="120 ms") -bpSyn.block_mechanism = BlockMechanism(type="voltageConcDepBlockMechanism", species="mg", block_concentration="1.2 mM", scaling_conc="1.920544 mM", scaling_volt="16.129 mV") +bpSyn.plasticity_mechanism = PlasticityMechanism( + type="tsodyksMarkramDepMechanism", init_release_prob="0.5", tau_rec="120 ms" +) +bpSyn.block_mechanism = BlockMechanism( + type="voltageConcDepBlockMechanism", + species="mg", + block_concentration="1.2 mM", + scaling_conc="1.920544 mM", + scaling_volt="16.129 mV", +) nml_doc.blocking_plastic_synapses.append(bpSyn) -nml_file = 'tmp/synapses.xml' +nml_file = "tmp/synapses.xml" writers.NeuroMLWriter.write(nml_doc, nml_file) -print("Saved to: "+nml_file) +print("Saved to: " + nml_file) -###### Validate the NeuroML ###### +###### Validate the NeuroML ###### from neuroml.utils import validate_neuroml2 validate_neuroml2(nml_file) - - - diff --git a/neuroml/hdf5/DefaultNetworkHandler.py b/neuroml/hdf5/DefaultNetworkHandler.py index aff7d80f..2148cbcb 100644 --- a/neuroml/hdf5/DefaultNetworkHandler.py +++ b/neuroml/hdf5/DefaultNetworkHandler.py @@ -5,7 +5,7 @@ # Parsing classes, e.g. NetworkBuilder should call the appropriate # function here when a cell location, connection, etc. is encountered. 
# -# Use of this handler class should mean that the network setup is +# Use of this handler class should mean that the network setup is # independent of the source of the network info (XML or HDF5 based NeuroML # files for example) and the instantiator of the network objects (NetManagerNEURON # or PyNN based setup class) @@ -22,9 +22,9 @@ class DefaultNetworkHandler: - + log = logging.getLogger("DefaultNetworkHandler") - + isParallel = 0 # @@ -32,142 +32,249 @@ class DefaultNetworkHandler: # def print_location_information(self, id, population_id, component, x, y, z): position = "(%s, %s, %s)" % (x, y, z) - self.log.debug("Location "+str(id)+" of population: "+population_id+", component: "+component+": "+position) - - + self.log.debug( + "Location " + + str(id) + + " of population: " + + population_id + + ", component: " + + component + + ": " + + position + ) + # # Internal info method, can be reused in overriding classes for debugging - # - def print_connection_information(self, projName, id, prePop, postPop, synapseType, preCellId, postCellId, weight): - self.log.debug("Connection "+str(id)+" of: "+projName+": cell "+str(preCellId)+" in "+prePop \ - +" -> cell "+str(postCellId)+" in "+postPop+", syn: "+ str(synapseType)+", weight: "+str(weight)) - - + # + def print_connection_information( + self, projName, id, prePop, postPop, synapseType, preCellId, postCellId, weight + ): + self.log.debug( + "Connection " + + str(id) + + " of: " + + projName + + ": cell " + + str(preCellId) + + " in " + + prePop + + " -> cell " + + str(postCellId) + + " in " + + postPop + + ", syn: " + + str(synapseType) + + ", weight: " + + str(weight) + ) + # # Internal info method, can be reused in overriding classes for debugging - # + # def print_input_information(self, inputName, population_id, component, size=-1): - sizeInfo = " size: "+ str(size)+ " cells" - self.log.debug("Input Source: "+inputName+", on population: "+population_id+sizeInfo+" with component: "+ component) - + sizeInfo = " size: " + str(size) + " cells" + self.log.debug( + "Input Source: " + + inputName + + ", on population: " + + population_id + + sizeInfo + + " with component: " + + component + ) + # Temp for older API version def printLocationInformation(self, id, population_id, component, x, y, z): return self.print_location_information(id, population_id, component, x, y, z) - def printConnectionInformation(self, projName, id, prePop, postPop, synapseType, preCellId, postCellId, weight): - return self.print_connection_information(projName, id, prePop, postPop, synapseType, preCellId, postCellId, weight) + + def printConnectionInformation( + self, projName, id, prePop, postPop, synapseType, preCellId, postCellId, weight + ): + return self.print_connection_information( + projName, id, prePop, postPop, synapseType, preCellId, postCellId, weight + ) + def printInputInformation(self, inputName, population_id, component, size=-1): return self.print_input_information(inputName, population_id, component, size) # # Should be overridden - # + # def handle_document_start(self, id, notes): - - self.log.debug("Document: %s"%id) + + self.log.debug("Document: %s" % id) if notes: - self.log.debug(" Notes: "+notes) + self.log.debug(" Notes: " + notes) # # Should be overridden to create network - # + # def handle_network(self, network_id, notes, temperature=None): - - self.log.debug("Network: %s"%network_id) + + self.log.debug("Network: %s" % network_id) if temperature: - self.log.debug(" Temperature: "+temperature) + self.log.debug(" Temperature: " + 
temperature) if notes: - self.log.debug(" Notes: "+notes) + self.log.debug(" Notes: " + notes) # # Should be overridden to create population array - # - def handle_population(self, population_id, component, size=-1, component_obj=None, properties={}, notes=None): + # + def handle_population( + self, + population_id, + component, + size=-1, + component_obj=None, + properties={}, + notes=None, + ): sizeInfo = " as yet unspecified size" - if size>=0: - sizeInfo = " size: "+ str(size)+ " cells" + if size >= 0: + sizeInfo = " size: " + str(size) + " cells" if component_obj: - compInfo = " (%s)"%component_obj.__class__.__name__ + compInfo = " (%s)" % component_obj.__class__.__name__ else: - compInfo="" - propsInfo='' - if len(properties)>0: - propsInfo+='; %s'%properties - - self.log.debug("Population: "+population_id+", component: "+component+compInfo+sizeInfo+propsInfo) - - + compInfo = "" + propsInfo = "" + if len(properties) > 0: + propsInfo += "; %s" % properties + + self.log.debug( + "Population: " + + population_id + + ", component: " + + component + + compInfo + + sizeInfo + + propsInfo + ) + # # Should be overridden to create specific cell instance - # + # def handle_location(self, id, population_id, component, x, y, z): self.print_location_information(id, population_id, component, x, y, z) - - # # Should be overridden to create population array # - def handle_projection(self, projName, prePop, postPop, synapse, hasWeights=False, hasDelays=False, type="projection", synapse_obj=None, pre_synapse_obj=None): + def handle_projection( + self, + projName, + prePop, + postPop, + synapse, + hasWeights=False, + hasDelays=False, + type="projection", + synapse_obj=None, + pre_synapse_obj=None, + ): - synInfo="" + synInfo = "" if synapse_obj: - synInfo += " (syn: %s)"%synapse_obj.__class__.__name__ - - if pre_synapse_obj: - synInfo += " (pre comp: %s)"%pre_synapse_obj.__class__.__name__ + synInfo += " (syn: %s)" % synapse_obj.__class__.__name__ - self.log.debug("Projection: "+projName+" ("+type+") from "+prePop+" to "+postPop+" with syn: %s%s"%(synapse,synInfo)) + if pre_synapse_obj: + synInfo += " (pre comp: %s)" % pre_synapse_obj.__class__.__name__ + self.log.debug( + "Projection: " + + projName + + " (" + + type + + ") from " + + prePop + + " to " + + postPop + + " with syn: %s%s" % (synapse, synInfo) + ) # # Should be overridden to handle network connection - # - def handle_connection(self, projName, id, prePop, postPop, synapseType, \ - preCellId, \ - postCellId, \ - preSegId = 0, \ - preFract = 0.5, \ - postSegId = 0, \ - postFract = 0.5, \ - delay = 0, \ - weight = 1): - - self.print_connection_information(projName, id, prePop, postPop, synapseType, preCellId, postCellId, weight) - if preSegId != 0 or postSegId!=0 or preFract != 0.5 or postFract != 0.5: - self.log.debug("Src cell: %d, seg: %f, fract: %f -> Tgt cell %d, seg: %f, fract: %f; weight %s, delay: %s ms" % (preCellId,preSegId,preFract,postCellId,postSegId,postFract, weight, delay)) - + # + def handle_connection( + self, + projName, + id, + prePop, + postPop, + synapseType, + preCellId, + postCellId, + preSegId=0, + preFract=0.5, + postSegId=0, + postFract=0.5, + delay=0, + weight=1, + ): + + self.print_connection_information( + projName, id, prePop, postPop, synapseType, preCellId, postCellId, weight + ) + if preSegId != 0 or postSegId != 0 or preFract != 0.5 or postFract != 0.5: + self.log.debug( + "Src cell: %d, seg: %f, fract: %f -> Tgt cell %d, seg: %f, fract: %f; weight %s, delay: %s ms" + % ( + preCellId, + preSegId, + 
preFract, + postCellId, + postSegId, + postFract, + weight, + delay, + ) + ) + # # Should be overridden to handle end of network connection - # - def finalise_projection(self, projName, prePop, postPop, synapse=None, type="projection"): - - self.log.debug("Projection: "+projName+" from "+prePop+" to "+postPop+" completed") - - + # + def finalise_projection( + self, projName, prePop, postPop, synapse=None, type="projection" + ): + + self.log.debug( + "Projection: " + + projName + + " from " + + prePop + + " to " + + postPop + + " completed" + ) + # # Should be overridden to create input source array - # - def handle_input_list(self, inputListId, population_id, component, size, input_comp_obj=None): - + # + def handle_input_list( + self, inputListId, population_id, component, size, input_comp_obj=None + ): + self.print_input_information(inputListId, population_id, component, size) - - if size<0: - self.log.error("Error! Need a size attribute in sites element to create spike source!") + + if size < 0: + self.log.error( + "Error! Need a size attribute in sites element to create spike source!" + ) return - - + # # Should be overridden to to connect each input to the target cell - # - def handle_single_input(self, inputListId, id, cellId, segId = 0, fract = 0.5, weight=1): - - self.log.debug("Input: %s[%s], cellId: %i, seg: %i, fract: %f, weight: %f" % (inputListId,id,cellId,segId,fract,weight)) - - + # + def handle_single_input( + self, inputListId, id, cellId, segId=0, fract=0.5, weight=1 + ): + + self.log.debug( + "Input: %s[%s], cellId: %i, seg: %i, fract: %f, weight: %f" + % (inputListId, id, cellId, segId, fract, weight) + ) + # # Should be overridden to to connect each input to the target cell - # + # def finalise_input_source(self, inputName): self.log.debug("Input : %s completed" % inputName) - - diff --git a/neuroml/hdf5/NetworkBuilder.py b/neuroml/hdf5/NetworkBuilder.py index f85e7f73..a2408df4 100644 --- a/neuroml/hdf5/NetworkBuilder.py +++ b/neuroml/hdf5/NetworkBuilder.py @@ -21,343 +21,449 @@ class NetworkBuilder(DefaultNetworkHandler): - + log = logging.getLogger("NetworkBuilder") - + populations = {} projections = {} projection_syns = {} projection_types = {} projection_syns_pre = {} input_lists = {} - + weightDelays = {} - - def get_nml_doc(self): return self.nml_doc - + # # Overridden from DefaultNetworkHandler - # + # def handle_document_start(self, id, notes): self.nml_doc = neuroml.NeuroMLDocument(id=id) - if notes and len(notes)>0: + if notes and len(notes) > 0: self.nml_doc.notes = notes - + # # Overridden from DefaultNetworkHandler - # + # def handle_network(self, network_id, notes, temperature=None): - + self.network = neuroml.Network(id=network_id) self.nml_doc.networks.append(self.network) - if notes and len(notes)>0: + if notes and len(notes) > 0: self.network.notes = notes if temperature is not None: self.network.temperature = temperature self.network.type = "networkWithTemperature" - + # # Overridden from DefaultNetworkHandler - # - def handle_population(self, population_id, component, size, component_obj=None, properties={}, notes=None): - + # + def handle_population( + self, + population_id, + component, + size, + component_obj=None, + properties={}, + notes=None, + ): + if component_obj: self.nml_doc.append(component_obj) - + pop = neuroml.Population(id=population_id, component=component, size=size) - - if notes and len(notes)>0: + + if notes and len(notes) > 0: pop.notes = notes - + self.populations[population_id] = pop self.network.populations.append(pop) for p 
in properties: - pop.properties.append(neuroml.Property(p,properties[p])) - - comp_obj_info = ' (%s)'%type(component_obj) if component_obj else '' - - if (size>=0): - sizeInfo = ", size "+ str(size)+ " cells" - self.log.debug("Creating population: "+population_id+", cell type: "+component+comp_obj_info+sizeInfo) + pop.properties.append(neuroml.Property(p, properties[p])) + + comp_obj_info = " (%s)" % type(component_obj) if component_obj else "" + + if size >= 0: + sizeInfo = ", size " + str(size) + " cells" + self.log.debug( + "Creating population: " + + population_id + + ", cell type: " + + component + + comp_obj_info + + sizeInfo + ) else: - self.log.error("Population: "+population_id+", cell type: "+component+comp_obj_info+" specifies no size. May lead to errors!") - - + self.log.error( + "Population: " + + population_id + + ", cell type: " + + component + + comp_obj_info + + " specifies no size. May lead to errors!" + ) + # # Overridden from DefaultNetworkHandler - # + # def handle_location(self, id, population_id, component, x, y, z): ##print('Loc: %s %s (%s,%s,%s)'%(id, population_id, x, y, z)) self.print_location_information(id, population_id, component, x, y, z) - + if x is not None and y is not None and z is not None: inst = neuroml.Instance(id=id) - inst.location = neuroml.Location(x=x,y=y,z=z) + inst.location = neuroml.Location(x=x, y=y, z=z) self.populations[population_id].instances.append(inst) - self.populations[population_id].type = 'populationList' + self.populations[population_id].type = "populationList" else: - self.log.warning('Ignoring location: %s %s (%s,%s,%s)'%(id, population_id, x, y, z)) - + self.log.warning( + "Ignoring location: %s %s (%s,%s,%s)" % (id, population_id, x, y, z) + ) + # # Overridden from DefaultNetworkHandler # - def handle_projection(self, id, prePop, postPop, synapse, hasWeights=False, hasDelays=False, type="projection", synapse_obj=None, pre_synapse_obj=None): - + def handle_projection( + self, + id, + prePop, + postPop, + synapse, + hasWeights=False, + hasDelays=False, + type="projection", + synapse_obj=None, + pre_synapse_obj=None, + ): + if synapse_obj: self.nml_doc.append(synapse_obj) if pre_synapse_obj: self.nml_doc.append(pre_synapse_obj) - + proj = None - - if type=="projection": - proj = neuroml.Projection(id=id, presynaptic_population=prePop, postsynaptic_population=postPop, synapse=synapse) + + if type == "projection": + proj = neuroml.Projection( + id=id, + presynaptic_population=prePop, + postsynaptic_population=postPop, + synapse=synapse, + ) self.network.projections.append(proj) - elif type=="electricalProjection": - proj = neuroml.ElectricalProjection(id=id, presynaptic_population=prePop, postsynaptic_population=postPop) + elif type == "electricalProjection": + proj = neuroml.ElectricalProjection( + id=id, presynaptic_population=prePop, postsynaptic_population=postPop + ) self.network.electrical_projections.append(proj) self.projection_syns[id] = synapse - elif type=="continuousProjection": - proj = neuroml.ContinuousProjection(id=id, presynaptic_population=prePop, postsynaptic_population=postPop) + elif type == "continuousProjection": + proj = neuroml.ContinuousProjection( + id=id, presynaptic_population=prePop, postsynaptic_population=postPop + ) self.network.continuous_projections.append(proj) - - if synapse_obj!=None: + + if synapse_obj != None: self.projection_syns[id] = synapse_obj.id else: self.projection_syns[id] = synapse - + if pre_synapse_obj == None: - pre_synapse_obj = neuroml.SilentSynapse(id="silentSyn_%s"%id) + 
pre_synapse_obj = neuroml.SilentSynapse(id="silentSyn_%s" % id) self.nml_doc.silent_synapses.append(pre_synapse_obj) - + self.projection_syns_pre[id] = pre_synapse_obj.id - + self.projections[id] = proj self.projection_types[id] = type self.weightDelays[id] = hasWeights or hasDelays - self.log.debug("Projection: %s (%s) from %s to %s with syn: %s, weights: %s, delays: %s"%(id, type, prePop, postPop, synapse, hasWeights, hasDelays)) - + self.log.debug( + "Projection: %s (%s) from %s to %s with syn: %s, weights: %s, delays: %s" + % (id, type, prePop, postPop, synapse, hasWeights, hasDelays) + ) + # # Overridden from DefaultNetworkHandler # def finalise_projection(self, id, prePop, postPop, synapse=None, type=None): - ''' - Check whether handle_projection was not called, e.g. due to no connections present - ''' - if type==None: - type=self.projection_types[id] - - self.log.debug("Projection: %s (%s) from %s to %s completed"%(id,type,prePop,postPop)) - - if type=="projection": + """ + Check whether handle_projection was not called, e.g. due to no connections present + """ + if type == None: + type = self.projection_types[id] + + self.log.debug( + "Projection: %s (%s) from %s to %s completed" % (id, type, prePop, postPop) + ) + + if type == "projection": present = False for p in self.network.projections: - if p.id==id: present=True + if p.id == id: + present = True if not present: - proj = neuroml.Projection(id=id, presynaptic_population=prePop, postsynaptic_population=postPop, synapse=synapse) + proj = neuroml.Projection( + id=id, + presynaptic_population=prePop, + postsynaptic_population=postPop, + synapse=synapse, + ) self.network.projections.append(proj) - elif type=="electricalProjection": + elif type == "electricalProjection": present = False for p in self.network.electrical_projections: - if p.id==id: present=True + if p.id == id: + present = True if not present: - proj = neuroml.ElectricalProjection(id=id, presynaptic_population=prePop, postsynaptic_population=postPop) + proj = neuroml.ElectricalProjection( + id=id, + presynaptic_population=prePop, + postsynaptic_population=postPop, + ) self.network.electrical_projections.append(proj) - + # # Overridden from DefaultNetworkHandler # # Assumes delay in ms - # - def handle_connection(self, proj_id, conn_id, prePop, postPop, synapseType, \ - preCellId, \ - postCellId, \ - preSegId = 0, \ - preFract = 0.5, \ - postSegId = 0, \ - postFract = 0.5, \ - delay=0, - weight=1): - - #self.printConnectionInformation(proj_id, conn_id, prePop, postPop, synapseType, preCellId, postCellId, weight) - - pre_cell_path = "../%s/%i/%s"%(prePop,preCellId,self.populations[prePop].component) - if self.populations[prePop].type==None: - pre_cell_path = "../%s[%i]"%(prePop,preCellId) - - post_cell_path = "../%s/%i/%s"%(postPop,postCellId,self.populations[postPop].component) - if self.populations[postPop].type==None: - post_cell_path = "../%s[%i]"%(postPop,postCellId) - + # + def handle_connection( + self, + proj_id, + conn_id, + prePop, + postPop, + synapseType, + preCellId, + postCellId, + preSegId=0, + preFract=0.5, + postSegId=0, + postFract=0.5, + delay=0, + weight=1, + ): + + # self.printConnectionInformation(proj_id, conn_id, prePop, postPop, synapseType, preCellId, postCellId, weight) + + pre_cell_path = "../%s/%i/%s" % ( + prePop, + preCellId, + self.populations[prePop].component, + ) + if self.populations[prePop].type == None: + pre_cell_path = "../%s[%i]" % (prePop, preCellId) + + post_cell_path = "../%s/%i/%s" % ( + postPop, + postCellId, + 
self.populations[postPop].component, + ) + if self.populations[postPop].type == None: + post_cell_path = "../%s[%i]" % (postPop, postCellId) + if isinstance(self.projections[proj_id], neuroml.ElectricalProjection): - + instances = False - if len(self.populations[prePop].instances)>0 or len(self.populations[postPop].instances)>0: + if ( + len(self.populations[prePop].instances) > 0 + or len(self.populations[postPop].instances) > 0 + ): instances = True - + if not instances: - conn = neuroml.ElectricalConnection(id=conn_id, \ - pre_cell="%s"%(preCellId), \ - pre_segment=preSegId, \ - pre_fraction_along=preFract, - post_cell="%s"%(postCellId), \ - post_segment=postSegId, - post_fraction_along=postFract, - synapse=self.projection_syns[proj_id]) - + conn = neuroml.ElectricalConnection( + id=conn_id, + pre_cell="%s" % (preCellId), + pre_segment=preSegId, + pre_fraction_along=preFract, + post_cell="%s" % (postCellId), + post_segment=postSegId, + post_fraction_along=postFract, + synapse=self.projection_syns[proj_id], + ) + self.projections[proj_id].electrical_connections.append(conn) - + else: - if weight==1: - conn = neuroml.ElectricalConnectionInstance(id=conn_id, \ - pre_cell=pre_cell_path, \ - pre_segment=preSegId, \ - pre_fraction_along=preFract, - post_cell=post_cell_path, \ - post_segment=postSegId, - post_fraction_along=postFract, - synapse=self.projection_syns[proj_id]) - - self.projections[proj_id].electrical_connection_instances.append(conn) + if weight == 1: + conn = neuroml.ElectricalConnectionInstance( + id=conn_id, + pre_cell=pre_cell_path, + pre_segment=preSegId, + pre_fraction_along=preFract, + post_cell=post_cell_path, + post_segment=postSegId, + post_fraction_along=postFract, + synapse=self.projection_syns[proj_id], + ) + + self.projections[proj_id].electrical_connection_instances.append( + conn + ) else: - conn = neuroml.ElectricalConnectionInstanceW(id=conn_id, \ - pre_cell=pre_cell_path, \ - pre_segment=preSegId, \ - pre_fraction_along=preFract, - post_cell=post_cell_path, \ - post_segment=postSegId, - post_fraction_along=postFract, - synapse=self.projection_syns[proj_id], - weight=weight) - - self.projections[proj_id].electrical_connection_instance_ws.append(conn) - - + conn = neuroml.ElectricalConnectionInstanceW( + id=conn_id, + pre_cell=pre_cell_path, + pre_segment=preSegId, + pre_fraction_along=preFract, + post_cell=post_cell_path, + post_segment=postSegId, + post_fraction_along=postFract, + synapse=self.projection_syns[proj_id], + weight=weight, + ) + + self.projections[proj_id].electrical_connection_instance_ws.append( + conn + ) + elif isinstance(self.projections[proj_id], neuroml.ContinuousProjection): - + instances = False - if len(self.populations[prePop].instances)>0 or len(self.populations[postPop].instances)>0: + if ( + len(self.populations[prePop].instances) > 0 + or len(self.populations[postPop].instances) > 0 + ): instances = True - + if not instances: - if weight!=1: - raise Exception("Case not (yet) supported: weight!=1 when not an instance based population...") - - conn = neuroml.ContinuousConnection(id=conn_id, \ - pre_cell="%s"%(preCellId), \ - pre_segment=preSegId, \ - pre_fraction_along=preFract, - post_cell="%s"%(postCellId), \ - post_segment=postSegId, - post_fraction_along=postFract, - pre_component=self.projection_syns_pre[proj_id], - post_component=self.projection_syns[proj_id]) - + if weight != 1: + raise Exception( + "Case not (yet) supported: weight!=1 when not an instance based population..." 
+ ) + + conn = neuroml.ContinuousConnection( + id=conn_id, + pre_cell="%s" % (preCellId), + pre_segment=preSegId, + pre_fraction_along=preFract, + post_cell="%s" % (postCellId), + post_segment=postSegId, + post_fraction_along=postFract, + pre_component=self.projection_syns_pre[proj_id], + post_component=self.projection_syns[proj_id], + ) + self.projections[proj_id].continuous_connections.append(conn) - + else: - if weight==1: - conn = neuroml.ContinuousConnectionInstance(id=conn_id, \ - pre_cell=pre_cell_path, \ - pre_segment=preSegId, \ - pre_fraction_along=preFract, - post_cell=post_cell_path, \ - post_segment=postSegId, - post_fraction_along=postFract, - pre_component=self.projection_syns_pre[proj_id], - post_component=self.projection_syns[proj_id]) - - self.projections[proj_id].continuous_connection_instances.append(conn) + if weight == 1: + conn = neuroml.ContinuousConnectionInstance( + id=conn_id, + pre_cell=pre_cell_path, + pre_segment=preSegId, + pre_fraction_along=preFract, + post_cell=post_cell_path, + post_segment=postSegId, + post_fraction_along=postFract, + pre_component=self.projection_syns_pre[proj_id], + post_component=self.projection_syns[proj_id], + ) + + self.projections[proj_id].continuous_connection_instances.append( + conn + ) else: - conn = neuroml.ContinuousConnectionInstanceW(id=conn_id, \ - pre_cell=pre_cell_path, \ - pre_segment=preSegId, \ - pre_fraction_along=preFract, - post_cell=post_cell_path, \ - post_segment=postSegId, - post_fraction_along=postFract, - pre_component=self.projection_syns_pre[proj_id], - post_component=self.projection_syns[proj_id], - weight=weight) - - self.projections[proj_id].continuous_connection_instance_ws.append(conn) - + conn = neuroml.ContinuousConnectionInstanceW( + id=conn_id, + pre_cell=pre_cell_path, + pre_segment=preSegId, + pre_fraction_along=preFract, + post_cell=post_cell_path, + post_segment=postSegId, + post_fraction_along=postFract, + pre_component=self.projection_syns_pre[proj_id], + post_component=self.projection_syns[proj_id], + weight=weight, + ) + + self.projections[proj_id].continuous_connection_instance_ws.append( + conn + ) + else: - if not self.weightDelays[proj_id] and delay==0 and weight==1: + if not self.weightDelays[proj_id] and delay == 0 and weight == 1: - connection = neuroml.Connection(id=conn_id, \ - pre_cell_id=pre_cell_path, \ - pre_segment_id=preSegId, \ - pre_fraction_along=preFract, - post_cell_id=post_cell_path, \ - post_segment_id=postSegId, - post_fraction_along=postFract) + connection = neuroml.Connection( + id=conn_id, + pre_cell_id=pre_cell_path, + pre_segment_id=preSegId, + pre_fraction_along=preFract, + post_cell_id=post_cell_path, + post_segment_id=postSegId, + post_fraction_along=postFract, + ) self.projections[proj_id].connections.append(connection) else: - connection = neuroml.ConnectionWD(id=conn_id, \ - pre_cell_id=pre_cell_path, \ - pre_segment_id=preSegId, \ - pre_fraction_along=preFract, - post_cell_id=post_cell_path, \ - post_segment_id=postSegId, - post_fraction_along=postFract, - weight=weight, - delay='%sms'%(delay)) + connection = neuroml.ConnectionWD( + id=conn_id, + pre_cell_id=pre_cell_path, + pre_segment_id=preSegId, + pre_fraction_along=preFract, + post_cell_id=post_cell_path, + post_segment_id=postSegId, + post_fraction_along=postFract, + weight=weight, + delay="%sms" % (delay), + ) self.projections[proj_id].connection_wds.append(connection) - # # Overridden from DefaultNetworkHandler - # - def handle_input_list(self, inputListId, population_id, component, size, 
input_comp_obj=None): + # + def handle_input_list( + self, inputListId, population_id, component, size, input_comp_obj=None + ): if input_comp_obj: self.nml_doc.append(input_comp_obj) - - input_list = neuroml.InputList(id=inputListId, - component=component, - populations=population_id) - - self.input_lists[inputListId]=input_list - + + input_list = neuroml.InputList( + id=inputListId, component=component, populations=population_id + ) + + self.input_lists[inputListId] = input_list + self.network.input_lists.append(input_list) - + # # Overridden from DefaultNetworkHandler - # - def handle_single_input(self, inputListId, id, cellId, segId = 0, fract = 0.5, weight = 1.0): + # + def handle_single_input( + self, inputListId, id, cellId, segId=0, fract=0.5, weight=1.0 + ): input_list = self.input_lists[inputListId] - target_path = "../%s/%i/%s"%(input_list.populations, cellId, self.populations[input_list.populations].component) - - if self.populations[input_list.populations].type==None: - target_path = "../%s[%i]"%(input_list.populations, cellId) - - if weight==1: - input = neuroml.Input(id=id, - target=target_path, - destination="synapses") - if segId!=0: - input.segment_id="%s"%(segId) - if fract!=0.5: - input.fraction_along="%s"%(fract) + target_path = "../%s/%i/%s" % ( + input_list.populations, + cellId, + self.populations[input_list.populations].component, + ) + + if self.populations[input_list.populations].type == None: + target_path = "../%s[%i]" % (input_list.populations, cellId) + + if weight == 1: + input = neuroml.Input(id=id, target=target_path, destination="synapses") + if segId != 0: + input.segment_id = "%s" % (segId) + if fract != 0.5: + input.fraction_along = "%s" % (fract) input_list.input.append(input) else: - input_w = neuroml.InputW(id=id, - target=target_path, - destination="synapses") - if segId!=0: - input_w.segment_id="%s"%(segId) - if fract!=0.5: - input_w.fraction_along="%s"%(fract) - input_w.weight=weight - input_list.input_ws.append(input_w) \ No newline at end of file + input_w = neuroml.InputW(id=id, target=target_path, destination="synapses") + if segId != 0: + input_w.segment_id = "%s" % (segId) + if fract != 0.5: + input_w.fraction_along = "%s" % (fract) + input_w.weight = weight + input_list.input_ws.append(input_w) diff --git a/neuroml/hdf5/NetworkContainer.py b/neuroml/hdf5/NetworkContainer.py index 82ad4b66..8434384e 100644 --- a/neuroml/hdf5/NetworkContainer.py +++ b/neuroml/hdf5/NetworkContainer.py @@ -1,437 +1,571 @@ -''' +""" Work in progress.. 
-''' +""" import neuroml import numpy as np class NetworkContainer(neuroml.Network): - + pass - #def __getattr__(self,name): + # def __getattr__(self,name): # print('Requesting in NC: %s'%name) - + class OptimizedList(object): - + array = None cursor = 0 - - def __init__(self, array=None,indices={}): + + def __init__(self, array=None, indices={}): self.array = array self.indices = indices - #print("Created %s with %s"% (self,indices)) - - - def _get_index_or_add(self,name,default_index): + # print("Created %s with %s"% (self,indices)) + + def _get_index_or_add(self, name, default_index): if not name in self.indices: self.indices[name] = default_index return default_index else: return self.indices[name] - - def _get_value(self,i,name,default_value): + + def _get_value(self, i, name, default_value): if not name in self.indices: return default_value else: return self.array[i][self.indices[name]] - - def _add_index_information(self,hdf5_array): + + def _add_index_information(self, hdf5_array): for name in self.indices.keys(): - - n="column_%i"%self.indices[name] - #print("Adding attribute to H5 array: %s = %s"%(n,name)) + + n = "column_%i" % self.indices[name] + # print("Adding attribute to H5 array: %s = %s"%(n,name)) hdf5_array._f_setattr(n, name) - + def __len__(self): - length = self.array.shape[0] if isinstance(self.array,np.ndarray) else 0 - #print('Getting length (%s) of %s'%(length,self.__class__.__name__,)) + length = self.array.shape[0] if isinstance(self.array, np.ndarray) else 0 + # print('Getting length (%s) of %s'%(length,self.__class__.__name__,)) return length - + def __iter__(self): self.cursor = 0 return self - + def __next__(self): if self.cursor == len(self): raise StopIteration else: instance = self.__getitem__(self.cursor) - self.cursor +=1 + self.cursor += 1 return instance def next(self): return self.__next__() - - def __iadd__(self,i_list): + + def __iadd__(self, i_list): for i in i_list: self.append(i) return self - + def __delitem__(self, index): - - raise NotImplementedError("__delitem__ is not implemented (yet) in HDF5 based optimized list") - def __setitem__(self,index,instance): - - raise NotImplementedError("__setitem__() is not implemented (yet) in HDF5 based instance list") - + raise NotImplementedError( + "__delitem__ is not implemented (yet) in HDF5 based optimized list" + ) + + def __setitem__(self, index, instance): + + raise NotImplementedError( + "__setitem__() is not implemented (yet) in HDF5 based instance list" + ) + def __str__(self): - - return "%s (optimized) with %s entries"%(self.__class__.__name__,len(self)) - - -class InstanceList(OptimizedList): + return "%s (optimized) with %s entries" % (self.__class__.__name__, len(self)) - def __getitem__(self,index): - if len(self.array[index])==4: - id = self.array[index][self._get_index_or_add('id',0)] - #print(' Getting instance %s in %s (%s)'%(index,self,id)) - assert(id==index) +class InstanceList(OptimizedList): + def __getitem__(self, index): + + if len(self.array[index]) == 4: + id = self.array[index][self._get_index_or_add("id", 0)] + # print(' Getting instance %s in %s (%s)'%(index,self,id)) + assert id == index else: id = index - + instance = neuroml.Instance(id=id) - instance.location = neuroml.Location(self.array[index][self._get_index_or_add('x',1)], - self.array[index][self._get_index_or_add('y',2)], - self.array[index][self._get_index_or_add('z',3)]) - + instance.location = neuroml.Location( + self.array[index][self._get_index_or_add("x", 1)], + 
self.array[index][self._get_index_or_add("y", 2)], + self.array[index][self._get_index_or_add("z", 3)], + ) + return instance - - - def append(self,instance): - - #print('Adding instance: %s'%instance) + + def append(self, instance): + + # print('Adding instance: %s'%instance) l = instance.location - i = np.array([[instance.id,l.x,l.y,l.z]], np.float32) - if len(self)==0: + i = np.array([[instance.id, l.x, l.y, l.z]], np.float32) + if len(self) == 0: self.array = i else: self.array = np.concatenate((self.array, i)) - - def add_instance(self,id,x,y,z): - - i = np.array([[id,x,y,z]], np.float32) - if len(self)==0: + + def add_instance(self, id, x, y, z): + + i = np.array([[id, x, y, z]], np.float32) + if len(self) == 0: self.array = i else: self.array = np.concatenate((self.array, i)) - - - - + + class PopulationContainer(neuroml.Population): + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + component=None, + size=None, + type=None, + extracellular_properties=None, + layout=None, + instances=None, + ): + super(self.__class__, self).__init__( + neuro_lex_id=neuro_lex_id, + id=id, + metaid=metaid, + notes=notes, + properties=properties, + annotation=annotation, + component=component, + size=size, + type=type, + extracellular_properties=extracellular_properties, + layout=layout, + instances=instances, + ) - - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, component=None, size=None, type=None, extracellular_properties=None, layout=None, instances=None): - super(self.__class__, self).__init__(neuro_lex_id=neuro_lex_id, id=id, metaid=metaid, notes=notes, properties=properties, annotation=annotation, component=component, size=size, type=type, extracellular_properties=extracellular_properties, layout=layout, instances=instances) - self.instances = InstanceList() - - #print("PopulationContainer created") - + + # print("PopulationContainer created") + def __str__(self): - - return "Population (optimized): "+str(self.id)+" with "+str( self.get_size() )+" components of type "+(self.component if self.component else "???") - + + return ( + "Population (optimized): " + + str(self.id) + + " with " + + str(self.get_size()) + + " components of type " + + (self.component if self.component else "???") + ) + def exportHdf5(self, h5file, h5Group): - #print("Exporting %s as HDF5..."%self) - - popGroup = h5file.create_group(h5Group, 'population_'+self.id) + # print("Exporting %s as HDF5..."%self) + + popGroup = h5file.create_group(h5Group, "population_" + self.id) popGroup._f_setattr("id", self.id) popGroup._f_setattr("component", self.component) - + for p in self.properties: - popGroup._f_setattr("property:%s"%p.tag, p) - - if len(self.instances)>0: + popGroup._f_setattr("property:%s" % p.tag, p) + + if len(self.instances) > 0: popGroup._f_setattr("size", len(self.instances)) popGroup._f_setattr("type", "populationList") - h5array = h5file.create_carray(popGroup, self.id, obj=self.instances.array, title="Locations of cells in "+ self.id) - + h5array = h5file.create_carray( + popGroup, + self.id, + obj=self.instances.array, + title="Locations of cells in " + self.id, + ) + self.instances._add_index_information(h5array) - + else: popGroup._f_setattr("size", self.size) - - + + class ProjectionContainer(neuroml.Projection): - - def __init__(self, neuro_lex_id=None, id=None, presynaptic_population=None, postsynaptic_population=None, synapse=None, connections=None, connection_wds=None): - 
- super(self.__class__, self).__init__(neuro_lex_id=neuro_lex_id, id=id, presynaptic_population=presynaptic_population,postsynaptic_population=postsynaptic_population,synapse=synapse,connections=connections,connection_wds=connection_wds) - - assert(connections==None) - assert(connection_wds==None) + def __init__( + self, + neuro_lex_id=None, + id=None, + presynaptic_population=None, + postsynaptic_population=None, + synapse=None, + connections=None, + connection_wds=None, + ): + + super(self.__class__, self).__init__( + neuro_lex_id=neuro_lex_id, + id=id, + presynaptic_population=presynaptic_population, + postsynaptic_population=postsynaptic_population, + synapse=synapse, + connections=connections, + connection_wds=connection_wds, + ) + + assert connections == None + assert connection_wds == None self.connections = ConnectionList() self.connection_wds = ConnectionList() self.connections.presynaptic_population = presynaptic_population self.connections.postsynaptic_population = postsynaptic_population - - + def __str__(self): - return "Projection (optimized): "+self.id+" from "+self.presynaptic_population+" to "+self.postsynaptic_population+", synapse: "+self.synapse - - + return ( + "Projection (optimized): " + + self.id + + " from " + + self.presynaptic_population + + " to " + + self.postsynaptic_population + + ", synapse: " + + self.synapse + ) + def exportHdf5(self, h5file, h5Group): - #print("Exporting %s as HDF5"%self) - - projGroup = h5file.create_group(h5Group, 'projection_'+self.id) + # print("Exporting %s as HDF5"%self) + + projGroup = h5file.create_group(h5Group, "projection_" + self.id) projGroup._f_setattr("id", self.id) projGroup._f_setattr("type", "projection") projGroup._f_setattr("presynapticPopulation", self.presynaptic_population) projGroup._f_setattr("postsynapticPopulation", self.postsynaptic_population) projGroup._f_setattr("synapse", self.synapse) - - h5array = h5file.create_carray(projGroup, self.id, obj=self.connections.array, title="Connections of cells in "+ self.id) - + + h5array = h5file.create_carray( + projGroup, + self.id, + obj=self.connections.array, + title="Connections of cells in " + self.id, + ) + self.connections._add_index_information(h5array) - - + + ## ## TODO: update this to act like ElectricalProjection ## class ElectricalProjectionContainer(neuroml.ElectricalProjection): - - def __init__(self, neuro_lex_id=None, id=None, presynaptic_population=None, postsynaptic_population=None, electrical_connections=None, electrical_connection_instances=None): - - super(self.__class__, self).__init__(neuro_lex_id=neuro_lex_id, id=id, presynaptic_population=presynaptic_population,postsynaptic_population=postsynaptic_population,electrical_connections=electrical_connections,electrical_connection_instances=electrical_connection_instances) - - assert(electrical_connections==None) - assert(electrical_connection_instances==None) + def __init__( + self, + neuro_lex_id=None, + id=None, + presynaptic_population=None, + postsynaptic_population=None, + electrical_connections=None, + electrical_connection_instances=None, + ): + + super(self.__class__, self).__init__( + neuro_lex_id=neuro_lex_id, + id=id, + presynaptic_population=presynaptic_population, + postsynaptic_population=postsynaptic_population, + electrical_connections=electrical_connections, + electrical_connection_instances=electrical_connection_instances, + ) + + assert electrical_connections == None + assert electrical_connection_instances == None self.connections = ConnectionList() self.connection_wds = 
ConnectionList() self.connections.presynaptic_population = presynaptic_population self.connections.postsynaptic_population = postsynaptic_population - - + def __str__(self): - return "Electrical projection (optimized): "+self.id+" from "+self.presynaptic_population+" to "+self.postsynaptic_population+", synapse: "+self.synapse - - + return ( + "Electrical projection (optimized): " + + self.id + + " from " + + self.presynaptic_population + + " to " + + self.postsynaptic_population + + ", synapse: " + + self.synapse + ) + def exportHdf5(self, h5file, h5Group): - #print("Exporting %s as HDF5"%self) - - projGroup = h5file.create_group(h5Group, 'projection_'+self.id) + # print("Exporting %s as HDF5"%self) + + projGroup = h5file.create_group(h5Group, "projection_" + self.id) projGroup._f_setattr("id", self.id) projGroup._f_setattr("type", "projection") projGroup._f_setattr("presynapticPopulation", self.presynaptic_population) projGroup._f_setattr("postsynapticPopulation", self.postsynaptic_population) projGroup._f_setattr("synapse", self.synapse) - - h5array = h5file.create_carray(projGroup, self.id, obj=self.connections.array, title="Connections of cells in "+ self.id) - + + h5array = h5file.create_carray( + projGroup, + self.id, + obj=self.connections.array, + title="Connections of cells in " + self.id, + ) + self.connections._add_index_information(h5array) - - + + class ConnectionList(OptimizedList): presynaptic_population = None postsynaptic_population = None - - def __getitem__(self,index): - id_index = self._get_index_or_add('id',-1) - if id_index>0: + def __getitem__(self, index): + + id_index = self._get_index_or_add("id", -1) + if id_index > 0: id = int(self.array[index][id_index]) - assert(self.array[index][0]==index) + assert self.array[index][0] == index else: id = index - - pre_cell_id = int(self.array[index][self._get_index_or_add('pre_cell_id',1)]) - post_cell_id = int(self.array[index][self._get_index_or_add('post_cell_id',2)]) - pre_segment_id = int(self._get_value(index,'pre_segment_id',0)) - post_segment_id = int(self._get_value(index,'post_segment_id',0)) - pre_fraction_along = float(self._get_value(index,'pre_fraction_along',0.5)) - post_fraction_along = float(self._get_value(index,'post_fraction_along',0.5)) - - input = neuroml.Connection(id=id, - pre_cell_id="../%s/%i/%s"%(self.presynaptic_population,pre_cell_id,"???"), - post_cell_id="../%s/%i/%s"%(self.postsynaptic_population,post_cell_id,"???"), - pre_segment_id=pre_segment_id, - post_segment_id=post_segment_id, - pre_fraction_along=pre_fraction_along, - post_fraction_along=post_fraction_along) - + + pre_cell_id = int(self.array[index][self._get_index_or_add("pre_cell_id", 1)]) + post_cell_id = int(self.array[index][self._get_index_or_add("post_cell_id", 2)]) + pre_segment_id = int(self._get_value(index, "pre_segment_id", 0)) + post_segment_id = int(self._get_value(index, "post_segment_id", 0)) + pre_fraction_along = float(self._get_value(index, "pre_fraction_along", 0.5)) + post_fraction_along = float(self._get_value(index, "post_fraction_along", 0.5)) + + input = neuroml.Connection( + id=id, + pre_cell_id="../%s/%i/%s" + % (self.presynaptic_population, pre_cell_id, "???"), + post_cell_id="../%s/%i/%s" + % (self.postsynaptic_population, post_cell_id, "???"), + pre_segment_id=pre_segment_id, + post_segment_id=post_segment_id, + pre_fraction_along=pre_fraction_along, + post_fraction_along=post_fraction_along, + ) + return input - - - def append(self,conn): - - i = np.array([[conn.id, - conn.get_pre_cell_id(), - 
conn.get_post_cell_id()]], np.float32) - - if len(self)==0: + + def append(self, conn): + + i = np.array( + [[conn.id, conn.get_pre_cell_id(), conn.get_post_cell_id()]], np.float32 + ) + + if len(self) == 0: self.array = i else: self.array = np.concatenate((self.array, i)) - - + + class InputListContainer(neuroml.InputList): - - def __init__(self, neuro_lex_id=None, id=None, populations=None, component=None, input=None): - super(self.__class__, self).__init__(neuro_lex_id=neuro_lex_id, id=id, populations=populations, component=component, input=input) - - assert(input==None) + def __init__( + self, neuro_lex_id=None, id=None, populations=None, component=None, input=None + ): + super(self.__class__, self).__init__( + neuro_lex_id=neuro_lex_id, + id=id, + populations=populations, + component=component, + input=input, + ) + + assert input == None self.input = InputsList() self.input.target_population = populations - - #print("InputListContainer %s created"%self.id) - + + # print("InputListContainer %s created"%self.id) + def __str__(self): - - return "Input list (optimized): "+self.id+" to "+self.populations+", component "+self.component - + + return ( + "Input list (optimized): " + + self.id + + " to " + + self.populations + + ", component " + + self.component + ) + def exportHdf5(self, h5file, h5Group): - #print("Exporting %s as HDF5"%self) - - ilGroup = h5file.create_group(h5Group, 'inputList_'+self.id) + # print("Exporting %s as HDF5"%self) + + ilGroup = h5file.create_group(h5Group, "inputList_" + self.id) ilGroup._f_setattr("id", self.id) ilGroup._f_setattr("component", self.component) ilGroup._f_setattr("population", self.populations) - - - h5array = h5file.create_carray(ilGroup, self.id, obj=self.input.array , title="Locations of inputs in "+ self.id) - + + h5array = h5file.create_carray( + ilGroup, + self.id, + obj=self.input.array, + title="Locations of inputs in " + self.id, + ) + self.input._add_index_information(h5array) - - - - - + + class InputsList(OptimizedList): target_population = None - - def __getitem__(self,index): - - #print(' Getting instance %s'%(index)) - #print self.array - - id = self.array[index][self._get_index_or_add('id',0)] - assert(id==index) - - target_cell_id = int(self.array[index][self._get_index_or_add('target_cell_id',1)]) - segment_id = int(self.array[index][self._get_index_or_add('segment_id',1)]) - fraction_along = float(self.array[index][self._get_index_or_add('fraction_along',1)]) - - input = neuroml.Input(id=index, - target="../%s/%i/%s"%(self.target_population, target_cell_id, "???"), - destination="synapses", - segment_id=segment_id, - fraction_along=fraction_along) - + + def __getitem__(self, index): + + # print(' Getting instance %s'%(index)) + # print self.array + + id = self.array[index][self._get_index_or_add("id", 0)] + assert id == index + + target_cell_id = int( + self.array[index][self._get_index_or_add("target_cell_id", 1)] + ) + segment_id = int(self.array[index][self._get_index_or_add("segment_id", 1)]) + fraction_along = float( + self.array[index][self._get_index_or_add("fraction_along", 1)] + ) + + input = neuroml.Input( + id=index, + target="../%s/%i/%s" % (self.target_population, target_cell_id, "???"), + destination="synapses", + segment_id=segment_id, + fraction_along=fraction_along, + ) + return input - - - def append(self,input): - - #print('Adding input: %s'%input) - - i = np.array([[input.id, - input.get_target_cell_id(), - input.get_segment_id(), - input.get_fraction_along()]], np.float32) - - if len(self)==0: + + def 
append(self, input): + + # print('Adding input: %s'%input) + + i = np.array( + [ + [ + input.id, + input.get_target_cell_id(), + input.get_segment_id(), + input.get_fraction_along(), + ] + ], + np.float32, + ) + + if len(self) == 0: self.array = i else: self.array = np.concatenate((self.array, i)) - -if __name__ == '__main__': - + +if __name__ == "__main__": + from neuroml.loaders import read_neuroml2_file - - - file_name = '../examples/test_files/MediumNet.net.nml' - - nml_doc = read_neuroml2_file(file_name,include_includes=True) - + + file_name = "../examples/test_files/MediumNet.net.nml" + + nml_doc = read_neuroml2_file(file_name, include_includes=True) + print(nml_doc.summary()) - + net0 = nml_doc.networks[0] - + nc = NetworkContainer(id="testnet") - pc0 = PopulationContainer(id="pre", component="iffy",size=4) + pc0 = PopulationContainer(id="pre", component="iffy", size=4) nc.populations.append(pc0) pc = PopulationContainer(id="fake", component="izzy") nc.populations.append(pc) instance = neuroml.Instance(0) - instance.location = neuroml.Location(100,100,33.333) + instance.location = neuroml.Location(100, 100, 33.333) pc.instances.append(instance) instance = neuroml.Instance(1) - instance.location = neuroml.Location(200,200,66.66) + instance.location = neuroml.Location(200, 200, 66.66) pc.instances.append(instance) - - prc = ProjectionContainer(id="proj", presynaptic_population=pc0.id, postsynaptic_population=pc.id,synapse="ampa") - - conn = neuroml.Connection(id=0, - pre_cell_id="../%s/%i/%s"%(pc0.id,0,pc0.component), \ - pre_segment_id=2, \ - pre_fraction_along=0.1, - post_cell_id="../%s/%i/%s"%(pc.id,2,pc.id)) - prc.connections.append(conn) + + prc = ProjectionContainer( + id="proj", + presynaptic_population=pc0.id, + postsynaptic_population=pc.id, + synapse="ampa", + ) + + conn = neuroml.Connection( + id=0, + pre_cell_id="../%s/%i/%s" % (pc0.id, 0, pc0.component), + pre_segment_id=2, + pre_fraction_along=0.1, + post_cell_id="../%s/%i/%s" % (pc.id, 2, pc.id), + ) + prc.connections.append(conn) nc.projections.append(prc) - - ilc = InputListContainer(id="iii",component="CCC",populations=pc.id) + + ilc = InputListContainer(id="iii", component="CCC", populations=pc.id) nc.input_lists.append(ilc) - ilc.input.append(neuroml.Input(id=0,target="../pyramidals_48/37/pyr_4_sym", destination="synapses", segment_id="2", fraction_along="0.3")) - - + ilc.input.append( + neuroml.Input( + id=0, + target="../pyramidals_48/37/pyr_4_sym", + destination="synapses", + segment_id="2", + fraction_along="0.3", + ) + ) + nc2 = NetworkContainer(id="testnet2") - - pc2 = PopulationContainer(id="fake2", component="iaf",size=4) + + pc2 = PopulationContainer(id="fake2", component="iaf", size=4) nc2.populations.append(pc2) - + print(pc) - - nets = [nc,nc2,net0] + + nets = [nc, nc2, net0] nets = [nc] - + for n in nets: - print('\n--------------------------------') + print("\n--------------------------------") print(n) for p in n.populations: - print(" %s"%p) - print(" > %s"%(p.instances)) + print(" %s" % p) + print(" > %s" % (p.instances)) for i in p.instances: - print(" > %s"%i) + print(" > %s" % i) for pr in n.projections: - print(" %s"%pr) - print(" > %s"%(pr.connections)) + print(" %s" % pr) + print(" > %s" % (pr.connections)) for c in pr.connections: - print(" > %s"%c) + print(" > %s" % c) for il in n.input_lists: - print(" %s"%il) + print(" %s" % il) for i in il.input: - print(" > %s"%i) - + print(" > %s" % i) + nml_doc_c = neuroml.NeuroMLDocument(id="ndc") nml_doc_c.networks.append(nc) - print('\n++++++++++') 
+ print("\n++++++++++") print(nml_doc_c.summary()) - - print('\n++++++++++') + + print("\n++++++++++") exit() - - file_name = '../examples/tmp/MediumNet.net.nml.h5' - - nml_doc = read_neuroml2_file(file_name,optimized=True,include_includes=True) - + + file_name = "../examples/tmp/MediumNet.net.nml.h5" + + nml_doc = read_neuroml2_file(file_name, optimized=True, include_includes=True) + print(nml_doc.summary()) - - file_name = '../examples/tmp/MediumNet2.net.nml.h5' - + + file_name = "../examples/tmp/MediumNet2.net.nml.h5" + from neuroml.writers import NeuroMLHdf5Writer - - NeuroMLHdf5Writer.write(nml_doc,file_name) - - print("Rewritten to %s"%file_name) \ No newline at end of file + + NeuroMLHdf5Writer.write(nml_doc, file_name) + + print("Rewritten to %s" % file_name) diff --git a/neuroml/hdf5/NeuroMLHdf5Parser.py b/neuroml/hdf5/NeuroMLHdf5Parser.py index 17e5ed7a..11fb342e 100644 --- a/neuroml/hdf5/NeuroMLHdf5Parser.py +++ b/neuroml/hdf5/NeuroMLHdf5Parser.py @@ -2,7 +2,7 @@ # # A class to parse HDF5 based NeuroML files. # Calls the appropriate methods in DefaultNetworkHandler when cell locations, -# network connections are found. The DefaultNetworkHandler can either print +# network connections are found. The DefaultNetworkHandler can either print # information, or if it's a class overriding DefaultNetworkHandler can create # the appropriate network in a simulator dependent fashion # @@ -13,13 +13,13 @@ # This work has been funded by the Medical Research Council & Wellcome Trust # # - + import logging import sys import inspect -import tables # pytables for HDF5 support -import numpy as np +import tables # pytables for HDF5 support +import numpy as np import neuroml @@ -38,627 +38,767 @@ import os.path -class NeuroMLHdf5Parser(): - - log = logging.getLogger("NeuroMLHdf5Parser") - - currPopulation = "" - currentComponent = "" - totalInstances = 0 - - currentProjectionId = "" - currentProjectionType = "" - currentProjectionPrePop = "" - currentProjectionPostPop = "" - currentSynapse = "" - currentPreSynapse = "" - - currInputList = "" - - nml_doc_extra_elements = None - - optimized=False - - def __init__ (self, netHandler, optimized=False): - self.netHandler = netHandler - self.optimized = optimized - - if not self.optimized: - # For continued use with old API - if not hasattr(self.netHandler,'handle_network') or hasattr(self.netHandler,'handleNetwork'): - self.netHandler.handle_network = self.netHandler.handleNetwork - if not hasattr(self.netHandler,'handle_document_start') or hasattr(self.netHandler,'handleDocumentStart'): - self.netHandler.handle_document_start = self.netHandler.handleDocumentStart - if not hasattr(self.netHandler,'handle_population') or hasattr(self.netHandler,'handlePopulation'): - self.netHandler.handle_population = self.netHandler.handlePopulation - if not hasattr(self.netHandler,'handle_location') or hasattr(self.netHandler,'handleLocation'): - self.netHandler.handle_location = self.netHandler.handleLocation - - if not hasattr(self.netHandler,'handle_projection') or hasattr(self.netHandler,'handleProjection'): - self.netHandler.handle_projection = self.netHandler.handleProjection - if not hasattr(self.netHandler,'finalise_projection') or hasattr(self.netHandler,'finaliseProjection'): - self.netHandler.finalise_projection = self.netHandler.finaliseProjection - - if not hasattr(self.netHandler,'handle_connection') or hasattr(self.netHandler,'handleConnection'): - self.netHandler.handle_connection = self.netHandler.handleConnection - if not 
hasattr(self.netHandler,'handle_input_list') or hasattr(self.netHandler,'handleInputList'): - self.netHandler.handle_input_list = self.netHandler.handleInputList - if not hasattr(self.netHandler,'handle_single_input') or hasattr(self.netHandler,'handleSingleInput'): - self.netHandler.handle_single_input = self.netHandler.handleSingleInput - if not hasattr(self.netHandler,'finalise_input_source') or hasattr(self.netHandler,'finaliseInputSource'): - self.netHandler.finalise_input_source = self.netHandler.finaliseInputSource - - - - def parse(self, filename): - - h5file=tables.open_file(filename,mode='r') - - self.log.info("Opened HDF5 file: %s; id=%s"%(h5file.filename,h5file.root.neuroml._v_attrs.id)) - - if hasattr(h5file.root.neuroml._v_attrs,"neuroml_top_level"): - nml = get_str_attribute_group(h5file.root.neuroml,"neuroml_top_level") - - try: - self.nml_doc_extra_elements = read_neuroml2_string(nml, - include_includes=True, - verbose=False, - base_path=os.path.dirname(os.path.abspath(filename))) - except Exception as e: - print('Unable to read XML string extracted from HDF5 file!') - print(e) - print(nml) - raise e - - self.log.info("Extracted NeuroML2 elements from extra string found in HDF5 file") - - self.parse_group(h5file.root.neuroml) - - h5file.close() - - def get_nml_doc(self): - - if not self.optimized: - nml2_doc = nmlHandler.get_nml_doc() - if self.nml_doc_extra_elements: - add_all_to_document(self.nml_doc_extra_elements,nml2_doc) - return nml2_doc - else: - - nml_doc = neuroml.NeuroMLDocument(id=self.doc_id, notes=self.doc_notes) - if self.nml_doc_extra_elements: - add_all_to_document(self.nml_doc_extra_elements,nml_doc) - nml_doc.networks.append(self.optimizedNetwork) - - return nml_doc - - - - def _is_dataset(self,node): - return node._c_classid == 'ARRAY' or node._c_classid == 'CARRAY' - - def parse_group(self, g): - self.log.debug("Parsing group: "+ str(g)) - - self.start_group(g) - - # Note this ensures groups are parsed before datasets. Important for synapse props - - # Ensure populations parsed first! - for node in g: - - if node._c_classid == 'GROUP' and node._v_name.count('population_')>=1: - self.log.debug("Sub node: "+ str(node)+ ", class: "+ node._c_classid) - self.parse_group(node) - - # Non populations! 
- for node in g: - - if node._c_classid == 'GROUP' and node._v_name.count('population_')==0: - self.log.debug("Sub node (ng): "+ str(node)+ ", class: "+ node._c_classid) - self.parse_group(node) - - for node in g: - self.log.debug("Sub node: "+ str(node)+ ", class: "+ node._c_classid) - - if self._is_dataset(node): - self.parse_dataset(node) - - self.end_group(g) - - def _extract_named_indices(self,d): - - named_indices = {} - - for attrName in d.attrs._v_attrnames: - - if 'column_' in attrName: - val = d.attrs.__getattr__(attrName) - if isinstance(val,str): - name = val - else: - name = val[0] - - index = int(attrName[len('column_'):]) - - named_indices[name] = index - - return named_indices - - - def parse_dataset(self, d): - self.log.debug("Parsing dataset/array: "+ str(d)) - - if self.currPopulation!="": - self.log.debug("Using data for population: "+ self.currPopulation) - self.log.debug("Size is: "+str(d.shape[0])+" rows of: "+ str(d.shape[1])+ " entries") - +class NeuroMLHdf5Parser: + + log = logging.getLogger("NeuroMLHdf5Parser") + + currPopulation = "" + currentComponent = "" + totalInstances = 0 + + currentProjectionId = "" + currentProjectionType = "" + currentProjectionPrePop = "" + currentProjectionPostPop = "" + currentSynapse = "" + currentPreSynapse = "" + + currInputList = "" + + nml_doc_extra_elements = None + + optimized = False + + def __init__(self, netHandler, optimized=False): + self.netHandler = netHandler + self.optimized = optimized + if not self.optimized: - - indexId = -1 - indexX = -1 - indexY = -1 - indexZ = -1 - - for attrName in d.attrs._v_attrnames: - val = d.attrs.__getattr__(attrName) + # For continued use with old API + if not hasattr(self.netHandler, "handle_network") or hasattr( + self.netHandler, "handleNetwork" + ): + self.netHandler.handle_network = self.netHandler.handleNetwork + if not hasattr(self.netHandler, "handle_document_start") or hasattr( + self.netHandler, "handleDocumentStart" + ): + self.netHandler.handle_document_start = ( + self.netHandler.handleDocumentStart + ) + if not hasattr(self.netHandler, "handle_population") or hasattr( + self.netHandler, "handlePopulation" + ): + self.netHandler.handle_population = self.netHandler.handlePopulation + if not hasattr(self.netHandler, "handle_location") or hasattr( + self.netHandler, "handleLocation" + ): + self.netHandler.handle_location = self.netHandler.handleLocation + + if not hasattr(self.netHandler, "handle_projection") or hasattr( + self.netHandler, "handleProjection" + ): + self.netHandler.handle_projection = self.netHandler.handleProjection + if not hasattr(self.netHandler, "finalise_projection") or hasattr( + self.netHandler, "finaliseProjection" + ): + self.netHandler.finalise_projection = self.netHandler.finaliseProjection + + if not hasattr(self.netHandler, "handle_connection") or hasattr( + self.netHandler, "handleConnection" + ): + self.netHandler.handle_connection = self.netHandler.handleConnection + if not hasattr(self.netHandler, "handle_input_list") or hasattr( + self.netHandler, "handleInputList" + ): + self.netHandler.handle_input_list = self.netHandler.handleInputList + if not hasattr(self.netHandler, "handle_single_input") or hasattr( + self.netHandler, "handleSingleInput" + ): + self.netHandler.handle_single_input = self.netHandler.handleSingleInput + if not hasattr(self.netHandler, "finalise_input_source") or hasattr( + self.netHandler, "finaliseInputSource" + ): + self.netHandler.finalise_input_source = ( + self.netHandler.finaliseInputSource + ) + + def parse(self, 
filename): + + h5file = tables.open_file(filename, mode="r") + + self.log.info( + "Opened HDF5 file: %s; id=%s" + % (h5file.filename, h5file.root.neuroml._v_attrs.id) + ) + + if hasattr(h5file.root.neuroml._v_attrs, "neuroml_top_level"): + nml = get_str_attribute_group(h5file.root.neuroml, "neuroml_top_level") + + try: + self.nml_doc_extra_elements = read_neuroml2_string( + nml, + include_includes=True, + verbose=False, + base_path=os.path.dirname(os.path.abspath(filename)), + ) + except Exception as e: + print("Unable to read XML string extracted from HDF5 file!") + print(e) + print(nml) + raise e + + self.log.info( + "Extracted NeuroML2 elements from extra string found in HDF5 file" + ) + + self.parse_group(h5file.root.neuroml) + + h5file.close() + + def get_nml_doc(self): - if val == 'id' or val[0] == 'id': - indexId = int(attrName[len('column_'):]) - if val == 'x' or val[0] == 'x': - indexX = int(attrName[len('column_'):]) - if val == 'y' or val[0] == 'y': - indexY = int(attrName[len('column_'):]) - if val == 'z' or val[0] == 'z': - indexZ = int(attrName[len('column_'):]) - - if indexId<0: - if len(d[0])==4: indexId=0 - if indexX<0: - if len(d[0])==3: indexX=0 - if len(d[0])==4: indexX=1 - if indexY<0: - if len(d[0])==3: indexY=1 - if len(d[0])==4: indexY=2 - if indexZ<0: - if len(d[0])==3: indexZ=2 - if len(d[0])==4: indexY=3 - - for i in range(0, d.shape[0]): - - self.netHandler.handle_location( int(d[i,indexId]) if indexId>=0 else i, \ - self.currPopulation, \ - self.currentComponent, \ - float(d[i,indexX]), \ - float(d[i,indexY]), \ - float(d[i,indexZ])) - else: - - #TODO: a better way to convert??? - a = np.array(d) - self.currOptPopulation.instances = InstanceList(array=a,indices=self._extract_named_indices(d)) - - - elif self.currentProjectionId!="": - self.log.debug("Using data for proj: "+ self.currentProjectionId) - self.log.debug("Size is: "+str(d.shape[0])+" rows of: "+ str(d.shape[1])+ " entries") - if not self.optimized: - - indexId = -1 - indexPreCellId = -1 - indexPreSegId = -1 - indexPreFractAlong= -1 - indexPostCellId = -1 - indexPostSegId = -1 - indexPostFractAlong= -1 - indexWeight= -1 - indexDelay= -1 - - id = -1 - preCellId = -1 - preSegId = 0 - preFractAlong= 0.5 - postCellId = -1 - postSegId = 0 - postFractAlong= 0.5 - - type="projection" - - extraParamIndices = {} - - - for attrName in d.attrs._v_attrnames: - val = d.attrs.__getattr__(attrName) - if not isinstance(val, str): - val = val.decode() - - #self.log.debug("Val of attribute: "+ attrName + " is "+ str(val)) - - if val == 'id' or val[0] == 'id': - indexId = int(attrName[len('column_'):]) - elif val == 'pre_cell_id' or val[0] == 'pre_cell_id': - indexPreCellId = int(attrName[len('column_'):]) - elif val == 'pre_segment_id' or val[0] == 'pre_segment_id': - indexPreSegId = int(attrName[len('column_'):]) - elif val == 'pre_fraction_along' or val[0] == 'pre_fraction_along': - indexPreFractAlong = int(attrName[len('column_'):]) - elif val == 'post_cell_id' or val[0] == 'post_cell_id': - indexPostCellId = int(attrName[len('column_'):]) - elif val == 'post_segment_id' or val[0] == 'post_segment_id': - indexPostSegId = int(attrName[len('column_'):]) - elif val == 'post_fraction_along' or val[0] == 'post_fraction_along': - indexPostFractAlong = int(attrName[len('column_'):]) - elif val == 'weight' or val[0] == 'weight': - indexWeight = int(attrName[len('column_'):]) - elif val == 'delay' or val[0] == 'delay': - indexDelay = int(attrName[len('column_'):]) + nml2_doc = nmlHandler.get_nml_doc() + if 
self.nml_doc_extra_elements: + add_all_to_document(self.nml_doc_extra_elements, nml2_doc) + return nml2_doc + else: + nml_doc = neuroml.NeuroMLDocument(id=self.doc_id, notes=self.doc_notes) if self.nml_doc_extra_elements: - synapse_obj = self.nml_doc_extra_elements.get_by_id(self.currentSynapse) - - pre_synapse_obj = self.nml_doc_extra_elements.get_by_id(self.currentPreSynapse) if len(self.currentPreSynapse)>0 else None - - else: - synapse_obj = None - pre_synapse_obj = None + add_all_to_document(self.nml_doc_extra_elements, nml_doc) + nml_doc.networks.append(self.optimizedNetwork) + + return nml_doc + + def _is_dataset(self, node): + return node._c_classid == "ARRAY" or node._c_classid == "CARRAY" - self.netHandler.handle_projection(self.currentProjectionId, - self.currentProjectionPrePop, - self.currentProjectionPostPop, - self.currentSynapse, - hasWeights=indexWeight>0, - hasDelays=indexDelay>0, - type=self.currentProjectionType, - synapse_obj = synapse_obj, - pre_synapse_obj = pre_synapse_obj) + def parse_group(self, g): + self.log.debug("Parsing group: " + str(g)) + self.start_group(g) - self.log.debug("Cols: Id: %d precell: %d, postcell: %d, pre fract: %d, post fract: %d" % (indexId, indexPreCellId, indexPostCellId, indexPreFractAlong, indexPostFractAlong)) + # Note this ensures groups are parsed before datasets. Important for synapse props - self.log.debug("Extra cols: "+str(extraParamIndices) ) + # Ensure populations parsed first! + for node in g: + + if node._c_classid == "GROUP" and node._v_name.count("population_") >= 1: + self.log.debug("Sub node: " + str(node) + ", class: " + node._c_classid) + self.parse_group(node) + # Non populations! + for node in g: - for i in range(0, d.shape[0]): - row = d[i,:] + if node._c_classid == "GROUP" and node._v_name.count("population_") == 0: + self.log.debug( + "Sub node (ng): " + str(node) + ", class: " + node._c_classid + ) + self.parse_group(node) + + for node in g: + self.log.debug("Sub node: " + str(node) + ", class: " + node._c_classid) - id = int(row[indexId]) if indexId>0 else i - - preCellId = int(row[indexPreCellId]) + if self._is_dataset(node): + self.parse_dataset(node) - if indexPreSegId >= 0: - preSegId = int(row[indexPreSegId]) - if indexPreFractAlong >= 0: - preFractAlong = row[indexPreFractAlong] + self.end_group(g) - postCellId = int(row[indexPostCellId]) + def _extract_named_indices(self, d): - if indexPostSegId >= 0: - postSegId = int(row[indexPostSegId]) - if indexPostFractAlong >= 0: - postFractAlong = row[indexPostFractAlong] + named_indices = {} + for attrName in d.attrs._v_attrnames: - if indexWeight >= 0: - weight = row[indexWeight] + if "column_" in attrName: + val = d.attrs.__getattr__(attrName) + if isinstance(val, str): + name = val else: - weight=1 + name = val[0] + + index = int(attrName[len("column_") :]) + + named_indices[name] = index + + return named_indices + + def parse_dataset(self, d): + self.log.debug("Parsing dataset/array: " + str(d)) + + if self.currPopulation != "": + self.log.debug("Using data for population: " + self.currPopulation) + self.log.debug( + "Size is: " + + str(d.shape[0]) + + " rows of: " + + str(d.shape[1]) + + " entries" + ) + + if not self.optimized: + + indexId = -1 + indexX = -1 + indexY = -1 + indexZ = -1 + + for attrName in d.attrs._v_attrnames: + val = d.attrs.__getattr__(attrName) + + if val == "id" or val[0] == "id": + indexId = int(attrName[len("column_") :]) + if val == "x" or val[0] == "x": + indexX = int(attrName[len("column_") :]) + if val == "y" or val[0] == "y": + 
indexY = int(attrName[len("column_") :]) + if val == "z" or val[0] == "z": + indexZ = int(attrName[len("column_") :]) + + if indexId < 0: + if len(d[0]) == 4: + indexId = 0 + if indexX < 0: + if len(d[0]) == 3: + indexX = 0 + if len(d[0]) == 4: + indexX = 1 + if indexY < 0: + if len(d[0]) == 3: + indexY = 1 + if len(d[0]) == 4: + indexY = 2 + if indexZ < 0: + if len(d[0]) == 3: + indexZ = 2 + if len(d[0]) == 4: + indexY = 3 + + for i in range(0, d.shape[0]): + + self.netHandler.handle_location( + int(d[i, indexId]) if indexId >= 0 else i, + self.currPopulation, + self.currentComponent, + float(d[i, indexX]), + float(d[i, indexY]), + float(d[i, indexZ]), + ) + else: + + # TODO: a better way to convert??? + a = np.array(d) + self.currOptPopulation.instances = InstanceList( + array=a, indices=self._extract_named_indices(d) + ) + + elif self.currentProjectionId != "": + self.log.debug("Using data for proj: " + self.currentProjectionId) + self.log.debug( + "Size is: " + + str(d.shape[0]) + + " rows of: " + + str(d.shape[1]) + + " entries" + ) + + if not self.optimized: + + indexId = -1 + indexPreCellId = -1 + indexPreSegId = -1 + indexPreFractAlong = -1 + indexPostCellId = -1 + indexPostSegId = -1 + indexPostFractAlong = -1 + indexWeight = -1 + indexDelay = -1 + + id = -1 + preCellId = -1 + preSegId = 0 + preFractAlong = 0.5 + postCellId = -1 + postSegId = 0 + postFractAlong = 0.5 + + type = "projection" + + extraParamIndices = {} + + for attrName in d.attrs._v_attrnames: + val = d.attrs.__getattr__(attrName) + if not isinstance(val, str): + val = val.decode() + + # self.log.debug("Val of attribute: "+ attrName + " is "+ str(val)) + + if val == "id" or val[0] == "id": + indexId = int(attrName[len("column_") :]) + elif val == "pre_cell_id" or val[0] == "pre_cell_id": + indexPreCellId = int(attrName[len("column_") :]) + elif val == "pre_segment_id" or val[0] == "pre_segment_id": + indexPreSegId = int(attrName[len("column_") :]) + elif val == "pre_fraction_along" or val[0] == "pre_fraction_along": + indexPreFractAlong = int(attrName[len("column_") :]) + elif val == "post_cell_id" or val[0] == "post_cell_id": + indexPostCellId = int(attrName[len("column_") :]) + elif val == "post_segment_id" or val[0] == "post_segment_id": + indexPostSegId = int(attrName[len("column_") :]) + elif ( + val == "post_fraction_along" or val[0] == "post_fraction_along" + ): + indexPostFractAlong = int(attrName[len("column_") :]) + elif val == "weight" or val[0] == "weight": + indexWeight = int(attrName[len("column_") :]) + elif val == "delay" or val[0] == "delay": + indexDelay = int(attrName[len("column_") :]) + + if self.nml_doc_extra_elements: + synapse_obj = self.nml_doc_extra_elements.get_by_id( + self.currentSynapse + ) + + pre_synapse_obj = ( + self.nml_doc_extra_elements.get_by_id(self.currentPreSynapse) + if len(self.currentPreSynapse) > 0 + else None + ) - if indexDelay >= 0: - delay = row[indexDelay] else: - delay = 0 - - #self.log.debug("Connection %d from %f to %f" % (id, preCellId, postCellId)) - - - self.netHandler.handle_connection(self.currentProjectionId, \ - id, \ - self.currentProjectionPrePop, \ - self.currentProjectionPostPop, \ - self.currentSynapse, \ - preCellId, \ - postCellId, \ - preSegId, \ - preFractAlong, \ - postSegId, \ - postFractAlong, - delay=delay, - weight=weight) - else: - #TODO: a better way to convert??? 
- a = np.array(d) - self.currOptProjection.connections = ConnectionList(array=a,indices=self._extract_named_indices(d)) - - if self.currInputList!="": - self.log.debug("Using data for input list: "+ self.currInputList) - self.log.debug("Size is: "+str(d.shape[0])+" rows of: "+ str(d.shape[1])+ " entries") - - if not self.optimized: + synapse_obj = None + pre_synapse_obj = None + + self.netHandler.handle_projection( + self.currentProjectionId, + self.currentProjectionPrePop, + self.currentProjectionPostPop, + self.currentSynapse, + hasWeights=indexWeight > 0, + hasDelays=indexDelay > 0, + type=self.currentProjectionType, + synapse_obj=synapse_obj, + pre_synapse_obj=pre_synapse_obj, + ) + + self.log.debug( + "Cols: Id: %d precell: %d, postcell: %d, pre fract: %d, post fract: %d" + % ( + indexId, + indexPreCellId, + indexPostCellId, + indexPreFractAlong, + indexPostFractAlong, + ) + ) + + self.log.debug("Extra cols: " + str(extraParamIndices)) + + for i in range(0, d.shape[0]): + row = d[i, :] + + id = int(row[indexId]) if indexId > 0 else i + + preCellId = int(row[indexPreCellId]) + + if indexPreSegId >= 0: + preSegId = int(row[indexPreSegId]) + if indexPreFractAlong >= 0: + preFractAlong = row[indexPreFractAlong] + + postCellId = int(row[indexPostCellId]) + + if indexPostSegId >= 0: + postSegId = int(row[indexPostSegId]) + if indexPostFractAlong >= 0: + postFractAlong = row[indexPostFractAlong] + + if indexWeight >= 0: + weight = row[indexWeight] + else: + weight = 1 + + if indexDelay >= 0: + delay = row[indexDelay] + else: + delay = 0 + + # self.log.debug("Connection %d from %f to %f" % (id, preCellId, postCellId)) + + self.netHandler.handle_connection( + self.currentProjectionId, + id, + self.currentProjectionPrePop, + self.currentProjectionPostPop, + self.currentSynapse, + preCellId, + postCellId, + preSegId, + preFractAlong, + postSegId, + postFractAlong, + delay=delay, + weight=weight, + ) + else: + # TODO: a better way to convert??? 
+ a = np.array(d) + self.currOptProjection.connections = ConnectionList( + array=a, indices=self._extract_named_indices(d) + ) + + if self.currInputList != "": + self.log.debug("Using data for input list: " + self.currInputList) + self.log.debug( + "Size is: " + + str(d.shape[0]) + + " rows of: " + + str(d.shape[1]) + + " entries" + ) + + if not self.optimized: + + indexId = -1 + indexTargetCellId = -1 + indexSegId = -1 + indexFractAlong = -1 + indexWeight = -1 + + segId = 0 + fractAlong = 0.5 + weight = 1 + + for attrName in d.attrs._v_attrnames: + val = d.attrs.__getattr__(attrName) + if not isinstance(val, str): + val = val.decode() + + self.log.debug("Val of attribute: " + attrName + " is " + str(val)) + + if val == "id" or val[0] == "id": + indexId = int(attrName[len("column_") :]) + elif val == "target_cell_id" or val[0] == "target_cell_id": + indexTargetCellId = int(attrName[len("column_") :]) + elif val == "segment_id" or val[0] == "segment_id": + indexSegId = int(attrName[len("column_") :]) + elif val == "fraction_along" or val[0] == "fraction_along": + indexFractAlong = int(attrName[len("column_") :]) + elif val == "weight" or val[0] == "weight": + indexWeight = int(attrName[len("column_") :]) + + for i in range(0, d.shape[0]): + + if indexId >= 0: + id_ = int(d[i, indexId]) + else: + id_ = i + + tid = int(d[i, indexTargetCellId]) + + if indexSegId >= 0: + segId = int(d[i, indexSegId]) + + if indexFractAlong >= 0: + fractAlong = float(d[i, indexFractAlong]) + + if indexWeight >= 0: + weight = float(d[i, indexWeight]) + + self.log.debug("Adding %s, %s, %s" % (tid, segId, fractAlong)) + + self.netHandler.handle_single_input( + self.currInputList, + id_, + tid, + segId=segId, + fract=fractAlong, + weight=weight, + ) + + self.netHandler.finalise_input_source(self.currInputList) - indexId = -1 - indexTargetCellId = -1 - indexSegId = -1 - indexFractAlong= -1 - indexWeight= -1 + else: - segId = 0 - fractAlong= 0.5 - weight=1 + # TODO: a better way to convert??? + a = np.array(d) + self.currOptInputList.input = InputsList( + array=a, indices=self._extract_named_indices(d) + ) + def _get_node_size(self, g, name): - for attrName in d.attrs._v_attrnames: - val = d.attrs.__getattr__(attrName) - if not isinstance(val, str): - val = val.decode() - - self.log.debug("Val of attribute: "+ attrName + " is "+ str(val)) - - if val == 'id' or val[0] == 'id': - indexId = int(attrName[len('column_'):]) - elif val == 'target_cell_id' or val[0] == 'target_cell_id': - indexTargetCellId = int(attrName[len('column_'):]) - elif val == 'segment_id' or val[0] == 'segment_id': - indexSegId = int(attrName[len('column_'):]) - elif val == 'fraction_along' or val[0] == 'fraction_along': - indexFractAlong = int(attrName[len('column_'):]) - elif val == 'weight' or val[0] == 'weight': - indexWeight = int(attrName[len('column_'):]) - - for i in range(0, d.shape[0]): - - if indexId >= 0: - id_ = int(d[i,indexId]) - else: - id_ = i - - tid = int(d[i,indexTargetCellId]) - - if indexSegId >= 0: - segId = int(d[i,indexSegId]) - - if indexFractAlong >= 0: - fractAlong = float(d[i,indexFractAlong]) - - if indexWeight >= 0: - weight = float(d[i,indexWeight]) - - self.log.debug("Adding %s, %s, %s"%(tid,segId,fractAlong)) - - self.netHandler.handle_single_input(self.currInputList, - id_, - tid, - segId = segId, - fract = fractAlong, - weight = weight) - - self.netHandler.finalise_input_source(self.currInputList) - - else: - - #TODO: a better way to convert??? 
- a = np.array(d) - self.currOptInputList.input = InputsList(array=a,indices=self._extract_named_indices(d)) - - def _get_node_size(self, g, name): - size = -1 # Peek ahead for size... for node in g: if self._is_dataset(node) and node.name == name: - size = node.shape[0] - + size = node.shape[0] + if size == -1: size = g._v_attrs.size - + return size - - - def start_group(self, g): - self.log.debug("Going into a group: "+ g._v_name) - - - if g._v_name == 'neuroml': - - if not self.optimized: - self.netHandler.handle_document_start(get_str_attribute_group(g,'id'), - get_str_attribute_group(g,'notes')) - else: - self.doc_id = get_str_attribute_group(g,'id') - self.doc_notes = get_str_attribute_group(g,'notes') - - if g._v_name == 'network': - - if not self.optimized: - self.netHandler.handle_network(get_str_attribute_group(g,'id'), - get_str_attribute_group(g,'notes'), - temperature=get_str_attribute_group(g,'temperature')) - else: - self.optimizedNetwork = NetworkContainer(id=get_str_attribute_group(g,'id'), - notes=get_str_attribute_group(g,'notes'), - temperature=get_str_attribute_group(g,'temperature')) - - if g._v_name.count('population_')>=1: - # TODO: a better check to see if the attribute is a str or numpy.ndarray - self.currPopulation = get_str_attribute_group(g,'id') - self.currentComponent = get_str_attribute_group(g,'component') - - size = self._get_node_size(g,self.currPopulation) - - properties = {} - for fname in g._v_attrs._v_attrnames: - if fname.startswith('property:'): - name = str(fname).split(':')[1] - properties[name] = get_str_attribute_group(g,fname) - - self.log.debug("Found a population: %s, component: %s, size: %s, properties: %s" % \ - (self.currPopulation,self.currentComponent,size,properties)) - - if not self.optimized: - if self.nml_doc_extra_elements: - component_obj = self.nml_doc_extra_elements.get_by_id(self.currentComponent) + + def start_group(self, g): + self.log.debug("Going into a group: " + g._v_name) + + if g._v_name == "neuroml": + + if not self.optimized: + self.netHandler.handle_document_start( + get_str_attribute_group(g, "id"), + get_str_attribute_group(g, "notes"), + ) else: - component_obj = None + self.doc_id = get_str_attribute_group(g, "id") + self.doc_notes = get_str_attribute_group(g, "notes") - # Try for Python3 - try: - args = inspect.getfullargspec(self.netHandler.handle_population)[0] - except AttributeError: - # Fall back for Python 2 - args = inspect.getargspec(self.netHandler.handle_population)[0] + if g._v_name == "network": - if 'properties' in args: - self.netHandler.handle_population(self.currPopulation, self.currentComponent, size, component_obj=component_obj, properties=properties) + if not self.optimized: + self.netHandler.handle_network( + get_str_attribute_group(g, "id"), + get_str_attribute_group(g, "notes"), + temperature=get_str_attribute_group(g, "temperature"), + ) else: - self.netHandler.handle_population(self.currPopulation, self.currentComponent, size, component_obj=component_obj) - else: - self.currOptPopulation = PopulationContainer(id=self.currPopulation, component=self.currentComponent, size=size) - - for p in properties: - self.currOptPopulation.properties.append(neuroml.Property(p,properties[p])) - - self.optimizedNetwork.populations.append(self.currOptPopulation) - - if g._v_name.count('projection_')>=1: - - self.currentProjectionId = get_str_attribute_group(g,'id') - pt = get_str_attribute_group(g,'type') - self.currentProjectionType = pt if pt else "projection" - self.currentProjectionPrePop = 
get_str_attribute_group(g,'presynapticPopulation') - self.currentProjectionPostPop = get_str_attribute_group(g,'postsynapticPopulation') - - if "synapse" in g._v_attrs: - self.currentSynapse = get_str_attribute_group(g,'synapse') - elif "postComponent" in g._v_attrs: - self.currentSynapse = get_str_attribute_group(g,'postComponent') - - if "preComponent" in g._v_attrs: - self.currentPreSynapse = get_str_attribute_group(g,'preComponent') - - - if not self.optimized: - self.log.debug("------ Found a projection: %s, from %s to %s through %s" %(self.currentProjectionId,self.currentProjectionPrePop,self.currentProjectionPostPop,self.currentSynapse)) - else: - - if self.currentProjectionType=='electricalProjection': - raise Exception("Cannot yet export electricalProjections to optimized HDF5 format") - self.currOptProjection = ElectricalProjectionContainer(id=self.currentProjectionId, - presynaptic_population=self.currentProjectionPrePop, - postsynaptic_population=self.currentProjectionPostPop) - - self.optimizedNetwork.electrical_projections.append(self.currOptProjection) - - elif self.currentProjectionType=='continuousProjection': - - raise Exception("Cannot yet export continuousProjections to optimized HDF5 format") - - self.optimizedNetwork.continuous_projections.append(self.currOptProjection) - + self.optimizedNetwork = NetworkContainer( + id=get_str_attribute_group(g, "id"), + notes=get_str_attribute_group(g, "notes"), + temperature=get_str_attribute_group(g, "temperature"), + ) + + if g._v_name.count("population_") >= 1: + # TODO: a better check to see if the attribute is a str or numpy.ndarray + self.currPopulation = get_str_attribute_group(g, "id") + self.currentComponent = get_str_attribute_group(g, "component") + + size = self._get_node_size(g, self.currPopulation) + + properties = {} + for fname in g._v_attrs._v_attrnames: + if fname.startswith("property:"): + name = str(fname).split(":")[1] + properties[name] = get_str_attribute_group(g, fname) + + self.log.debug( + "Found a population: %s, component: %s, size: %s, properties: %s" + % (self.currPopulation, self.currentComponent, size, properties) + ) + + if not self.optimized: + if self.nml_doc_extra_elements: + component_obj = self.nml_doc_extra_elements.get_by_id( + self.currentComponent + ) + else: + component_obj = None + + # Try for Python3 + try: + args = inspect.getfullargspec(self.netHandler.handle_population)[0] + except AttributeError: + # Fall back for Python 2 + args = inspect.getargspec(self.netHandler.handle_population)[0] + + if "properties" in args: + self.netHandler.handle_population( + self.currPopulation, + self.currentComponent, + size, + component_obj=component_obj, + properties=properties, + ) + else: + self.netHandler.handle_population( + self.currPopulation, + self.currentComponent, + size, + component_obj=component_obj, + ) else: - self.currOptProjection = ProjectionContainer(id=self.currentProjectionId, - presynaptic_population=self.currentProjectionPrePop, - postsynaptic_population=self.currentProjectionPostPop, - synapse=self.currentSynapse) - - self.optimizedNetwork.projections.append(self.currOptProjection) - - - if g._v_name.count('inputList_')>=1 or g._v_name.count('input_list_')>=1: # inputList_ preferred - # TODO: a better check to see if the attribute is a str or numpy.ndarray - self.currInputList = get_str_attribute_group(g,'id') - component = get_str_attribute_group(g,'component') - population = get_str_attribute_group(g,'population') - - size = self._get_node_size(g,self.currInputList) - - - if 
not self.optimized: - if self.nml_doc_extra_elements: - input_comp_obj = self.nml_doc_extra_elements.get_by_id(component) + self.currOptPopulation = PopulationContainer( + id=self.currPopulation, component=self.currentComponent, size=size + ) + + for p in properties: + self.currOptPopulation.properties.append( + neuroml.Property(p, properties[p]) + ) + + self.optimizedNetwork.populations.append(self.currOptPopulation) + + if g._v_name.count("projection_") >= 1: + + self.currentProjectionId = get_str_attribute_group(g, "id") + pt = get_str_attribute_group(g, "type") + self.currentProjectionType = pt if pt else "projection" + self.currentProjectionPrePop = get_str_attribute_group( + g, "presynapticPopulation" + ) + self.currentProjectionPostPop = get_str_attribute_group( + g, "postsynapticPopulation" + ) + + if "synapse" in g._v_attrs: + self.currentSynapse = get_str_attribute_group(g, "synapse") + elif "postComponent" in g._v_attrs: + self.currentSynapse = get_str_attribute_group(g, "postComponent") + + if "preComponent" in g._v_attrs: + self.currentPreSynapse = get_str_attribute_group(g, "preComponent") + + if not self.optimized: + self.log.debug( + "------ Found a projection: %s, from %s to %s through %s" + % ( + self.currentProjectionId, + self.currentProjectionPrePop, + self.currentProjectionPostPop, + self.currentSynapse, + ) + ) else: - input_comp_obj = None - self.log.debug("Found an inputList: "+ self.currInputList+", component: "+component+", population: "+population+", size: "+ str(size)) + if self.currentProjectionType == "electricalProjection": + raise Exception( + "Cannot yet export electricalProjections to optimized HDF5 format" + ) + self.currOptProjection = ElectricalProjectionContainer( + id=self.currentProjectionId, + presynaptic_population=self.currentProjectionPrePop, + postsynaptic_population=self.currentProjectionPostPop, + ) - self.netHandler.handle_input_list(self.currInputList, population, component, size, input_comp_obj=input_comp_obj) - else: - self.currOptInputList = InputListContainer(id=self.currInputList, component=component, populations=population) - self.optimizedNetwork.input_lists.append(self.currOptInputList) - - - def end_group(self, g): - self.log.debug("Coming out of a group: "+ str(g)) - - if g._v_name.count('population_')>=1: - self.log.debug("End of population: "+ self.currPopulation+", cell type: "+self.currentComponent) - self.currPopulation ="" - self.currentComponent = "" - - if g._v_name.count('projection_')>=1: - - if not self.optimized: - self.netHandler.finalise_projection(self.currentProjectionId, - self.currentProjectionPrePop, - self.currentProjectionPostPop, - synapse=self.currentSynapse, - type=self.currentProjectionType) - - self.currentProjectionId = "" - self.currentProjectionType = "" - self.currentProjectionPrePop = "" - self.currentProjectionPostPop = "" - self.currentSynapse = "" - self.currentPreSynapse = "" - - if g._v_name.count('inputList_')>=1 or g._v_name.count('input_list_')>=1: - self.currInputList ="" - - - - -if __name__ == '__main__': - - - file_name = '../examples/test_files/complete.nml.h5' - #file_name = '../../../neuroConstruct/osb/showcase/NetPyNEShowcase/NeuroML2/scaling/Balanced.net.nml.h5' - - logging.basicConfig(level=logging.DEBUG, format="%(name)-19s %(levelname)-5s - %(message)s") + self.optimizedNetwork.electrical_projections.append( + self.currOptProjection + ) + + elif self.currentProjectionType == "continuousProjection": + + raise Exception( + "Cannot yet export continuousProjections to optimized HDF5 
format" + ) + + self.optimizedNetwork.continuous_projections.append( + self.currOptProjection + ) + + else: + self.currOptProjection = ProjectionContainer( + id=self.currentProjectionId, + presynaptic_population=self.currentProjectionPrePop, + postsynaptic_population=self.currentProjectionPostPop, + synapse=self.currentSynapse, + ) + + self.optimizedNetwork.projections.append(self.currOptProjection) + + if ( + g._v_name.count("inputList_") >= 1 or g._v_name.count("input_list_") >= 1 + ): # inputList_ preferred + # TODO: a better check to see if the attribute is a str or numpy.ndarray + self.currInputList = get_str_attribute_group(g, "id") + component = get_str_attribute_group(g, "component") + population = get_str_attribute_group(g, "population") + + size = self._get_node_size(g, self.currInputList) + + if not self.optimized: + if self.nml_doc_extra_elements: + input_comp_obj = self.nml_doc_extra_elements.get_by_id(component) + else: + input_comp_obj = None + + self.log.debug( + "Found an inputList: " + + self.currInputList + + ", component: " + + component + + ", population: " + + population + + ", size: " + + str(size) + ) + + self.netHandler.handle_input_list( + self.currInputList, + population, + component, + size, + input_comp_obj=input_comp_obj, + ) + else: + self.currOptInputList = InputListContainer( + id=self.currInputList, component=component, populations=population + ) + self.optimizedNetwork.input_lists.append(self.currOptInputList) + + def end_group(self, g): + self.log.debug("Coming out of a group: " + str(g)) + + if g._v_name.count("population_") >= 1: + self.log.debug( + "End of population: " + + self.currPopulation + + ", cell type: " + + self.currentComponent + ) + self.currPopulation = "" + self.currentComponent = "" + + if g._v_name.count("projection_") >= 1: + + if not self.optimized: + self.netHandler.finalise_projection( + self.currentProjectionId, + self.currentProjectionPrePop, + self.currentProjectionPostPop, + synapse=self.currentSynapse, + type=self.currentProjectionType, + ) + + self.currentProjectionId = "" + self.currentProjectionType = "" + self.currentProjectionPrePop = "" + self.currentProjectionPostPop = "" + self.currentSynapse = "" + self.currentPreSynapse = "" + + if g._v_name.count("inputList_") >= 1 or g._v_name.count("input_list_") >= 1: + self.currInputList = "" + + +if __name__ == "__main__": + + file_name = "../examples/test_files/complete.nml.h5" + # file_name = '../../../neuroConstruct/osb/showcase/NetPyNEShowcase/NeuroML2/scaling/Balanced.net.nml.h5' + + logging.basicConfig( + level=logging.DEBUG, format="%(name)-19s %(levelname)-5s - %(message)s" + ) from neuroml.hdf5.DefaultNetworkHandler import DefaultNetworkHandler - nmlHandler = DefaultNetworkHandler() + nmlHandler = DefaultNetworkHandler() - currParser = NeuroMLHdf5Parser(nmlHandler) # The HDF5 handler knows of the structure of NetworkML and calls appropriate functions in NetworkHandler + currParser = NeuroMLHdf5Parser( + nmlHandler + ) # The HDF5 handler knows of the structure of NetworkML and calls appropriate functions in NetworkHandler currParser.parse(file_name) - - print('-------------------------------\n\n') - + + print("-------------------------------\n\n") + from neuroml.hdf5.NetworkBuilder import NetworkBuilder - nmlHandler = NetworkBuilder() + nmlHandler = NetworkBuilder() + + currParser = NeuroMLHdf5Parser(nmlHandler) - currParser = NeuroMLHdf5Parser(nmlHandler) - currParser.parse(file_name) - + nml_doc = currParser.get_nml_doc() summary0 = nml_doc.summary() print(summary0) 
exit() - print('-------------------------------\n\n') - - currParser = NeuroMLHdf5Parser(None,optimized=True) - + print("-------------------------------\n\n") + + currParser = NeuroMLHdf5Parser(None, optimized=True) + currParser.parse(file_name) - + nml_doc = currParser.get_nml_doc() print(nml_doc.summary()) - - - - - - - diff --git a/neuroml/hdf5/NeuroMLXMLParser.py b/neuroml/hdf5/NeuroMLXMLParser.py index 43160e1e..1958b118 100644 --- a/neuroml/hdf5/NeuroMLXMLParser.py +++ b/neuroml/hdf5/NeuroMLXMLParser.py @@ -2,7 +2,7 @@ # # A class to handle XML based NeuroML files. # Calls the appropriate methods in DefaultNetworkHandler when cell locations, -# network connections are found. The NetworkHandler can either print +# network connections are found. The NetworkHandler can either print # information, or if it's a class overriding NetworkHandler can create # the appropriate network in a simulator dependent fashion # @@ -10,428 +10,525 @@ # Author: Padraig Gleeson # # - + import logging import inspect -class NeuroMLXMLParser(): - - log = logging.getLogger("NeuroMLXMLParser") - - currPopulation = "" - currentComponent = "" - totalInstances = 0 - - currentProjectionId = "" - currentProjectionPrePop = "" - currentProjectionPostPop = "" - currentSynapse = "" - - - def __init__ (self, netHandler): - - self.netHandler = netHandler - - # For continued use with old API - if not hasattr(self.netHandler,'handle_network') or hasattr(self.netHandler,'handleNetwork'): - self.netHandler.handle_network = self.netHandler.handleNetwork - if not hasattr(self.netHandler,'handle_document_start') or hasattr(self.netHandler,'handleDocumentStart'): - self.netHandler.handle_document_start = self.netHandler.handleDocumentStart - if not hasattr(self.netHandler,'handle_population') or hasattr(self.netHandler,'handlePopulation'): - self.netHandler.handle_population = self.netHandler.handlePopulation - if not hasattr(self.netHandler,'handle_location') or hasattr(self.netHandler,'handleLocation'): - self.netHandler.handle_location = self.netHandler.handleLocation - - if not hasattr(self.netHandler,'handle_projection') or hasattr(self.netHandler,'handleProjection'): - self.netHandler.handle_projection = self.netHandler.handleProjection - if not hasattr(self.netHandler,'finalise_projection') or hasattr(self.netHandler,'finaliseProjection'): - self.netHandler.finalise_projection = self.netHandler.finaliseProjection - - if not hasattr(self.netHandler,'handle_connection') or hasattr(self.netHandler,'handleConnection'): - self.netHandler.handle_connection = self.netHandler.handleConnection - if not hasattr(self.netHandler,'handle_input_list') or hasattr(self.netHandler,'handleInputList'): - self.netHandler.handle_input_list = self.netHandler.handleInputList - if not hasattr(self.netHandler,'handle_single_input') or hasattr(self.netHandler,'handleSingleInput'): - self.netHandler.handle_single_input = self.netHandler.handleSingleInput - if not hasattr(self.netHandler,'finalise_input_source') or hasattr(self.netHandler,'finaliseInputSource'): - self.netHandler.finalise_input_source = self.netHandler.finaliseInputSource - - - - - def _parse_delay(self, delay_string): - if delay_string.endswith('ms'): - return float(delay_string[:-2].strip()) - elif delay_string.endswith('s'): - return float(delay_string[:-1].strip())*1000.0 - else: - print("Can't parse string for delay: %s"%delay_string) - exit(1) - - def parse(self, filename): - - print("Parsing: %s"%filename) - - import neuroml.loaders as loaders - - self.nml_doc = 
loaders.read_neuroml2_file(filename, - include_includes=True, - already_included=[]) - - print("Loaded: %s as NeuroMLDocument"%filename) - - self.netHandler.handle_document_start(self.nml_doc.id,self.nml_doc.notes) - - for network in self.nml_doc.networks: - - self.netHandler.handle_network(network.id,network.notes, temperature=network.temperature) - - for population in network.populations: - - component_obj = self.nml_doc.get_by_id(population.component) - properties = {} - for p in population.properties: - properties[p.tag]=p.value - - if len(population.instances)>0 and population.type=='populationList': - - # Try for Python3 - try: - args = inspect.getfullargspec(self.netHandler.handle_population)[0] - except AttributeError: - # Fall back for Python 2 - args = inspect.getargspec(self.netHandler.handle_population)[0] - - if 'properties' in args: - self.netHandler.handle_population(population.id, - population.component, - len(population.instances), - component_obj=component_obj, - properties=properties) - else: - self.netHandler.handle_population(population.id, - population.component, - len(population.instances), - component_obj=component_obj) - - for inst in population.instances: - - loc = inst.location - self.netHandler.handle_location(inst.id, \ - population.id, \ - population.component, \ - loc.x, \ - loc.y, \ - loc.z) - else: - # Try for Python3 - try: - args = inspect.getfullargspec(self.netHandler.handle_population)[0] - except AttributeError: - # Fall back for Python 2 - args = inspect.getargspec(self.netHandler.handle_population)[0] - - if 'properties' in args: - self.netHandler.handle_population(population.id, - population.component, - population.size, - component_obj=component_obj, - properties=properties) + +class NeuroMLXMLParser: + + log = logging.getLogger("NeuroMLXMLParser") + + currPopulation = "" + currentComponent = "" + totalInstances = 0 + + currentProjectionId = "" + currentProjectionPrePop = "" + currentProjectionPostPop = "" + currentSynapse = "" + + def __init__(self, netHandler): + + self.netHandler = netHandler + + # For continued use with old API + if not hasattr(self.netHandler, "handle_network") or hasattr( + self.netHandler, "handleNetwork" + ): + self.netHandler.handle_network = self.netHandler.handleNetwork + if not hasattr(self.netHandler, "handle_document_start") or hasattr( + self.netHandler, "handleDocumentStart" + ): + self.netHandler.handle_document_start = self.netHandler.handleDocumentStart + if not hasattr(self.netHandler, "handle_population") or hasattr( + self.netHandler, "handlePopulation" + ): + self.netHandler.handle_population = self.netHandler.handlePopulation + if not hasattr(self.netHandler, "handle_location") or hasattr( + self.netHandler, "handleLocation" + ): + self.netHandler.handle_location = self.netHandler.handleLocation + + if not hasattr(self.netHandler, "handle_projection") or hasattr( + self.netHandler, "handleProjection" + ): + self.netHandler.handle_projection = self.netHandler.handleProjection + if not hasattr(self.netHandler, "finalise_projection") or hasattr( + self.netHandler, "finaliseProjection" + ): + self.netHandler.finalise_projection = self.netHandler.finaliseProjection + + if not hasattr(self.netHandler, "handle_connection") or hasattr( + self.netHandler, "handleConnection" + ): + self.netHandler.handle_connection = self.netHandler.handleConnection + if not hasattr(self.netHandler, "handle_input_list") or hasattr( + self.netHandler, "handleInputList" + ): + self.netHandler.handle_input_list = 
self.netHandler.handleInputList + if not hasattr(self.netHandler, "handle_single_input") or hasattr( + self.netHandler, "handleSingleInput" + ): + self.netHandler.handle_single_input = self.netHandler.handleSingleInput + if not hasattr(self.netHandler, "finalise_input_source") or hasattr( + self.netHandler, "finaliseInputSource" + ): + self.netHandler.finalise_input_source = self.netHandler.finaliseInputSource + + def _parse_delay(self, delay_string): + if delay_string.endswith("ms"): + return float(delay_string[:-2].strip()) + elif delay_string.endswith("s"): + return float(delay_string[:-1].strip()) * 1000.0 + else: + print("Can't parse string for delay: %s" % delay_string) + exit(1) + + def parse(self, filename): + + print("Parsing: %s" % filename) + + import neuroml.loaders as loaders + + self.nml_doc = loaders.read_neuroml2_file( + filename, include_includes=True, already_included=[] + ) + + print("Loaded: %s as NeuroMLDocument" % filename) + + self.netHandler.handle_document_start(self.nml_doc.id, self.nml_doc.notes) + + for network in self.nml_doc.networks: + + self.netHandler.handle_network( + network.id, network.notes, temperature=network.temperature + ) + + for population in network.populations: + + component_obj = self.nml_doc.get_by_id(population.component) + properties = {} + for p in population.properties: + properties[p.tag] = p.value + + if ( + len(population.instances) > 0 + and population.type == "populationList" + ): + + # Try for Python3 + try: + args = inspect.getfullargspec( + self.netHandler.handle_population + )[0] + except AttributeError: + # Fall back for Python 2 + args = inspect.getargspec(self.netHandler.handle_population)[0] + + if "properties" in args: + self.netHandler.handle_population( + population.id, + population.component, + len(population.instances), + component_obj=component_obj, + properties=properties, + ) + else: + self.netHandler.handle_population( + population.id, + population.component, + len(population.instances), + component_obj=component_obj, + ) + + for inst in population.instances: + + loc = inst.location + self.netHandler.handle_location( + inst.id, + population.id, + population.component, + loc.x, + loc.y, + loc.z, + ) else: - self.netHandler.handle_population(population.id, - population.component, - population.size, - component_obj=component_obj) - - for i in range(population.size): - self.netHandler.handle_location(i, \ - population.id, \ - population.component, \ - None, \ - None, \ - None) - - for projection in network.projections: - - synapse_obj = self.nml_doc.get_by_id(projection.synapse) - - self.netHandler.handle_projection(projection.id, - projection.presynaptic_population, - projection.postsynaptic_population, - projection.synapse, - synapse_obj=synapse_obj) - - for connection in projection.connections: - - self.netHandler.handle_connection(projection.id, \ - connection.id, \ - projection.presynaptic_population, - projection.postsynaptic_population, - projection.synapse, \ - preCellId=connection.get_pre_cell_id(), \ - postCellId=connection.get_post_cell_id(), \ - preSegId=int(connection.pre_segment_id), \ - postSegId=int(connection.post_segment_id), \ - preFract=float(connection.pre_fraction_along), \ - postFract=float(connection.post_fraction_along), \ - delay=0, - weight=1) - - for connection in projection.connection_wds: - - self.netHandler.handle_connection(projection.id, \ - connection.id, \ - projection.presynaptic_population, - projection.postsynaptic_population, - projection.synapse, \ - 
preCellId=connection.get_pre_cell_id(), \ - postCellId=connection.get_post_cell_id(), \ - preSegId=int(connection.pre_segment_id), \ - postSegId=int(connection.post_segment_id), \ - preFract=float(connection.pre_fraction_along), \ - postFract=float(connection.post_fraction_along), \ - delay=self._parse_delay(connection.delay), - weight=float(connection.weight)) - - - for ep in network.electrical_projections: - - synapse = None - - for connection in ep.electrical_connections: - if synapse != None and synapse != connection.synapse: - raise Exception("There are different synapses for connections inside: %s!"%ep) - synapse = connection.synapse - for connection in ep.electrical_connection_instances: - if synapse != None and synapse != connection.synapse: - raise Exception("There are different synapses for connections inside: %s!"%ep) - synapse = connection.synapse - for connection in ep.electrical_connection_instance_ws: - if synapse != None and synapse != connection.synapse: - raise Exception("There are different synapses for connections inside: %s!"%ep) - synapse = connection.synapse - - synapse_obj = self.nml_doc.get_by_id(synapse) - - self.netHandler.handle_projection(ep.id, - ep.presynaptic_population, - ep.postsynaptic_population, - synapse, - type="electricalProjection", - synapse_obj=synapse_obj) - - for connection in ep.electrical_connections: - - self.netHandler.handle_connection(ep.id, \ - connection.id, \ - ep.presynaptic_population, - ep.postsynaptic_population, - connection.synapse, \ - preCellId=connection.get_pre_cell_id(), \ - postCellId=connection.get_post_cell_id(), \ - preSegId=int(connection.pre_segment), \ - postSegId=int(connection.post_segment), \ - preFract=float(connection.pre_fraction_along), \ - postFract=float(connection.post_fraction_along)) - - for connection in ep.electrical_connection_instances: - - self.netHandler.handle_connection(ep.id, \ - connection.id, \ - ep.presynaptic_population, - ep.postsynaptic_population, - connection.synapse, \ - preCellId=connection.get_pre_cell_id(), \ - postCellId=connection.get_post_cell_id(), \ - preSegId=int(connection.pre_segment), \ - postSegId=int(connection.post_segment), \ - preFract=float(connection.pre_fraction_along), \ - postFract=float(connection.post_fraction_along)) - - for connection in ep.electrical_connection_instance_ws: - - self.netHandler.handle_connection(ep.id, \ - connection.id, \ - ep.presynaptic_population, - ep.postsynaptic_population, - connection.synapse, \ - preCellId=connection.get_pre_cell_id(), \ - postCellId=connection.get_post_cell_id(), \ - preSegId=int(connection.pre_segment), \ - postSegId=int(connection.post_segment), \ - preFract=float(connection.pre_fraction_along), \ - postFract=float(connection.post_fraction_along), - weight=connection.get_weight()) - - for cp in network.continuous_projections: - - pre_comp = None - post_comp = None - - - for connection in cp.continuous_connections: - if pre_comp != None and pre_comp != connection.pre_component: - raise Exception("There are different pre components for connections inside: %s!"%cp) - pre_comp = connection.pre_component - if post_comp != None and post_comp != connection.post_component: - raise Exception("There are different post components for connections inside: %s!"%cp) - post_comp = connection.post_component - - for connection in cp.continuous_connection_instances: - if pre_comp != None and pre_comp != connection.pre_component: - raise Exception("There are different pre components for connections inside: %s!"%cp) - pre_comp = 
connection.pre_component - if post_comp != None and post_comp != connection.post_component: - raise Exception("There are different post components for connections inside: %s!"%cp) - post_comp = connection.post_component - - for connection in cp.continuous_connection_instance_ws: - if pre_comp != None and pre_comp != connection.pre_component: - raise Exception("There are different pre components for connections inside: %s!"%cp) - pre_comp = connection.pre_component - if post_comp != None and post_comp != connection.post_component: - raise Exception("There are different post components for connections inside: %s!"%cp) - post_comp = connection.post_component - - pre_obj = self.nml_doc.get_by_id(pre_comp) - post_obj = self.nml_doc.get_by_id(post_comp) - - synapse = pre_comp - - self.netHandler.handle_projection(cp.id, - cp.presynaptic_population, - cp.postsynaptic_population, - synapse, - type="continuousProjection", - pre_synapse_obj=pre_obj, - synapse_obj=post_obj) - - for connection in cp.continuous_connections: - - self.netHandler.handle_connection(cp.id, \ - connection.id, \ - cp.presynaptic_population, - cp.postsynaptic_population, - synapseType=None, \ - preCellId=connection.get_pre_cell_id(), \ - postCellId=connection.get_post_cell_id(), \ - preSegId=int(connection.pre_segment), \ - postSegId=int(connection.post_segment), \ - preFract=float(connection.pre_fraction_along), \ - postFract=float(connection.post_fraction_along)) - - for connection in cp.continuous_connection_instances: - - self.netHandler.handle_connection(cp.id, \ - connection.id, \ - cp.presynaptic_population, - cp.postsynaptic_population, - synapseType=None, \ - preCellId=connection.get_pre_cell_id(), \ - postCellId=connection.get_post_cell_id(), \ - preSegId=int(connection.pre_segment), \ - postSegId=int(connection.post_segment), \ - preFract=float(connection.pre_fraction_along), \ - postFract=float(connection.post_fraction_along)) - - for connection in cp.continuous_connection_instance_ws: - - self.netHandler.handle_connection(cp.id, \ - connection.id, \ - cp.presynaptic_population, - cp.postsynaptic_population, - synapseType=None, \ - preCellId=connection.get_pre_cell_id(), \ - postCellId=connection.get_post_cell_id(), \ - preSegId=int(connection.pre_segment), \ - postSegId=int(connection.post_segment), \ - preFract=float(connection.pre_fraction_along), \ - postFract=float(connection.post_fraction_along), - weight=connection.get_weight()) - - - for input_list in network.input_lists: - - input_comp_obj = self.nml_doc.get_by_id(input_list.component) - - self.netHandler.handle_input_list(input_list.id, input_list.populations, input_list.component, len(input_list.input),input_comp_obj=input_comp_obj) - - for input in input_list.input: - - self.netHandler.handle_single_input(input_list.id, - input.id, - cellId = input.get_target_cell_id(), - segId = input.get_segment_id(), - fract = input.get_fraction_along()) - for input_w in input_list.input_ws: - - self.netHandler.handle_single_input(input_list.id, - input_w.id, - cellId = input_w.get_target_cell_id(), - segId = input_w.get_segment_id(), - fract = input_w.get_fraction_along(), - weight = input_w.get_weight()) - - self.netHandler.finalise_input_source(input_list.id) - - for explicitInput in network.explicit_inputs: - - list_name = 'INPUT_%s_%s'%(explicitInput.input,explicitInput.target.replace('[','_').replace(']','_')) - if list_name.endswith('_'): list_name=list_name[:-1] - - pop = explicitInput.target.split('[')[0] - - input_comp_obj = 
self.nml_doc.get_by_id(explicitInput.input) - - self.netHandler.handle_input_list(list_name, pop, explicitInput.input, 1,input_comp_obj=input_comp_obj) - - self.netHandler.handle_single_input(list_name, - 0, - cellId = explicitInput.get_target_cell_id(), - segId = 0, - fract = 0.5) - - self.netHandler.finalise_input_source(list_name) - - - -if __name__ == '__main__': - - - file_name = '../examples/tmp/testh5.nml' - - logging.basicConfig(level=logging.INFO, format="%(name)-19s %(levelname)-5s - %(message)s") + # Try for Python3 + try: + args = inspect.getfullargspec( + self.netHandler.handle_population + )[0] + except AttributeError: + # Fall back for Python 2 + args = inspect.getargspec(self.netHandler.handle_population)[0] + + if "properties" in args: + self.netHandler.handle_population( + population.id, + population.component, + population.size, + component_obj=component_obj, + properties=properties, + ) + else: + self.netHandler.handle_population( + population.id, + population.component, + population.size, + component_obj=component_obj, + ) + + for i in range(population.size): + self.netHandler.handle_location( + i, population.id, population.component, None, None, None + ) + + for projection in network.projections: + + synapse_obj = self.nml_doc.get_by_id(projection.synapse) + + self.netHandler.handle_projection( + projection.id, + projection.presynaptic_population, + projection.postsynaptic_population, + projection.synapse, + synapse_obj=synapse_obj, + ) + + for connection in projection.connections: + + self.netHandler.handle_connection( + projection.id, + connection.id, + projection.presynaptic_population, + projection.postsynaptic_population, + projection.synapse, + preCellId=connection.get_pre_cell_id(), + postCellId=connection.get_post_cell_id(), + preSegId=int(connection.pre_segment_id), + postSegId=int(connection.post_segment_id), + preFract=float(connection.pre_fraction_along), + postFract=float(connection.post_fraction_along), + delay=0, + weight=1, + ) + + for connection in projection.connection_wds: + + self.netHandler.handle_connection( + projection.id, + connection.id, + projection.presynaptic_population, + projection.postsynaptic_population, + projection.synapse, + preCellId=connection.get_pre_cell_id(), + postCellId=connection.get_post_cell_id(), + preSegId=int(connection.pre_segment_id), + postSegId=int(connection.post_segment_id), + preFract=float(connection.pre_fraction_along), + postFract=float(connection.post_fraction_along), + delay=self._parse_delay(connection.delay), + weight=float(connection.weight), + ) + + for ep in network.electrical_projections: + + synapse = None + + for connection in ep.electrical_connections: + if synapse != None and synapse != connection.synapse: + raise Exception( + "There are different synapses for connections inside: %s!" + % ep + ) + synapse = connection.synapse + for connection in ep.electrical_connection_instances: + if synapse != None and synapse != connection.synapse: + raise Exception( + "There are different synapses for connections inside: %s!" + % ep + ) + synapse = connection.synapse + for connection in ep.electrical_connection_instance_ws: + if synapse != None and synapse != connection.synapse: + raise Exception( + "There are different synapses for connections inside: %s!" 
+ % ep + ) + synapse = connection.synapse + + synapse_obj = self.nml_doc.get_by_id(synapse) + + self.netHandler.handle_projection( + ep.id, + ep.presynaptic_population, + ep.postsynaptic_population, + synapse, + type="electricalProjection", + synapse_obj=synapse_obj, + ) + + for connection in ep.electrical_connections: + + self.netHandler.handle_connection( + ep.id, + connection.id, + ep.presynaptic_population, + ep.postsynaptic_population, + connection.synapse, + preCellId=connection.get_pre_cell_id(), + postCellId=connection.get_post_cell_id(), + preSegId=int(connection.pre_segment), + postSegId=int(connection.post_segment), + preFract=float(connection.pre_fraction_along), + postFract=float(connection.post_fraction_along), + ) + + for connection in ep.electrical_connection_instances: + + self.netHandler.handle_connection( + ep.id, + connection.id, + ep.presynaptic_population, + ep.postsynaptic_population, + connection.synapse, + preCellId=connection.get_pre_cell_id(), + postCellId=connection.get_post_cell_id(), + preSegId=int(connection.pre_segment), + postSegId=int(connection.post_segment), + preFract=float(connection.pre_fraction_along), + postFract=float(connection.post_fraction_along), + ) + + for connection in ep.electrical_connection_instance_ws: + + self.netHandler.handle_connection( + ep.id, + connection.id, + ep.presynaptic_population, + ep.postsynaptic_population, + connection.synapse, + preCellId=connection.get_pre_cell_id(), + postCellId=connection.get_post_cell_id(), + preSegId=int(connection.pre_segment), + postSegId=int(connection.post_segment), + preFract=float(connection.pre_fraction_along), + postFract=float(connection.post_fraction_along), + weight=connection.get_weight(), + ) + + for cp in network.continuous_projections: + + pre_comp = None + post_comp = None + + for connection in cp.continuous_connections: + if pre_comp != None and pre_comp != connection.pre_component: + raise Exception( + "There are different pre components for connections inside: %s!" + % cp + ) + pre_comp = connection.pre_component + if post_comp != None and post_comp != connection.post_component: + raise Exception( + "There are different post components for connections inside: %s!" + % cp + ) + post_comp = connection.post_component + + for connection in cp.continuous_connection_instances: + if pre_comp != None and pre_comp != connection.pre_component: + raise Exception( + "There are different pre components for connections inside: %s!" + % cp + ) + pre_comp = connection.pre_component + if post_comp != None and post_comp != connection.post_component: + raise Exception( + "There are different post components for connections inside: %s!" + % cp + ) + post_comp = connection.post_component + + for connection in cp.continuous_connection_instance_ws: + if pre_comp != None and pre_comp != connection.pre_component: + raise Exception( + "There are different pre components for connections inside: %s!" + % cp + ) + pre_comp = connection.pre_component + if post_comp != None and post_comp != connection.post_component: + raise Exception( + "There are different post components for connections inside: %s!" 
+ % cp + ) + post_comp = connection.post_component + + pre_obj = self.nml_doc.get_by_id(pre_comp) + post_obj = self.nml_doc.get_by_id(post_comp) + + synapse = pre_comp + + self.netHandler.handle_projection( + cp.id, + cp.presynaptic_population, + cp.postsynaptic_population, + synapse, + type="continuousProjection", + pre_synapse_obj=pre_obj, + synapse_obj=post_obj, + ) + + for connection in cp.continuous_connections: + + self.netHandler.handle_connection( + cp.id, + connection.id, + cp.presynaptic_population, + cp.postsynaptic_population, + synapseType=None, + preCellId=connection.get_pre_cell_id(), + postCellId=connection.get_post_cell_id(), + preSegId=int(connection.pre_segment), + postSegId=int(connection.post_segment), + preFract=float(connection.pre_fraction_along), + postFract=float(connection.post_fraction_along), + ) + + for connection in cp.continuous_connection_instances: + + self.netHandler.handle_connection( + cp.id, + connection.id, + cp.presynaptic_population, + cp.postsynaptic_population, + synapseType=None, + preCellId=connection.get_pre_cell_id(), + postCellId=connection.get_post_cell_id(), + preSegId=int(connection.pre_segment), + postSegId=int(connection.post_segment), + preFract=float(connection.pre_fraction_along), + postFract=float(connection.post_fraction_along), + ) + + for connection in cp.continuous_connection_instance_ws: + + self.netHandler.handle_connection( + cp.id, + connection.id, + cp.presynaptic_population, + cp.postsynaptic_population, + synapseType=None, + preCellId=connection.get_pre_cell_id(), + postCellId=connection.get_post_cell_id(), + preSegId=int(connection.pre_segment), + postSegId=int(connection.post_segment), + preFract=float(connection.pre_fraction_along), + postFract=float(connection.post_fraction_along), + weight=connection.get_weight(), + ) + + for input_list in network.input_lists: + + input_comp_obj = self.nml_doc.get_by_id(input_list.component) + + self.netHandler.handle_input_list( + input_list.id, + input_list.populations, + input_list.component, + len(input_list.input), + input_comp_obj=input_comp_obj, + ) + + for input in input_list.input: + + self.netHandler.handle_single_input( + input_list.id, + input.id, + cellId=input.get_target_cell_id(), + segId=input.get_segment_id(), + fract=input.get_fraction_along(), + ) + for input_w in input_list.input_ws: + + self.netHandler.handle_single_input( + input_list.id, + input_w.id, + cellId=input_w.get_target_cell_id(), + segId=input_w.get_segment_id(), + fract=input_w.get_fraction_along(), + weight=input_w.get_weight(), + ) + + self.netHandler.finalise_input_source(input_list.id) + + for explicitInput in network.explicit_inputs: + + list_name = "INPUT_%s_%s" % ( + explicitInput.input, + explicitInput.target.replace("[", "_").replace("]", "_"), + ) + if list_name.endswith("_"): + list_name = list_name[:-1] + + pop = explicitInput.target.split("[")[0] + + input_comp_obj = self.nml_doc.get_by_id(explicitInput.input) + + self.netHandler.handle_input_list( + list_name, + pop, + explicitInput.input, + 1, + input_comp_obj=input_comp_obj, + ) + + self.netHandler.handle_single_input( + list_name, + 0, + cellId=explicitInput.get_target_cell_id(), + segId=0, + fract=0.5, + ) + + self.netHandler.finalise_input_source(list_name) + + +if __name__ == "__main__": + + file_name = "../examples/tmp/testh5.nml" + + logging.basicConfig( + level=logging.INFO, format="%(name)-19s %(levelname)-5s - %(message)s" + ) from neuroml.hdf5.DefaultNetworkHandler import DefaultNetworkHandler - nmlHandler = 
DefaultNetworkHandler() + nmlHandler = DefaultNetworkHandler() - currParser = NeuroMLXMLParser(nmlHandler) # The HDF5 handler knows of the structure of NetworkML and calls appropriate functions in NetworkHandler + currParser = NeuroMLXMLParser( + nmlHandler + ) # The HDF5 handler knows of the structure of NetworkML and calls appropriate functions in NetworkHandler currParser.parse(file_name) - - print('-------------------------------\n\n') - + + print("-------------------------------\n\n") + from neuroml.hdf5.NetworkBuilder import NetworkBuilder - nmlHandler = NetworkBuilder() + nmlHandler = NetworkBuilder() + + currParser = NeuroMLXMLParser(nmlHandler) - currParser = NeuroMLXMLParser(nmlHandler) - currParser.parse(file_name) - + nml_doc = nmlHandler.get_nml_doc() - + print(nml_doc.summary()) - nml_file = '../examples/tmp/testh5_2_.nml' + nml_file = "../examples/tmp/testh5_2_.nml" import neuroml.writers as writers + writers.NeuroMLWriter.write(nml_doc, nml_file) - print("Written network file to: "+nml_file) - - - - - - - + print("Written network file to: " + nml_file) diff --git a/neuroml/hdf5/__init__.py b/neuroml/hdf5/__init__.py index ca3ab688..74d95cb1 100644 --- a/neuroml/hdf5/__init__.py +++ b/neuroml/hdf5/__init__.py @@ -1,26 +1,23 @@ - import neuroml import numpy import sys def get_str_attribute_group(group, name): - if not hasattr(group._v_attrs,name): + if not hasattr(group._v_attrs, name): return None - for attrName in group._v_attrs._v_attrnames: - if attrName == name: - val = group._v_attrs[name] - - if isinstance(val,numpy.ndarray): - val = val[0] - elif isinstance(val, numpy.bytes_): - val = val.decode('UTF-8') - else: - val = str(val) - - #print("- Found [%s] in [%s]: %s = [%s] %s"%(attrName, group, name,val,type(val))) - return val - return None + for attrName in group._v_attrs._v_attrnames: + if attrName == name: + val = group._v_attrs[name] + if isinstance(val, numpy.ndarray): + val = val[0] + elif isinstance(val, numpy.bytes_): + val = val.decode("UTF-8") + else: + val = str(val) + # print("- Found [%s] in [%s]: %s = [%s] %s"%(attrName, group, name,val,type(val))) + return val + return None diff --git a/neuroml/loaders.py b/neuroml/loaders.py index a37dc749..52aa7254 100644 --- a/neuroml/loaders.py +++ b/neuroml/loaders.py @@ -1,4 +1,3 @@ - from neuroml.nml.nml import parse as nmlparse from neuroml.nml.nml import parseString as nmlparsestring @@ -12,73 +11,77 @@ supressGeneratedsWarnings = True + def print_(text, verbose=True): if verbose: prefix = "libNeuroML >>> " - #if not isinstance(text, str): text = text.decode('ascii') + # if not isinstance(text, str): text = text.decode('ascii') if verbose: - print("%s%s"%(prefix, text.replace("\n", "\n"+prefix))) + print("%s%s" % (prefix, text.replace("\n", "\n" + prefix))) class NeuroMLLoader(object): - @classmethod - def load(cls,src): + def load(cls, src): doc = cls.__nml2_doc(src) return doc - @classmethod - def __nml2_doc(cls,file_name): + @classmethod + def __nml2_doc(cls, file_name): import sys try: - - if supressGeneratedsWarnings: warnings.simplefilter("ignore") + + if supressGeneratedsWarnings: + warnings.simplefilter("ignore") nml2_doc = nmlparse(file_name) - if supressGeneratedsWarnings: warnings.resetwarnings() + if supressGeneratedsWarnings: + warnings.resetwarnings() except Exception as e: - raise Exception("Not a valid NeuroML 2 doc (%s): %s" % (file_name,e), e) - + raise Exception("Not a valid NeuroML 2 doc (%s): %s" % (file_name, e), e) + return nml2_doc class NeuroMLHdf5Loader(object): - @classmethod - def 
load(cls,src,optimized=False): - doc = cls.__nml2_doc(src,optimized) + def load(cls, src, optimized=False): + doc = cls.__nml2_doc(src, optimized) return doc - @classmethod - def __nml2_doc(cls,file_name,optimized=False): + @classmethod + def __nml2_doc(cls, file_name, optimized=False): import sys - + import logging - logging.basicConfig(level=logging.INFO, format="%(name)-19s %(levelname)-5s - %(message)s") - + + logging.basicConfig( + level=logging.INFO, format="%(name)-19s %(levelname)-5s - %(message)s" + ) + from neuroml.hdf5.NeuroMLHdf5Parser import NeuroMLHdf5Parser - + if optimized: - - currParser = NeuroMLHdf5Parser(None,optimized=True) - + + currParser = NeuroMLHdf5Parser(None, optimized=True) + currParser.parse(file_name) - + return currParser.get_nml_doc() else: - + from neuroml.hdf5.NetworkBuilder import NetworkBuilder - nmlHandler = NetworkBuilder() + nmlHandler = NetworkBuilder() - currParser = NeuroMLHdf5Parser(nmlHandler) + currParser = NeuroMLHdf5Parser(nmlHandler) currParser.parse(file_name) nml2_doc = nmlHandler.get_nml_doc() if currParser.nml_doc_extra_elements: - add_all_to_document(currParser.nml_doc_extra_elements,nml2_doc) + add_all_to_document(currParser.nml_doc_extra_elements, nml2_doc) return nml2_doc @@ -87,66 +90,70 @@ class SWCLoader(object): """ WARNING: Class defunct """ - + @classmethod - def load_swc_single(cls, src, name=None): - + def load_swc_single(cls, src, name=None): + import numpy as np from neuroml import arraymorph - - dtype= {'names': ('id', 'type', 'x','y','z','r','pid'), - 'formats': ('int32', 'int32', 'f4','f4','f4','f4','int32') } - - d = np.loadtxt(src,dtype=dtype ) - - if len( np.nonzero( d['pid']==-1)) != 1: - assert False, "Unexpected number of id's of -1 in file" - - num_nodes=len(d['pid']) - - root_index=np.where(d['pid']==-1)[0][0] - - # We might not nessesarily have continuous indices in the + + dtype = { + "names": ("id", "type", "x", "y", "z", "r", "pid"), + "formats": ("int32", "int32", "f4", "f4", "f4", "f4", "int32"), + } + + d = np.loadtxt(src, dtype=dtype) + + if len(np.nonzero(d["pid"] == -1)) != 1: + assert False, "Unexpected number of id's of -1 in file" + + num_nodes = len(d["pid"]) + + root_index = np.where(d["pid"] == -1)[0][0] + + # We might not nessesarily have continuous indices in the # SWC file, so lets convert them: - index_to_id = d['id'] - id_to_index_dict = dict( [(id,index) for index,id in enumerate(index_to_id) ] ) + index_to_id = d["id"] + id_to_index_dict = dict([(id, index) for index, id in enumerate(index_to_id)]) if len(id_to_index_dict) != len(index_to_id): - s = "Internal Error Loading SWC: Index and ID map are different lengths." - s += " [ID:%d, Index:%d]"%( len(index_to_id), len(id_to_index_dict) ) + s = "Internal Error Loading SWC: Index and ID map are different lengths." + s += " [ID:%d, Index:%d]" % (len(index_to_id), len(id_to_index_dict)) # TODO: this is undefined!! 
raise MorphologyImportError(s) # noqa: F821 - + # Vertices and section types are easy: - vertices = d[ ['x','y','z','r'] ] - vertices = np.vstack( [d['x'], d['y'],d['z'],d['r'] ]).T - section_types = [ swctype for ID,swctype in d[['id','type']]] + vertices = d[["x", "y", "z", "r"]] + vertices = np.vstack([d["x"], d["y"], d["z"], d["r"]]).T + section_types = [swctype for ID, swctype in d[["id", "type"]]] - #for connection indices we want the root to have index -1: - connection_indices=np.zeros(num_nodes,dtype='int32') + # for connection indices we want the root to have index -1: + connection_indices = np.zeros(num_nodes, dtype="int32") for i in range(num_nodes): - pID=d['pid'][i] - if pID !=-1: - parent_index=id_to_index_dict[pID] - connection_indices[i]=parent_index + pID = d["pid"][i] + if pID != -1: + parent_index = id_to_index_dict[pID] + connection_indices[i] = parent_index else: - connection_indices[i]=-1 + connection_indices[i] = -1 - #This needs to become an "Optimized Morphology" of some kind - return arraymorph.ArrayMorphology(vertices=vertices, - connectivity=connection_indices, - node_types=section_types, - name=name ) + # This needs to become an "Optimized Morphology" of some kind + return arraymorph.ArrayMorphology( + vertices=vertices, + connectivity=connection_indices, + node_types=section_types, + name=name, + ) class JSONLoader(object): - @classmethod - def load(cls,file): + def load(cls, file): from jsonpickle import decode as json_decode - if isinstance(file,str): - fileh = open(file,'r') + + if isinstance(file, str): + fileh = open(file, "r") else: fileh = file @@ -154,28 +161,25 @@ def load(cls,file): unpickled = json_decode(json_string) fileh.close() return unpickled - + @classmethod - def load_from_mongodb(cls, - db, - id, - host=None, - port=None): - + def load_from_mongodb(cls, db, id, host=None, port=None): + from pymongo import MongoClient + try: import simplejson as json except ImportError: import json - + from jsonpickle import decode as json_decode if host == None: - host = 'localhost' + host = "localhost" if port == None: port = 27017 - client = MongoClient(host,port) + client = MongoClient(host, port) db = client[db] @@ -183,27 +187,27 @@ def load_from_mongodb(cls, doc = collection.find_one() - del doc['_id'] + del doc["_id"] doc = json.dumps(doc) document = json_decode(doc) return document - -class ArrayMorphLoader(object): + +class ArrayMorphLoader(object): @classmethod def __extract_morphology(cls, node): - - from neuroml import arraymorph - - loaded_morphology = arraymorph.ArrayMorphology() - loaded_morphology.physical_mask = node.physical_mask[:] - loaded_morphology.vertices = node.vertices[:] - loaded_morphology.connectivity = node.connectivity[:] - return loaded_morphology + from neuroml import arraymorph + + loaded_morphology = arraymorph.ArrayMorphology() + loaded_morphology.physical_mask = node.physical_mask[:] + loaded_morphology.vertices = node.vertices[:] + loaded_morphology.connectivity = node.connectivity[:] + + return loaded_morphology @classmethod def load(cls, filepath): @@ -212,12 +216,13 @@ def load(cls, filepath): TODO: Complete refactoring. 
""" import tables - with tables.open_file(filepath,mode='r') as file: + + with tables.open_file(filepath, mode="r") as file: document = neuroml.NeuroMLDocument() for node in file.root: - if hasattr(node,'vertices'): + if hasattr(node, "vertices"): loaded_morphology = cls.__extract_morphology(node) document.morphology.append(loaded_morphology) else: @@ -228,88 +233,139 @@ def load(cls, filepath): return document -def read_neuroml2_file(nml2_file_name, include_includes=False, verbose=False, - already_included=[], print_method=print_, optimized=False): - +def read_neuroml2_file( + nml2_file_name, + include_includes=False, + verbose=False, + already_included=[], + print_method=print_, + optimized=False, +): + print_method("Loading NeuroML2 file: %s" % nml2_file_name, verbose) - + if not os.path.isfile(nml2_file_name): print_method("Unable to find file: %s!" % nml2_file_name, True) sys.exit() - - return _read_neuroml2(nml2_file_name, include_includes=include_includes, verbose=verbose, - already_included=already_included, print_method=print_method, optimized=optimized) - - - -def read_neuroml2_string(nml2_string, include_includes=False, verbose=False, - already_included=[], print_method=print_, optimized=False, base_path=None): - + + return _read_neuroml2( + nml2_file_name, + include_includes=include_includes, + verbose=verbose, + already_included=already_included, + print_method=print_method, + optimized=optimized, + ) + + +def read_neuroml2_string( + nml2_string, + include_includes=False, + verbose=False, + already_included=[], + print_method=print_, + optimized=False, + base_path=None, +): + print_method("Loading NeuroML2 string, base_path: %s" % base_path, verbose) - - return _read_neuroml2(nml2_string, include_includes=include_includes, verbose=verbose, - already_included=already_included, print_method=print_method, - optimized=optimized, base_path=base_path) - - - -def _read_neuroml2(nml2_file_name_or_string, include_includes=False, verbose=False, - already_included=[], print_method=print_, optimized=False, base_path=None): - - #print("................ Loading: %s"%nml2_file_name_or_string[:7]) - - base_path_to_use = os.path.dirname(os.path.realpath(nml2_file_name_or_string)) if base_path == None else base_path - - - if supressGeneratedsWarnings: warnings.simplefilter("ignore") - - if not isinstance(nml2_file_name_or_string, str) or nml2_file_name_or_string.startswith('<'): + + return _read_neuroml2( + nml2_string, + include_includes=include_includes, + verbose=verbose, + already_included=already_included, + print_method=print_method, + optimized=optimized, + base_path=base_path, + ) + + +def _read_neuroml2( + nml2_file_name_or_string, + include_includes=False, + verbose=False, + already_included=[], + print_method=print_, + optimized=False, + base_path=None, +): + + # print("................ 
Loading: %s"%nml2_file_name_or_string[:7]) + + base_path_to_use = ( + os.path.dirname(os.path.realpath(nml2_file_name_or_string)) + if base_path == None + else base_path + ) + + if supressGeneratedsWarnings: + warnings.simplefilter("ignore") + + if not isinstance( + nml2_file_name_or_string, str + ) or nml2_file_name_or_string.startswith("<"): nml2_doc = nmlparsestring(nml2_file_name_or_string) - base_path_to_use = './' if base_path == None else base_path - elif nml2_file_name_or_string.endswith('.h5') or nml2_file_name_or_string.endswith('.hdf5'): - nml2_doc = NeuroMLHdf5Loader.load(nml2_file_name_or_string,optimized=optimized) + base_path_to_use = "./" if base_path == None else base_path + elif nml2_file_name_or_string.endswith(".h5") or nml2_file_name_or_string.endswith( + ".hdf5" + ): + nml2_doc = NeuroMLHdf5Loader.load(nml2_file_name_or_string, optimized=optimized) else: nml2_doc = NeuroMLLoader.load(nml2_file_name_or_string) - - if supressGeneratedsWarnings: warnings.resetwarnings() - + + if supressGeneratedsWarnings: + warnings.resetwarnings() + if include_includes: - print_method('Including included files (included already: %s)' \ - % already_included, verbose) - + print_method( + "Including included files (included already: %s)" % already_included, + verbose, + ) + for include in nml2_doc.includes: incl_loc = os.path.abspath(os.path.join(base_path_to_use, include.href)) if incl_loc not in already_included: - print_method("Loading included NeuroML2 file: %s (base: %s, resolved: %s)" % (include.href, base_path_to_use, incl_loc), - verbose) - - if incl_loc.endswith('.nml') or incl_loc.endswith('.xml'): - nml2_sub_doc = read_neuroml2_file(incl_loc, True, - verbose=verbose, already_included=already_included) + print_method( + "Loading included NeuroML2 file: %s (base: %s, resolved: %s)" + % (include.href, base_path_to_use, incl_loc), + verbose, + ) + + if incl_loc.endswith(".nml") or incl_loc.endswith(".xml"): + nml2_sub_doc = read_neuroml2_file( + incl_loc, + True, + verbose=verbose, + already_included=already_included, + ) already_included.append(incl_loc) - add_all_to_document(nml2_sub_doc,nml2_doc) - - elif incl_loc.endswith('.nml.h5'): + add_all_to_document(nml2_sub_doc, nml2_doc) + + elif incl_loc.endswith(".nml.h5"): nml2_sub_doc = NeuroMLHdf5Loader.load(incl_loc) already_included.append(incl_loc) - add_all_to_document(nml2_sub_doc,nml2_doc) - + add_all_to_document(nml2_sub_doc, nml2_doc) + else: - raise Exception("Unrecognised extension on file: %s"%incl_loc) - + raise Exception("Unrecognised extension on file: %s" % incl_loc) + nml2_doc.includes = [] - + else: - if len(nml2_doc.includes)>0: - print_method('NOT including included files, even though %s are included!'%len(nml2_doc.includes), verbose) - - + if len(nml2_doc.includes) > 0: + print_method( + "NOT including included files, even though %s are included!" 
+ % len(nml2_doc.includes), + verbose, + ) + nml2_doc.includes = [] - + return nml2_doc -if __name__ == '__main__': - +if __name__ == "__main__": + nml_doc = read_neuroml2_file(sys.argv[1]) print(nml_doc.summary()) diff --git a/neuroml/nml/config.py b/neuroml/nml/config.py index a07a19bc..60c26d4b 100644 --- a/neuroml/nml/config.py +++ b/neuroml/nml/config.py @@ -1 +1 @@ -variables={'schema_name':'NeuroML_v2.2.xsd'} +variables = {"schema_name": "NeuroML_v2.2.xsd"} diff --git a/neuroml/nml/generateds_config.py b/neuroml/nml/generateds_config.py index 83fc889d..706eb7a3 100644 --- a/neuroml/nml/generateds_config.py +++ b/neuroml/nml/generateds_config.py @@ -1,6 +1,6 @@ -#Maps from NeuroML element space (Key) to python object space (value) -#The class names are essentially correct, the instance names need converting -#also attributes need fixing +# Maps from NeuroML element space (Key) to python object space (value) +# The class names are essentially correct, the instance names need converting +# also attributes need fixing import lxml from lxml import objectify @@ -11,26 +11,32 @@ sys.setrecursionlimit(10000) + def remove_curlies(string): - return re.sub("{.*}","",string) + return re.sub("{.*}", "", string) + def to_lowercase_with_underscores(string): - s1 = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', string) - return re.sub('([a-z0-9])([A-Z])', r'\1_\2', s1).lower() + s1 = re.sub("(.)([A-Z][a-z]+)", r"\1_\2", string) + return re.sub("([a-z0-9])([A-Z])", r"\1_\2", s1).lower() + def to_camelback(string): string_list = list(string) i = 0 - for m in re.finditer('_', string): + for m in re.finditer("_", string): underscore_index = m.end() - string_list[underscore_index - i] = string_list[underscore_index - i].capitalize() + string_list[underscore_index - i] = string_list[ + underscore_index - i + ].capitalize() string_list.pop(underscore_index - (1 + i)) i += 1 - string = ''.join(string_list) + string = "".join(string_list) return str(string) -def traverse_doc(queue,rename): + +def traverse_doc(queue, rename): """Recursive function to traverse the nodes of a tree in breadth-first order. 
@@ -43,24 +49,26 @@ def traverse_doc(queue,rename): children = node.getchildren() rename(node) queue = queue + children - traverse_doc(queue,rename) + traverse_doc(queue, rename) else: return None -def pluralize(noun): - if re.search('[sxz]$', noun): - return re.sub('$', 'es', noun) - elif re.search('[^aeioudgkprt]h$', noun): - return re.sub('$', 'es', noun) - elif re.search('[^aeiou]y$', noun): - return re.sub('y$', 'ies', noun) - else: - return noun + 's' + +def pluralize(noun): + if re.search("[sxz]$", noun): + return re.sub("$", "es", noun) + elif re.search("[^aeioudgkprt]h$", noun): + return re.sub("$", "es", noun) + elif re.search("[^aeiou]y$", noun): + return re.sub("y$", "ies", noun) + else: + return noun + "s" + def _node_to_python(node): - pluralize_flag = 'maxOccurs' in node.attrib - + pluralize_flag = "maxOccurs" in node.attrib + for attribute in node.attrib: nml_attribute = node.attrib.pop(attribute) if nml_attribute[0].islower(): @@ -69,16 +77,16 @@ def _node_to_python(node): renamed_attribute = pluralize(renamed_attribute) NameTable[nml_attribute] = renamed_attribute -filename = variables['schema_name'] + +filename = variables["schema_name"] import StringIO import process_includes outfile = StringIO.StringIO() -infile = open(filename, 'r') +infile = open(filename, "r") -process_includes.process_include_files(infile, outfile, - inpath=filename) +process_includes.process_include_files(infile, outfile, inpath=filename) infile.close() outfile.seek(0) doc = objectify.parse(outfile) @@ -87,10 +95,11 @@ def _node_to_python(node): NameTable = {} -traverse_doc(queue,_node_to_python) +traverse_doc(queue, _node_to_python) -#filtering routine, need to get a better way to extract these, asked on Stack Overflow +# filtering routine, need to get a better way to extract these, asked on Stack Overflow import keyword + disallowed_keywords = keyword.kwlist for keyword in disallowed_keywords: try: @@ -98,55 +107,54 @@ def _node_to_python(node): except: pass -NameTable['morphology'] = 'morphology' #overriding change to -#"morphologies" because it only applies outside of a cell - not a very -#elegant solution -NameTable['gateHHtauInf'] = 'gate_hh_tau_infs' -NameTable['ionChannelHH'] = 'ion_channel_hh' -NameTable['gateHHrates'] = 'gate_hh_rates' -NameTable['gateHHtauInf'] = 'gate_hh_tau_infs' - -NameTable['ionChannel'] = 'ion_channel' -NameTable['ionChannelHH'] = 'ion_channel_hhs' +NameTable["morphology"] = "morphology" # overriding change to +# "morphologies" because it only applies outside of a cell - not a very +# elegant solution +NameTable["gateHHtauInf"] = "gate_hh_tau_infs" +NameTable["ionChannelHH"] = "ion_channel_hh" +NameTable["gateHHrates"] = "gate_hh_rates" +NameTable["gateHHtauInf"] = "gate_hh_tau_infs" +NameTable["ionChannel"] = "ion_channel" +NameTable["ionChannelHH"] = "ion_channel_hhs" -NameTable['basePyNNCell'] = 'basePyNNCell' -NameTable['basePyNNIaFCell'] = 'basePyNNIaFCell' -NameTable['basePyNNIaFCondCell'] = 'basePyNNIaFCondCell' -NameTable['tau_syn_E'] = 'tau_syn_E' -NameTable['tau_syn_I'] = 'tau_syn_I' +NameTable["basePyNNCell"] = "basePyNNCell" +NameTable["basePyNNIaFCell"] = "basePyNNIaFCell" +NameTable["basePyNNIaFCondCell"] = "basePyNNIaFCondCell" +NameTable["tau_syn_E"] = "tau_syn_E" +NameTable["tau_syn_I"] = "tau_syn_I" -NameTable['e_rev_E'] = 'e_rev_E' -NameTable['e_rev_I'] = 'e_rev_I' -NameTable['e_rev_Na'] = 'e_rev_Na' -NameTable['e_rev_K'] = 'e_rev_K' +NameTable["e_rev_E"] = "e_rev_E" +NameTable["e_rev_I"] = "e_rev_I" +NameTable["e_rev_Na"] = "e_rev_Na" 
+NameTable["e_rev_K"] = "e_rev_K" -NameTable['gbar_K'] = 'gbar_K' -NameTable['gbar_Na'] = 'gbar_Na' +NameTable["gbar_K"] = "gbar_K" +NameTable["gbar_Na"] = "gbar_Na" -NameTable['delta_T'] = 'delta_T' +NameTable["delta_T"] = "delta_T" -NameTable['IF_curr_alpha'] = 'IF_curr_alpha' -NameTable['IF_curr_exp'] = 'IF_curr_exp' -NameTable['IF_cond_alpha'] = 'IF_cond_alpha' -NameTable['IF_cond_exp'] = 'IF_cond_exp' +NameTable["IF_curr_alpha"] = "IF_curr_alpha" +NameTable["IF_curr_exp"] = "IF_curr_exp" +NameTable["IF_cond_alpha"] = "IF_cond_alpha" +NameTable["IF_cond_exp"] = "IF_cond_exp" -NameTable['extracellularProperties'] = 'extracellular_properties' -NameTable['intracellularProperties'] = 'intracellular_properties' -NameTable['biophysicalProperties'] = 'biophysical_properties' +NameTable["extracellularProperties"] = "extracellular_properties" +NameTable["intracellularProperties"] = "intracellular_properties" +NameTable["biophysicalProperties"] = "biophysical_properties" print("NameTable is as follows:") print(NameTable) print("Saving NameTable to csv file") -writer = csv.writer(open('name_table.csv', 'wb')) +writer = csv.writer(open("name_table.csv", "wb")) for key, value in NameTable.items(): - writer.writerow([key, value]) + writer.writerow([key, value]) -print ("Saving name changes table to csv file") -changes_writer = csv.writer(open('changed_names.csv','wb')) +print("Saving name changes table to csv file") +changes_writer = csv.writer(open("changed_names.csv", "wb")) for key in NameTable: value = NameTable[key] if key != value: - changes_writer.writerow([key,value]) + changes_writer.writerow([key, value]) diff --git a/neuroml/nml/helper_methods.py b/neuroml/nml/helper_methods.py index 3a4dead0..bc565e98 100644 --- a/neuroml/nml/helper_methods.py +++ b/neuroml/nml/helper_methods.py @@ -6,8 +6,7 @@ # your method specification file. # class MethodSpec(object): - def __init__(self, name='', source='', class_names='', - class_names_compiled=None): + def __init__(self, name="", source="", class_names="", class_names_compiled=None): """MethodSpec -- A specification of a method. Member variables: name -- The method name @@ -21,55 +20,68 @@ class names in which the method is to be inserted. self.name = name self.source = source self.class_names = class_names - ''' + """ if class_names is None: self.class_names = ('.*', ) else: if class_names_compiled is None: self.class_names_compiled = re.compile(self.class_names) else: - self.class_names_compiled = class_names_compiled''' + self.class_names_compiled = class_names_compiled""" + def get_name(self): return self.name + def set_name(self, name): self.name = name + def get_source(self): return self.source + def set_source(self, source): self.source = source + def get_class_names(self): return self.class_names + def set_class_names(self, class_names): self.class_names = class_names self.class_names_compiled = re.compile(class_names) + def get_class_names_compiled(self): return self.class_names_compiled + def set_class_names_compiled(self, class_names_compiled): self.class_names_compiled = class_names_compiled + def match_name(self, class_name): """Match against the name of the class currently being generated. If this method returns True, the method will be inserted in the generated class. 
""" - if self.class_names == class_name or (isinstance(self.class_names,list) and class_name in self.class_names): + if self.class_names == class_name or ( + isinstance(self.class_names, list) and class_name in self.class_names + ): return True else: return False + def get_interpolated_source(self, values_dict): """Get the method source code, interpolating values from values_dict into it. The source returned by this method is inserted into the generated class. """ source = self.source % values_dict - source = source.replace('PERCENTAGE','%') + source = source.replace("PERCENTAGE", "%") return source + def show(self): - print('specification:') - print(' name: %s' % (self.name, )) + print("specification:") + print(" name: %s" % (self.name,)) print(self.source) - print(' class_names: %s' % (self.class_names, )) - print(' names pat : %s' % (self.class_names_compiled.pattern, )) + print(" class_names: %s" % (self.class_names,)) + print(" names pat : %s" % (self.class_names_compiled.pattern,)) # @@ -81,7 +93,8 @@ def show(self): # generated superclass file and also section "User Methods" in # the documentation, as well as the examples below. -num_segments = MethodSpec(name='num_segments', +num_segments = MethodSpec( + name="num_segments", source='''\ @property def num_segments(self): @@ -92,11 +105,12 @@ def num_segments(self): """ return len(self.segments) ''', - class_names=("Morphology") - ) + class_names=("Morphology"), +) -length = MethodSpec(name='length', +length = MethodSpec( + name="length", source='''\ @property def length(self): @@ -130,10 +144,11 @@ def __repr__(self): return str(self) ''', - class_names=("Segment") - ) + class_names=("Segment"), +) -volume = MethodSpec(name='volume', +volume = MethodSpec( + name="volume", source='''\ @property def volume(self): @@ -165,11 +180,12 @@ def volume(self): return volume ''', - class_names=("Segment") - ) + class_names=("Segment"), +) -surface_area = MethodSpec(name='surface_area', +surface_area = MethodSpec( + name="surface_area", source='''\ @property @@ -203,23 +219,24 @@ def surface_area(self): return surface_area ''', - class_names=("Segment") - ) + class_names=("Segment"), +) # # Provide a list of your method specifications. # This list of specifications must be named METHOD_SPECS. 
# -METHOD_SPECS=(length, - volume, - surface_area, - num_segments, - ) +METHOD_SPECS = ( + length, + volume, + surface_area, + num_segments, +) - -seg_grp = MethodSpec(name='SegmentGroup', - source='''\ +seg_grp = MethodSpec( + name="SegmentGroup", + source="""\ def __str__(self): @@ -230,13 +247,14 @@ def __repr__(self): return str(self) -''', - class_names=("SegmentGroup") - ) +""", + class_names=("SegmentGroup"), +) -METHOD_SPECS+=(seg_grp,) +METHOD_SPECS += (seg_grp,) -seg_grp = MethodSpec(name='Point3DWithDiam', +seg_grp = MethodSpec( + name="Point3DWithDiam", source='''\ def __str__(self): @@ -267,13 +285,14 @@ def distance_to(self, other_3d_point): return distance ''', - class_names=("Point3DWithDiam") - ) + class_names=("Point3DWithDiam"), +) -METHOD_SPECS+=(seg_grp,) +METHOD_SPECS += (seg_grp,) -connection_cell_ids = MethodSpec(name='connection_cell_ids', +connection_cell_ids = MethodSpec( + name="connection_cell_ids", source='''\ def _get_cell_id(self, id_string): @@ -344,12 +363,13 @@ def __str__(self): return "Connection "+str(self.id)+": "+str(self.get_pre_info())+" -> "+str(self.get_post_info()) ''', - class_names=(["Connection","ConnectionWD"]) - ) + class_names=(["Connection", "ConnectionWD"]), +) -METHOD_SPECS+=(connection_cell_ids,) +METHOD_SPECS += (connection_cell_ids,) -connection_wd_cell_ids = MethodSpec(name='connection_wd_cell_ids', +connection_wd_cell_ids = MethodSpec( + name="connection_wd_cell_ids", source='''\ def __str__(self): @@ -369,13 +389,14 @@ def get_delay_in_ms(self): return float(self.delay[:-1].strip())*1000.0 ''', - class_names=("ConnectionWD") - ) + class_names=("ConnectionWD"), +) -METHOD_SPECS+=(connection_wd_cell_ids,) +METHOD_SPECS += (connection_wd_cell_ids,) -elec_connection_instance_cell_ids = MethodSpec(name='elec_connection_instance_cell_ids', - source='''\ +elec_connection_instance_cell_ids = MethodSpec( + name="elec_connection_instance_cell_ids", + source="""\ def _get_cell_id(self, id_string): if '[' in id_string: @@ -389,13 +410,14 @@ def __str__(self): ", synapse: "+str(self.synapse) - ''', - class_names=("ElectricalConnectionInstance") - ) + """, + class_names=("ElectricalConnectionInstance"), +) -METHOD_SPECS+=(elec_connection_instance_cell_ids,) +METHOD_SPECS += (elec_connection_instance_cell_ids,) -elec_connection_instance_w = MethodSpec(name='elec_connection_instance_w', +elec_connection_instance_w = MethodSpec( + name="elec_connection_instance_w", source='''\ def get_weight(self): @@ -416,12 +438,13 @@ def __str__(self): ", synapse: "+str(self.synapse) + ", weight: "+'PERCENTAGE.6f'PERCENTAGEself.get_weight() ''', - class_names=("ElectricalConnectionInstanceW") - ) + class_names=("ElectricalConnectionInstanceW"), +) -METHOD_SPECS+=(elec_connection_instance_w,) +METHOD_SPECS += (elec_connection_instance_w,) -elec_connection_cell_ids = MethodSpec(name='elec_connection_cell_ids', +elec_connection_cell_ids = MethodSpec( + name="elec_connection_cell_ids", source='''\ def _get_cell_id(self, id_string): @@ -492,13 +515,14 @@ def __str__(self): ''', - class_names=("ElectricalConnection") - ) + class_names=("ElectricalConnection"), +) -METHOD_SPECS+=(elec_connection_cell_ids,) +METHOD_SPECS += (elec_connection_cell_ids,) -cont_connection_instance_cell_ids = MethodSpec(name='cont_connection_instance_cell_ids', - source='''\ +cont_connection_instance_cell_ids = MethodSpec( + name="cont_connection_instance_cell_ids", + source="""\ def _get_cell_id(self, id_string): if '[' in id_string: @@ -513,13 +537,14 @@ def __str__(self): ", pre comp: 
"+str(self.pre_component)+", post comp: "+str(self.post_component) - ''', - class_names=("ContinuousConnectionInstance") - ) + """, + class_names=("ContinuousConnectionInstance"), +) -METHOD_SPECS+=(cont_connection_instance_cell_ids,) +METHOD_SPECS += (cont_connection_instance_cell_ids,) -cont_connection_instance_w = MethodSpec(name='cont_connection_instance_w', +cont_connection_instance_w = MethodSpec( + name="cont_connection_instance_w", source='''\ def get_weight(self): @@ -537,12 +562,13 @@ def __str__(self): ''', - class_names=("ContinuousConnectionInstanceW") - ) + class_names=("ContinuousConnectionInstanceW"), +) -METHOD_SPECS+=(cont_connection_instance_w,) +METHOD_SPECS += (cont_connection_instance_w,) -cont_connection_cell_ids = MethodSpec(name='cont_connection_cell_ids', +cont_connection_cell_ids = MethodSpec( + name="cont_connection_cell_ids", source='''\ def _get_cell_id(self, id_string): @@ -614,14 +640,15 @@ def __str__(self): ''', - class_names=("ContinuousConnection") - ) + class_names=("ContinuousConnection"), +) -METHOD_SPECS+=(cont_connection_cell_ids,) +METHOD_SPECS += (cont_connection_cell_ids,) -instance = MethodSpec(name='instance', - source='''\ +instance = MethodSpec( + name="instance", + source="""\ def __str__(self): @@ -631,14 +658,15 @@ def __repr__(self): return str(self) -''', - class_names=("Instance") - ) -METHOD_SPECS+=(instance,) +""", + class_names=("Instance"), +) +METHOD_SPECS += (instance,) -location = MethodSpec(name='location', - source='''\ +location = MethodSpec( + name="location", + source="""\ def _format(self,value): @@ -655,15 +683,14 @@ def __repr__(self): return str(self) -''', - class_names=("Location") - ) -METHOD_SPECS+=(location,) - +""", + class_names=("Location"), +) +METHOD_SPECS += (location,) - -input_cell_ids = MethodSpec(name='input_cell_ids', +input_cell_ids = MethodSpec( + name="input_cell_ids", source='''\ def _get_cell_id(self, id_string): @@ -697,13 +724,14 @@ def __str__(self): return "Input "+str(self.id)+": "+str(self.get_target_cell_id())+":"+str(self.get_segment_id())+"("+'PERCENTAGE.6f'PERCENTAGEself.get_fraction_along()+")" ''', - class_names=(["Input","ExplicitInput"]) - ) + class_names=(["Input", "ExplicitInput"]), +) -METHOD_SPECS+=(input_cell_ids,) +METHOD_SPECS += (input_cell_ids,) -input_w = MethodSpec(name='input_w', +input_w = MethodSpec( + name="input_w", source='''\ def get_weight(self): @@ -719,13 +747,14 @@ def __str__(self): return "Input (weight) "+str(self.id)+": "+str(self.get_target_cell_id())+":"+str(self.get_segment_id())+"("+'PERCENTAGE.6f'PERCENTAGEself.get_fraction_along()+"), weight: "+'PERCENTAGE.6f'PERCENTAGEself.get_weight() ''', - class_names=(["InputW"]) - ) + class_names=(["InputW"]), +) -METHOD_SPECS+=(input_w,) +METHOD_SPECS += (input_w,) -nml_doc_summary = MethodSpec(name='summary', +nml_doc_summary = MethodSpec( + name="summary", source='''\ @@ -900,12 +929,13 @@ def append(self,element): append_to_element(self,element) ''', - class_names=("NeuroMLDocument") - ) + class_names=("NeuroMLDocument"), +) -METHOD_SPECS+=(nml_doc_summary,) +METHOD_SPECS += (nml_doc_summary,) -network_get_by_id = MethodSpec(name='get_by_id', +network_get_by_id = MethodSpec( + name="get_by_id", source='''\ warn_count = 0 @@ -939,13 +969,14 @@ def __str__(self): return "Network "+str(self.id)+" with "+str(len(self.populations))+" population(s)" ''', - class_names=("Network") - ) + class_names=("Network"), +) -METHOD_SPECS+=(network_get_by_id,) +METHOD_SPECS += (network_get_by_id,) -cell_methods = 
MethodSpec(name='cell_methods', +cell_methods = MethodSpec( + name="cell_methods", source='''\ @@ -1300,15 +1331,17 @@ def summary(self): print("*******************************************************") ''', - class_names=("Cell") - ) + class_names=("Cell"), +) -METHOD_SPECS+=(cell_methods,) +METHOD_SPECS += (cell_methods,) -inserts = {} +inserts = {} -inserts['Network'] = ''' +inserts[ + "Network" +] = """ import numpy @@ -1339,9 +1372,11 @@ def summary(self): for il in self.input_lists: il.exportHdf5(h5file, netGroup) -''' +""" -inserts['Population'] = ''' +inserts[ + "Population" +] = """ import numpy @@ -1383,9 +1418,11 @@ def __str__(self): return "Population: "+str(self.id)+" with "+str( self.get_size() )+" components of type "+(self.component if self.component else "???") -''' +""" -inserts['Projection'] = ''' +inserts[ + "Projection" +] = """ import numpy @@ -1474,9 +1511,11 @@ def __str__(self): -''' +""" -inserts['ElectricalProjection'] = ''' +inserts[ + "ElectricalProjection" +] = """ import numpy @@ -1548,11 +1587,12 @@ def __str__(self): for col in extra_cols.keys(): array._f_setattr(col,extra_cols[col]) -''' - +""" -inserts['ContinuousProjection'] = ''' +inserts[ + "ContinuousProjection" +] = """ import numpy @@ -1630,10 +1670,12 @@ def __str__(self): array._f_setattr(k, extra_cols[k]) -''' +""" -inserts['InputList'] = ''' +inserts[ + "InputList" +] = """ import numpy @@ -1685,25 +1727,27 @@ def __str__(self): return "Input list: "+self.id+" to "+self.populations+", component "+self.component -''' - +""" for insert in inserts.keys(): - ms = MethodSpec(name='exportHdf5', - source='''\ + ms = MethodSpec( + name="exportHdf5", + source='''\ def exportHdf5(self, h5file, h5Group): """Export to HDF5 file. """ #print("Exporting %s: "+str(self.id)+" as HDF5") %s - '''%(insert,inserts[insert]), - class_names=(insert) + ''' + % (insert, inserts[insert]), + class_names=(insert), ) - METHOD_SPECS+=(ms,) + METHOD_SPECS += (ms,) -synaptic_connections = MethodSpec(name='synaptic_connections', +synaptic_connections = MethodSpec( + name="synaptic_connections", source='''\ def _get_cell_id(self,ref): @@ -1728,12 +1772,13 @@ def __str__(self): ''', - class_names=("SynapticConnection") - ) + class_names=("SynapticConnection"), +) -METHOD_SPECS+=(synaptic_connections,) +METHOD_SPECS += (synaptic_connections,) -explicit_inputs = MethodSpec(name='explicit_inputs', +explicit_inputs = MethodSpec( + name="explicit_inputs", source='''\ def get_target_cell_id(self,): @@ -1758,18 +1803,20 @@ def __str__(self): ''', - class_names=("ExplicitInput") - ) + class_names=("ExplicitInput"), +) + +METHOD_SPECS += (explicit_inputs,) -METHOD_SPECS+=(explicit_inputs,) def test(): for spec in METHOD_SPECS: spec.show() + def main(): test() -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/neuroml/nml/nml.py b/neuroml/nml/nml.py index a51fdf5e..5ecb0ae2 100644 --- a/neuroml/nml/nml.py +++ b/neuroml/nml/nml.py @@ -26,6 +26,7 @@ import base64 import datetime as datetime_ import warnings as warnings_ + try: from lxml import etree as etree_ except ImportError: @@ -51,6 +52,7 @@ def parsexml_(infile, parser=None, **kwargs): doc = etree_.parse(infile, parser=parser, **kwargs) return doc + def parsexmlstring_(instring, parser=None, **kwargs): if parser is None: # Use the lxml ElementTree compatible parser so that, e.g., @@ -63,6 +65,7 @@ def parsexmlstring_(instring, parser=None, **kwargs): element = etree_.fromstring(instring, parser=parser, **kwargs) return element + # # Namespace prefix definition 
table (and other attributes, too) # @@ -99,96 +102,123 @@ def parsexmlstring_(instring, parser=None, **kwargs): try: from generatedssuper import GeneratedsSuper except ImportError as exp: - + class GeneratedsSuper(object): - tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + tzoff_pattern = re_.compile(r"(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$") + class _FixedOffsetTZ(datetime_.tzinfo): def __init__(self, offset, name): self.__offset = datetime_.timedelta(minutes=offset) self.__name = name + def utcoffset(self, dt): return self.__offset + def tzname(self, dt): return self.__name + def dst(self, dt): return None - def gds_format_string(self, input_data, input_name=''): + + def gds_format_string(self, input_data, input_name=""): return input_data - def gds_validate_string(self, input_data, node=None, input_name=''): + + def gds_validate_string(self, input_data, node=None, input_name=""): if not input_data: - return '' + return "" else: return input_data - def gds_format_base64(self, input_data, input_name=''): + + def gds_format_base64(self, input_data, input_name=""): return base64.b64encode(input_data) - def gds_validate_base64(self, input_data, node=None, input_name=''): + + def gds_validate_base64(self, input_data, node=None, input_name=""): return input_data - def gds_format_integer(self, input_data, input_name=''): - return '%d' % input_data - def gds_validate_integer(self, input_data, node=None, input_name=''): + + def gds_format_integer(self, input_data, input_name=""): + return "%d" % input_data + + def gds_validate_integer(self, input_data, node=None, input_name=""): return input_data - def gds_format_integer_list(self, input_data, input_name=''): - return '%s' % ' '.join(input_data) - def gds_validate_integer_list( - self, input_data, node=None, input_name=''): + + def gds_format_integer_list(self, input_data, input_name=""): + return "%s" % " ".join(input_data) + + def gds_validate_integer_list(self, input_data, node=None, input_name=""): values = input_data.split() for value in values: try: int(value) except (TypeError, ValueError): - raise_parse_error(node, 'Requires sequence of integers') + raise_parse_error(node, "Requires sequence of integers") return values - def gds_format_float(self, input_data, input_name=''): - return ('%.15f' % input_data).rstrip('0') - def gds_validate_float(self, input_data, node=None, input_name=''): + + def gds_format_float(self, input_data, input_name=""): + return ("%.15f" % input_data).rstrip("0") + + def gds_validate_float(self, input_data, node=None, input_name=""): return input_data - def gds_format_float_list(self, input_data, input_name=''): - return '%s' % ' '.join(input_data) - def gds_validate_float_list( - self, input_data, node=None, input_name=''): + + def gds_format_float_list(self, input_data, input_name=""): + return "%s" % " ".join(input_data) + + def gds_validate_float_list(self, input_data, node=None, input_name=""): values = input_data.split() for value in values: try: float(value) except (TypeError, ValueError): - raise_parse_error(node, 'Requires sequence of floats') + raise_parse_error(node, "Requires sequence of floats") return values - def gds_format_double(self, input_data, input_name=''): - return '%e' % input_data - def gds_validate_double(self, input_data, node=None, input_name=''): + + def gds_format_double(self, input_data, input_name=""): + return "%e" % input_data + + def gds_validate_double(self, input_data, node=None, input_name=""): return input_data - def gds_format_double_list(self, input_data, 
input_name=''): - return '%s' % ' '.join(input_data) - def gds_validate_double_list( - self, input_data, node=None, input_name=''): + + def gds_format_double_list(self, input_data, input_name=""): + return "%s" % " ".join(input_data) + + def gds_validate_double_list(self, input_data, node=None, input_name=""): values = input_data.split() for value in values: try: float(value) except (TypeError, ValueError): - raise_parse_error(node, 'Requires sequence of doubles') + raise_parse_error(node, "Requires sequence of doubles") return values - def gds_format_boolean(self, input_data, input_name=''): - return ('%s' % input_data).lower() - def gds_validate_boolean(self, input_data, node=None, input_name=''): + + def gds_format_boolean(self, input_data, input_name=""): + return ("%s" % input_data).lower() + + def gds_validate_boolean(self, input_data, node=None, input_name=""): return input_data - def gds_format_boolean_list(self, input_data, input_name=''): - return '%s' % ' '.join(input_data) - def gds_validate_boolean_list( - self, input_data, node=None, input_name=''): + + def gds_format_boolean_list(self, input_data, input_name=""): + return "%s" % " ".join(input_data) + + def gds_validate_boolean_list(self, input_data, node=None, input_name=""): values = input_data.split() for value in values: - if value not in ('true', '1', 'false', '0', ): + if value not in ( + "true", + "1", + "false", + "0", + ): raise_parse_error( node, - 'Requires sequence of booleans ' - '("true", "1", "false", "0")') + "Requires sequence of booleans " '("true", "1", "false", "0")', + ) return values - def gds_validate_datetime(self, input_data, node=None, input_name=''): + + def gds_validate_datetime(self, input_data, node=None, input_name=""): return input_data - def gds_format_datetime(self, input_data, input_name=''): + + def gds_format_datetime(self, input_data, input_name=""): if input_data.microsecond == 0: - _svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % ( + _svalue = "%04d-%02d-%02dT%02d:%02d:%02d" % ( input_data.year, input_data.month, input_data.day, @@ -197,63 +227,65 @@ def gds_format_datetime(self, input_data, input_name=''): input_data.second, ) else: - _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + _svalue = "%04d-%02d-%02dT%02d:%02d:%02d.%s" % ( input_data.year, input_data.month, input_data.day, input_data.hour, input_data.minute, input_data.second, - ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ("%f" % (float(input_data.microsecond) / 1000000))[2:], ) if input_data.tzinfo is not None: tzoff = input_data.tzinfo.utcoffset(input_data) if tzoff is not None: total_seconds = tzoff.seconds + (86400 * tzoff.days) if total_seconds == 0: - _svalue += 'Z' + _svalue += "Z" else: if total_seconds < 0: - _svalue += '-' + _svalue += "-" total_seconds *= -1 else: - _svalue += '+' + _svalue += "+" hours = total_seconds // 3600 minutes = (total_seconds - (hours * 3600)) // 60 - _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + _svalue += "{0:02d}:{1:02d}".format(hours, minutes) return _svalue + @classmethod def gds_parse_datetime(cls, input_data): tz = None - if input_data[-1] == 'Z': - tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + if input_data[-1] == "Z": + tz = GeneratedsSuper._FixedOffsetTZ(0, "UTC") input_data = input_data[:-1] else: results = GeneratedsSuper.tzoff_pattern.search(input_data) if results is not None: - tzoff_parts = results.group(2).split(':') + tzoff_parts = results.group(2).split(":") tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) - if results.group(1) == '-': + if 
results.group(1) == "-": tzoff *= -1 - tz = GeneratedsSuper._FixedOffsetTZ( - tzoff, results.group(0)) + tz = GeneratedsSuper._FixedOffsetTZ(tzoff, results.group(0)) input_data = input_data[:-6] - time_parts = input_data.split('.') + time_parts = input_data.split(".") if len(time_parts) > 1: - micro_seconds = int(float('0.' + time_parts[1]) * 1000000) - input_data = '%s.%s' % ( - time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) - dt = datetime_.datetime.strptime( - input_data, '%Y-%m-%dT%H:%M:%S.%f') + micro_seconds = int(float("0." + time_parts[1]) * 1000000) + input_data = "%s.%s" % ( + time_parts[0], + "{}".format(micro_seconds).rjust(6, "0"), + ) + dt = datetime_.datetime.strptime(input_data, "%Y-%m-%dT%H:%M:%S.%f") else: - dt = datetime_.datetime.strptime( - input_data, '%Y-%m-%dT%H:%M:%S') + dt = datetime_.datetime.strptime(input_data, "%Y-%m-%dT%H:%M:%S") dt = dt.replace(tzinfo=tz) return dt - def gds_validate_date(self, input_data, node=None, input_name=''): + + def gds_validate_date(self, input_data, node=None, input_name=""): return input_data - def gds_format_date(self, input_data, input_name=''): - _svalue = '%04d-%02d-%02d' % ( + + def gds_format_date(self, input_data, input_name=""): + _svalue = "%04d-%02d-%02d" % ( input_data.year, input_data.month, input_data.day, @@ -264,71 +296,73 @@ def gds_format_date(self, input_data, input_name=''): if tzoff is not None: total_seconds = tzoff.seconds + (86400 * tzoff.days) if total_seconds == 0: - _svalue += 'Z' + _svalue += "Z" else: if total_seconds < 0: - _svalue += '-' + _svalue += "-" total_seconds *= -1 else: - _svalue += '+' + _svalue += "+" hours = total_seconds // 3600 minutes = (total_seconds - (hours * 3600)) // 60 - _svalue += '{0:02d}:{1:02d}'.format( - hours, minutes) + _svalue += "{0:02d}:{1:02d}".format(hours, minutes) except AttributeError: pass return _svalue + @classmethod def gds_parse_date(cls, input_data): tz = None - if input_data[-1] == 'Z': - tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + if input_data[-1] == "Z": + tz = GeneratedsSuper._FixedOffsetTZ(0, "UTC") input_data = input_data[:-1] else: results = GeneratedsSuper.tzoff_pattern.search(input_data) if results is not None: - tzoff_parts = results.group(2).split(':') + tzoff_parts = results.group(2).split(":") tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) - if results.group(1) == '-': + if results.group(1) == "-": tzoff *= -1 - tz = GeneratedsSuper._FixedOffsetTZ( - tzoff, results.group(0)) + tz = GeneratedsSuper._FixedOffsetTZ(tzoff, results.group(0)) input_data = input_data[:-6] - dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = datetime_.datetime.strptime(input_data, "%Y-%m-%d") dt = dt.replace(tzinfo=tz) return dt.date() - def gds_validate_time(self, input_data, node=None, input_name=''): + + def gds_validate_time(self, input_data, node=None, input_name=""): return input_data - def gds_format_time(self, input_data, input_name=''): + + def gds_format_time(self, input_data, input_name=""): if input_data.microsecond == 0: - _svalue = '%02d:%02d:%02d' % ( + _svalue = "%02d:%02d:%02d" % ( input_data.hour, input_data.minute, input_data.second, ) else: - _svalue = '%02d:%02d:%02d.%s' % ( + _svalue = "%02d:%02d:%02d.%s" % ( input_data.hour, input_data.minute, input_data.second, - ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ("%f" % (float(input_data.microsecond) / 1000000))[2:], ) if input_data.tzinfo is not None: tzoff = input_data.tzinfo.utcoffset(input_data) if tzoff is not None: total_seconds = tzoff.seconds + 
(86400 * tzoff.days) if total_seconds == 0: - _svalue += 'Z' + _svalue += "Z" else: if total_seconds < 0: - _svalue += '-' + _svalue += "-" total_seconds *= -1 else: - _svalue += '+' + _svalue += "+" hours = total_seconds // 3600 minutes = (total_seconds - (hours * 3600)) // 60 - _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + _svalue += "{0:02d}:{1:02d}".format(hours, minutes) return _svalue + def gds_validate_simple_patterns(self, patterns, target): # pat is a list of lists of strings/patterns. # The target value must match at least one of the patterns @@ -345,90 +379,101 @@ def gds_validate_simple_patterns(self, patterns, target): found1 = False break return found1 + @classmethod def gds_parse_time(cls, input_data): tz = None - if input_data[-1] == 'Z': - tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + if input_data[-1] == "Z": + tz = GeneratedsSuper._FixedOffsetTZ(0, "UTC") input_data = input_data[:-1] else: results = GeneratedsSuper.tzoff_pattern.search(input_data) if results is not None: - tzoff_parts = results.group(2).split(':') + tzoff_parts = results.group(2).split(":") tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) - if results.group(1) == '-': + if results.group(1) == "-": tzoff *= -1 - tz = GeneratedsSuper._FixedOffsetTZ( - tzoff, results.group(0)) + tz = GeneratedsSuper._FixedOffsetTZ(tzoff, results.group(0)) input_data = input_data[:-6] - if len(input_data.split('.')) > 1: - dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + if len(input_data.split(".")) > 1: + dt = datetime_.datetime.strptime(input_data, "%H:%M:%S.%f") else: - dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = datetime_.datetime.strptime(input_data, "%H:%M:%S") dt = dt.replace(tzinfo=tz) return dt.time() + def gds_str_lower(self, instring): return instring.lower() + def get_path_(self, node): path_list = [] self.get_path_list_(node, path_list) path_list.reverse() - path = '/'.join(path_list) + path = "/".join(path_list) return path - Tag_strip_pattern_ = re_.compile(r'\{.*\}') + + Tag_strip_pattern_ = re_.compile(r"\{.*\}") + def get_path_list_(self, node, path_list): if node is None: return - tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + tag = GeneratedsSuper.Tag_strip_pattern_.sub("", node.tag) if tag: path_list.append(tag) self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): class_obj1 = default_class - if 'xsi' in node.nsmap: - classname = node.get('{%s}type' % node.nsmap['xsi']) + if "xsi" in node.nsmap: + classname = node.get("{%s}type" % node.nsmap["xsi"]) if classname is not None: - names = classname.split(':') + names = classname.split(":") if len(names) == 2: classname = names[1] class_obj2 = globals().get(classname) if class_obj2 is not None: class_obj1 = class_obj2 return class_obj1 + def gds_build_any(self, node, type_name=None): return None + @classmethod def gds_reverse_node_mapping(cls, mapping): return dict(((v, k) for k, v in mapping.items())) + @staticmethod def gds_encode(instring): if sys.version_info.major == 2: if ExternalEncoding: encoding = ExternalEncoding else: - encoding = 'utf-8' + encoding = "utf-8" return instring.encode(encoding) else: return instring + @staticmethod def convert_unicode(instring): if isinstance(instring, str): result = quote_xml(instring) elif sys.version_info.major == 2 and isinstance(instring, unicode): - result = quote_xml(instring).encode('utf8') + result = quote_xml(instring).encode("utf8") else: result = GeneratedsSuper.gds_encode(str(instring)) return 
result + def __eq__(self, other): if type(self) != type(other): return False return self.__dict__ == other.__dict__ + def __ne__(self, other): return not self.__eq__(other) - + def getSubclassFromModule_(module, class_): - '''Get the subclass of a class from a specific module.''' - name = class_.__name__ + 'Sub' + """Get the subclass of a class from a specific module.""" + name = class_.__name__ + "Sub" if hasattr(module, name): return getattr(module, name) else: @@ -454,10 +499,10 @@ def getSubclassFromModule_(module, class_): # Globals # -ExternalEncoding = '' -Tag_pattern_ = re_.compile(r'({.*})?(.*)') +ExternalEncoding = "" +Tag_pattern_ = re_.compile(r"({.*})?(.*)") String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") -Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +Namespace_extract_pat_ = re_.compile(r"{(.*)}(.*)") CDATA_pattern_ = re_.compile(r"", re_.DOTALL) # Change this to redirect the generated superclass module to use a @@ -472,21 +517,21 @@ def getSubclassFromModule_(module, class_): def showIndent(outfile, level, pretty_print=True): if pretty_print: for idx in range(level): - outfile.write(' ') + outfile.write(" ") def quote_xml(inStr): "Escape markup chars, but do not modify CDATA sections." if not inStr: - return '' - s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) - s2 = '' + return "" + s1 = isinstance(inStr, BaseStrType_) and inStr or "%s" % inStr + s2 = "" pos = 0 matchobjects = CDATA_pattern_.finditer(s1) for mo in matchobjects: - s3 = s1[pos:mo.start()] + s3 = s1[pos : mo.start()] s2 += quote_xml_aux(s3) - s2 += s1[mo.start():mo.end()] + s2 += s1[mo.start() : mo.end()] pos = mo.end() s3 = s1[pos:] s2 += quote_xml_aux(s3) @@ -494,17 +539,17 @@ def quote_xml(inStr): def quote_xml_aux(inStr): - s1 = inStr.replace('&', '&') - s1 = s1.replace('<', '<') - s1 = s1.replace('>', '>') + s1 = inStr.replace("&", "&") + s1 = s1.replace("<", "<") + s1 = s1.replace(">", ">") return s1 def quote_attrib(inStr): - s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) - s1 = s1.replace('&', '&') - s1 = s1.replace('<', '<') - s1 = s1.replace('>', '>') + s1 = isinstance(inStr, BaseStrType_) and inStr or "%s" % inStr + s1 = s1.replace("&", "&") + s1 = s1.replace("<", "<") + s1 = s1.replace(">", ">") if '"' in s1: if "'" in s1: s1 = '"%s"' % s1.replace('"', """) @@ -518,14 +563,14 @@ def quote_attrib(inStr): def quote_python(inStr): s1 = inStr if s1.find("'") == -1: - if s1.find('\n') == -1: + if s1.find("\n") == -1: return "'%s'" % s1 else: return "'''%s'''" % s1 else: if s1.find('"') != -1: s1 = s1.replace('"', '\\"') - if s1.find('\n') == -1: + if s1.find("\n") == -1: return '"%s"' % s1 else: return '"""%s"""' % s1 @@ -535,7 +580,7 @@ def get_all_text_(node): if node.text is not None: text = node.text else: - text = '' + text = "" for child in node: if child.tail is not None: text += child.tail @@ -544,7 +589,7 @@ def get_all_text_(node): def find_attr_value_(attr_name, node): attrs = node.attrib - attr_parts = attr_name.split(':') + attr_parts = attr_name.split(":") value = None if len(attr_parts) == 1: value = attrs.get(attr_name) @@ -552,7 +597,13 @@ def find_attr_value_(attr_name, node): prefix, name = attr_parts namespace = node.nsmap.get(prefix) if namespace is not None: - value = attrs.get('{%s}%s' % (namespace, name, )) + value = attrs.get( + "{%s}%s" + % ( + namespace, + name, + ) + ) return value @@ -561,7 +612,11 @@ class GDSParseError(Exception): def raise_parse_error(node, msg): - msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, 
) + msg = "%s (element %s/line %d)" % ( + msg, + node.tag, + node.sourceline, + ) raise GDSParseError(msg) @@ -581,51 +636,57 @@ class MixedContainer: TypeDouble = 6 TypeBoolean = 7 TypeBase64 = 8 + def __init__(self, category, content_type, name, value): self.category = category self.content_type = content_type self.name = name self.value = value + def getCategory(self): return self.category + def getContenttype(self, content_type): return self.content_type + def getValue(self): return self.value + def getName(self): return self.name - def export(self, outfile, level, name, namespace, - pretty_print=True): + + def export(self, outfile, level, name, namespace, pretty_print=True): if self.category == MixedContainer.CategoryText: # Prevent exporting empty content as empty lines. if self.value.strip(): outfile.write(self.value) elif self.category == MixedContainer.CategorySimple: self.exportSimple(outfile, level, name) - else: # category == MixedContainer.CategoryComplex + else: # category == MixedContainer.CategoryComplex self.value.export( - outfile, level, namespace, name_=name, - pretty_print=pretty_print) + outfile, level, namespace, name_=name, pretty_print=pretty_print + ) + def exportSimple(self, outfile, level, name): if self.content_type == MixedContainer.TypeString: - outfile.write('<%s>%s' % ( - self.name, self.value, self.name)) - elif self.content_type == MixedContainer.TypeInteger or \ - self.content_type == MixedContainer.TypeBoolean: - outfile.write('<%s>%d' % ( - self.name, self.value, self.name)) - elif self.content_type == MixedContainer.TypeFloat or \ - self.content_type == MixedContainer.TypeDecimal: - outfile.write('<%s>%f' % ( - self.name, self.value, self.name)) + outfile.write("<%s>%s" % (self.name, self.value, self.name)) + elif ( + self.content_type == MixedContainer.TypeInteger + or self.content_type == MixedContainer.TypeBoolean + ): + outfile.write("<%s>%d" % (self.name, self.value, self.name)) + elif ( + self.content_type == MixedContainer.TypeFloat + or self.content_type == MixedContainer.TypeDecimal + ): + outfile.write("<%s>%f" % (self.name, self.value, self.name)) elif self.content_type == MixedContainer.TypeDouble: - outfile.write('<%s>%g' % ( - self.name, self.value, self.name)) + outfile.write("<%s>%g" % (self.name, self.value, self.name)) elif self.content_type == MixedContainer.TypeBase64: - outfile.write('<%s>%s' % ( - self.name, - base64.b64encode(self.value), - self.name)) + outfile.write( + "<%s>%s" % (self.name, base64.b64encode(self.value), self.name) + ) + def to_etree(self, element): if self.category == MixedContainer.CategoryText: # Prevent exporting empty content as empty lines. 
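The hunk above reformats the generated MixedContainer helper, whose exportSimple(outfile, level, name) method serializes a simple typed value as an XML element, picking a format string from the container's content type. An illustrative usage sketch (not itself part of the diff), assuming the generated module is importable as neuroml.nml.nml and writing into an in-memory buffer; the element name "note", the value, and the expected output are examples, not taken from the patch:

import io
from neuroml.nml.nml import MixedContainer

# Build a container holding a simple string value for an element named "note".
buf = io.StringIO()
mc = MixedContainer(
    MixedContainer.CategorySimple,  # category: a simple typed value
    MixedContainer.TypeString,      # content type selects the format string used
    "note",                         # element name
    "hello",                        # element value
)

# exportSimple writes the value as an XML element into the supplied file object.
mc.exportSimple(buf, 0, "note")
print(buf.getvalue())  # expected to resemble: <note>hello</note>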
@@ -641,77 +702,119 @@ def to_etree(self, element): else: element.text += self.value elif self.category == MixedContainer.CategorySimple: - subelement = etree_.SubElement( - element, '%s' % self.name) + subelement = etree_.SubElement(element, "%s" % self.name) subelement.text = self.to_etree_simple() - else: # category == MixedContainer.CategoryComplex + else: # category == MixedContainer.CategoryComplex self.value.to_etree(element) + def to_etree_simple(self): if self.content_type == MixedContainer.TypeString: text = self.value - elif (self.content_type == MixedContainer.TypeInteger or - self.content_type == MixedContainer.TypeBoolean): - text = '%d' % self.value - elif (self.content_type == MixedContainer.TypeFloat or - self.content_type == MixedContainer.TypeDecimal): - text = '%f' % self.value + elif ( + self.content_type == MixedContainer.TypeInteger + or self.content_type == MixedContainer.TypeBoolean + ): + text = "%d" % self.value + elif ( + self.content_type == MixedContainer.TypeFloat + or self.content_type == MixedContainer.TypeDecimal + ): + text = "%f" % self.value elif self.content_type == MixedContainer.TypeDouble: - text = '%g' % self.value + text = "%g" % self.value elif self.content_type == MixedContainer.TypeBase64: - text = '%s' % base64.b64encode(self.value) + text = "%s" % base64.b64encode(self.value) return text + def exportLiteral(self, outfile, level, name): if self.category == MixedContainer.CategoryText: showIndent(outfile, level) outfile.write( - 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( - self.category, self.content_type, - self.name, self.value)) + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' + % (self.category, self.content_type, self.name, self.value) + ) elif self.category == MixedContainer.CategorySimple: showIndent(outfile, level) outfile.write( - 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( - self.category, self.content_type, - self.name, self.value)) - else: # category == MixedContainer.CategoryComplex + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' + % (self.category, self.content_type, self.name, self.value) + ) + else: # category == MixedContainer.CategoryComplex showIndent(outfile, level) outfile.write( - 'model_.MixedContainer(%d, %d, "%s",\n' % ( - self.category, self.content_type, self.name,)) + 'model_.MixedContainer(%d, %d, "%s",\n' + % ( + self.category, + self.content_type, + self.name, + ) + ) self.value.exportLiteral(outfile, level + 1) showIndent(outfile, level) - outfile.write(')\n') + outfile.write(")\n") class MemberSpec_(object): - def __init__(self, name='', data_type='', container=0, - optional=0, child_attrs=None, choice=None): + def __init__( + self, + name="", + data_type="", + container=0, + optional=0, + child_attrs=None, + choice=None, + ): self.name = name self.data_type = data_type self.container = container self.child_attrs = child_attrs self.choice = choice self.optional = optional - def set_name(self, name): self.name = name - def get_name(self): return self.name - def set_data_type(self, data_type): self.data_type = data_type - def get_data_type_chain(self): return self.data_type + + def set_name(self, name): + self.name = name + + def get_name(self): + return self.name + + def set_data_type(self, data_type): + self.data_type = data_type + + def get_data_type_chain(self): + return self.data_type + def get_data_type(self): if isinstance(self.data_type, list): if len(self.data_type) > 0: return self.data_type[-1] else: - return 'xs:string' + return "xs:string" else: return self.data_type - def 
set_container(self, container): self.container = container - def get_container(self): return self.container - def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs - def get_child_attrs(self): return self.child_attrs - def set_choice(self, choice): self.choice = choice - def get_choice(self): return self.choice - def set_optional(self, optional): self.optional = optional - def get_optional(self): return self.optional + + def set_container(self, container): + self.container = container + + def get_container(self): + return self.container + + def set_child_attrs(self, child_attrs): + self.child_attrs = child_attrs + + def get_child_attrs(self): + return self.child_attrs + + def set_choice(self, choice): + self.choice = choice + + def get_choice(self): + return self.choice + + def set_optional(self, optional): + self.optional = optional + + def get_optional(self): + return self.optional def _cast(typ, value): @@ -719,124 +822,182 @@ def _cast(typ, value): return value return typ(value) + # # Data representation classes. # class BlockTypes(object): - VOLTAGE_CONC_DEP_BLOCK_MECHANISM='voltageConcDepBlockMechanism' + VOLTAGE_CONC_DEP_BLOCK_MECHANISM = "voltageConcDepBlockMechanism" class Metric(object): - PATH_LENGTHFROMROOT='Path Length from root' + PATH_LENGTHFROMROOT = "Path Length from root" class PlasticityTypes(object): - TSODYKS_MARKRAM_DEP_MECHANISM='tsodyksMarkramDepMechanism' - TSODYKS_MARKRAM_DEP_FAC_MECHANISM='tsodyksMarkramDepFacMechanism' + TSODYKS_MARKRAM_DEP_MECHANISM = "tsodyksMarkramDepMechanism" + TSODYKS_MARKRAM_DEP_FAC_MECHANISM = "tsodyksMarkramDepFacMechanism" class ZeroOrOne(object): - _0='0' - _1='1' + _0 = "0" + _1 = "1" class allowedSpaces(object): - EUCLIDEAN__1_D='Euclidean_1D' - EUCLIDEAN__2_D='Euclidean_2D' - EUCLIDEAN__3_D='Euclidean_3D' - GRID__1_D='Grid_1D' - GRID__2_D='Grid_2D' - GRID__3_D='Grid_3D' + EUCLIDEAN__1_D = "Euclidean_1D" + EUCLIDEAN__2_D = "Euclidean_2D" + EUCLIDEAN__3_D = "Euclidean_3D" + GRID__1_D = "Grid_1D" + GRID__2_D = "Grid_2D" + GRID__3_D = "Grid_3D" class channelTypes(object): - ION_CHANNEL_PASSIVE='ionChannelPassive' - ION_CHANNEL_HH='ionChannelHH' + ION_CHANNEL_PASSIVE = "ionChannelPassive" + ION_CHANNEL_HH = "ionChannelHH" class gateTypes(object): - GATE_H_HRATES='gateHHrates' - GATE_H_HRATES_TAU='gateHHratesTau' - GATE_H_HTAU_INF='gateHHtauInf' - GATE_H_HRATES_INF='gateHHratesInf' - GATE_H_HRATES_TAU_INF='gateHHratesTauInf' - GATE_HH_INSTANTANEOUS='gateHHInstantaneous' - GATE_KS='gateKS' - GATE_FRACTIONAL='gateFractional' + GATE_H_HRATES = "gateHHrates" + GATE_H_HRATES_TAU = "gateHHratesTau" + GATE_H_HTAU_INF = "gateHHtauInf" + GATE_H_HRATES_INF = "gateHHratesInf" + GATE_H_HRATES_TAU_INF = "gateHHratesTauInf" + GATE_HH_INSTANTANEOUS = "gateHHInstantaneous" + GATE_KS = "gateKS" + GATE_FRACTIONAL = "gateFractional" class networkTypes(object): - NETWORK='network' - NETWORK_WITH_TEMPERATURE='networkWithTemperature' + NETWORK = "network" + NETWORK_WITH_TEMPERATURE = "networkWithTemperature" class populationTypes(object): - POPULATION='population' - POPULATION_LIST='populationList' + POPULATION = "population" + POPULATION_LIST = "populationList" class Property(GeneratedsSuper): """Generic property with a tag and value""" + member_data_items_ = [ - MemberSpec_('tag', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('value', 'xs:string', 0, 0, {'use': u'required'}), + MemberSpec_("tag", "xs:string", 0, 0, {"use": u"required"}), + MemberSpec_("value", "xs:string", 0, 0, {"use": u"required"}), ] subclass = None superclass 
= None + def __init__(self, tag=None, value=None, **kwargs_): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") self.tag = _cast(None, tag) self.value = _cast(None, value) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, Property) + subclass = getSubclassFromModule_(CurrentSubclassModule_, Property) if subclass is not None: return subclass(*args_, **kwargs_) if Property.subclass: return Property.subclass(*args_, **kwargs_) else: return Property(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - ): + def hasContent_(self): + if (): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Property', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('Property') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Property", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("Property") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Property') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Property" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Property', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Property'): - if self.tag is not None and 'tag' not in already_processed: - already_processed.add('tag') - outfile.write(' tag=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.tag), input_name='tag')), )) - if self.value is not None and 'value' not in already_processed: - already_processed.add('value') - outfile.write(' value=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.value), input_name='value')), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Property', fromsubclass_=False, pretty_print=True): + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="Property", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="Property" + ): + if self.tag is not None and "tag" not in already_processed: + already_processed.add("tag") + outfile.write( + " tag=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.tag), input_name="tag") + ), + ) + ) + if self.value is not None and "value" not in already_processed: + already_processed.add("value") + outfile.write( + " value=%s" + % ( + self.gds_encode( 
+ self.gds_format_string( + quote_attrib(self.value), input_name="value" + ) + ), + ) + ) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Property", + fromsubclass_=False, + pretty_print=True, + ): pass + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -844,82 +1005,138 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('tag', node) - if value is not None and 'tag' not in already_processed: - already_processed.add('tag') + value = find_attr_value_("tag", node) + if value is not None and "tag" not in already_processed: + already_processed.add("tag") self.tag = value - value = find_attr_value_('value', node) - if value is not None and 'value' not in already_processed: - already_processed.add('value') + value = find_attr_value_("value", node) + if value is not None and "value" not in already_processed: + already_processed.add("value") self.value = value + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): pass + + # end class Property class Annotation(GeneratedsSuper): """Placeholder for MIRIAM related metadata, among others.""" + member_data_items_ = [ - MemberSpec_('__ANY__', '__ANY__', 1, 1, {u'maxOccurs': u'unbounded', u'processContents': u'skip', u'minOccurs': u'0'}, None), + MemberSpec_( + "__ANY__", + "__ANY__", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"processContents": u"skip", + u"minOccurs": u"0", + }, + None, + ), ] subclass = None superclass = None + def __init__(self, anytypeobjs_=None, **kwargs_): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") if anytypeobjs_ is None: self.anytypeobjs_ = [] else: self.anytypeobjs_ = anytypeobjs_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, Annotation) + subclass = getSubclassFromModule_(CurrentSubclassModule_, Annotation) if subclass is not None: return subclass(*args_, **kwargs_) if Annotation.subclass: return Annotation.subclass(*args_, **kwargs_) else: return Annotation(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): - if ( - self.anytypeobjs_ - ): + if self.anytypeobjs_: return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Annotation', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('Annotation') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Annotation", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("Annotation") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Annotation') + self.exportAttributes( + outfile, level, already_processed, 
namespaceprefix_, name_="Annotation" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Annotation', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="Annotation", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Annotation'): + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="Annotation" + ): pass - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Annotation', fromsubclass_=False, pretty_print=True): + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Annotation", + fromsubclass_=False, + pretty_print=True, + ): if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" for obj_ in self.anytypeobjs_: obj_.export(outfile, level, namespaceprefix_, pretty_print=pretty_print) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -927,35 +1144,138 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - obj_ = self.gds_build_any(child_, 'Annotation') + obj_ = self.gds_build_any(child_, "Annotation") if obj_ is not None: self.add_anytypeobjs_(obj_) + + # end class Annotation class ComponentType(GeneratedsSuper): """Contains an extension to NeuroML by creating custom LEMS ComponentType.""" + member_data_items_ = [ - MemberSpec_('name', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('extends', 'xs:string', 0, 1, {'use': u'optional'}), - MemberSpec_('description', 'xs:string', 0, 1, {'use': u'optional'}), - MemberSpec_('Property', 'Property', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'LEMS_Property', u'name': u'Property', u'minOccurs': u'0'}, None), - MemberSpec_('Parameter', 'Parameter', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Parameter', u'name': u'Parameter', u'minOccurs': u'0'}, None), - MemberSpec_('Constant', 'Constant', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Constant', u'name': u'Constant', u'minOccurs': u'0'}, None), - MemberSpec_('Exposure', 'Exposure', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Exposure', u'name': u'Exposure', u'minOccurs': u'0'}, None), - MemberSpec_('Requirement', 'Requirement', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Requirement', u'name': u'Requirement', u'minOccurs': u'0'}, None), - MemberSpec_('InstanceRequirement', 'InstanceRequirement', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'InstanceRequirement', u'name': u'InstanceRequirement', u'minOccurs': u'0'}, None), - MemberSpec_('Dynamics', 'Dynamics', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Dynamics', u'name': u'Dynamics', u'minOccurs': u'0'}, None), + MemberSpec_("name", "xs:string", 0, 0, {"use": u"required"}), + MemberSpec_("extends", "xs:string", 0, 1, {"use": u"optional"}), + MemberSpec_("description", "xs:string", 0, 1, {"use": u"optional"}), + MemberSpec_( 
+ "Property", + "Property", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"LEMS_Property", + u"name": u"Property", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "Parameter", + "Parameter", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"Parameter", + u"name": u"Parameter", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "Constant", + "Constant", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"Constant", + u"name": u"Constant", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "Exposure", + "Exposure", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"Exposure", + u"name": u"Exposure", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "Requirement", + "Requirement", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"Requirement", + u"name": u"Requirement", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "InstanceRequirement", + "InstanceRequirement", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"InstanceRequirement", + u"name": u"InstanceRequirement", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "Dynamics", + "Dynamics", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"Dynamics", + u"name": u"Dynamics", + u"minOccurs": u"0", + }, + None, + ), ] subclass = None superclass = None - def __init__(self, name=None, extends=None, description=None, Property=None, Parameter=None, Constant=None, Exposure=None, Requirement=None, InstanceRequirement=None, Dynamics=None, **kwargs_): + + def __init__( + self, + name=None, + extends=None, + description=None, + Property=None, + Parameter=None, + Constant=None, + Exposure=None, + Requirement=None, + InstanceRequirement=None, + Dynamics=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") self.name = _cast(None, name) self.extends = _cast(None, extends) self.description = _cast(None, description) @@ -987,80 +1307,202 @@ def __init__(self, name=None, extends=None, description=None, Property=None, Par self.Dynamics = [] else: self.Dynamics = Dynamics + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, ComponentType) + subclass = getSubclassFromModule_(CurrentSubclassModule_, ComponentType) if subclass is not None: return subclass(*args_, **kwargs_) if ComponentType.subclass: return ComponentType.subclass(*args_, **kwargs_) else: return ComponentType(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): if ( - self.Property or - self.Parameter or - self.Constant or - self.Exposure or - self.Requirement or - self.InstanceRequirement or - self.Dynamics + self.Property + or self.Parameter + or self.Constant + or self.Exposure + or self.Requirement + or self.InstanceRequirement + or self.Dynamics ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ComponentType', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('ComponentType') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ComponentType", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ComponentType") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, 
level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ComponentType') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="ComponentType" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ComponentType', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ComponentType", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ComponentType'): - if self.name is not None and 'name' not in already_processed: - already_processed.add('name') - outfile.write(' name=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.name), input_name='name')), )) - if self.extends is not None and 'extends' not in already_processed: - already_processed.add('extends') - outfile.write(' extends=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.extends), input_name='extends')), )) - if self.description is not None and 'description' not in already_processed: - already_processed.add('description') - outfile.write(' description=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.description), input_name='description')), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ComponentType', fromsubclass_=False, pretty_print=True): + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ComponentType", + ): + if self.name is not None and "name" not in already_processed: + already_processed.add("name") + outfile.write( + " name=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.name), input_name="name" + ) + ), + ) + ) + if self.extends is not None and "extends" not in already_processed: + already_processed.add("extends") + outfile.write( + " extends=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.extends), input_name="extends" + ) + ), + ) + ) + if self.description is not None and "description" not in already_processed: + already_processed.add("description") + outfile.write( + " description=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.description), input_name="description" + ) + ), + ) + ) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ComponentType", + fromsubclass_=False, + pretty_print=True, + ): if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" for Property_ in self.Property: - Property_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Property', pretty_print=pretty_print) + Property_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="Property", + pretty_print=pretty_print, + ) for Parameter_ in self.Parameter: - Parameter_.export(outfile, level, namespaceprefix_, 
namespacedef_='', name_='Parameter', pretty_print=pretty_print) + Parameter_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="Parameter", + pretty_print=pretty_print, + ) for Constant_ in self.Constant: - Constant_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Constant', pretty_print=pretty_print) + Constant_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="Constant", + pretty_print=pretty_print, + ) for Exposure_ in self.Exposure: - Exposure_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Exposure', pretty_print=pretty_print) + Exposure_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="Exposure", + pretty_print=pretty_print, + ) for Requirement_ in self.Requirement: - Requirement_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Requirement', pretty_print=pretty_print) + Requirement_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="Requirement", + pretty_print=pretty_print, + ) for InstanceRequirement_ in self.InstanceRequirement: - InstanceRequirement_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='InstanceRequirement', pretty_print=pretty_print) + InstanceRequirement_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="InstanceRequirement", + pretty_print=pretty_print, + ) for Dynamics_ in self.Dynamics: - Dynamics_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Dynamics', pretty_print=pretty_print) + Dynamics_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="Dynamics", + pretty_print=pretty_print, + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -1068,135 +1510,220 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('name', node) - if value is not None and 'name' not in already_processed: - already_processed.add('name') + value = find_attr_value_("name", node) + if value is not None and "name" not in already_processed: + already_processed.add("name") self.name = value - value = find_attr_value_('extends', node) - if value is not None and 'extends' not in already_processed: - already_processed.add('extends') + value = find_attr_value_("extends", node) + if value is not None and "extends" not in already_processed: + already_processed.add("extends") self.extends = value - value = find_attr_value_('description', node) - if value is not None and 'description' not in already_processed: - already_processed.add('description') + value = find_attr_value_("description", node) + if value is not None and "description" not in already_processed: + already_processed.add("description") self.description = value + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'Property': + if nodeName_ == "Property": obj_ = LEMS_Property.factory(parent_object_=self) obj_.build(child_) self.Property.append(obj_) - obj_.original_tagname_ = 'Property' - elif nodeName_ == 'Parameter': + obj_.original_tagname_ = "Property" + elif nodeName_ == "Parameter": obj_ = Parameter.factory(parent_object_=self) obj_.build(child_) self.Parameter.append(obj_) - obj_.original_tagname_ = 'Parameter' - elif nodeName_ == 'Constant': + obj_.original_tagname_ = "Parameter" + elif nodeName_ == "Constant": obj_ = 
Constant.factory(parent_object_=self) obj_.build(child_) self.Constant.append(obj_) - obj_.original_tagname_ = 'Constant' - elif nodeName_ == 'Exposure': + obj_.original_tagname_ = "Constant" + elif nodeName_ == "Exposure": obj_ = Exposure.factory(parent_object_=self) obj_.build(child_) self.Exposure.append(obj_) - obj_.original_tagname_ = 'Exposure' - elif nodeName_ == 'Requirement': + obj_.original_tagname_ = "Exposure" + elif nodeName_ == "Requirement": obj_ = Requirement.factory(parent_object_=self) obj_.build(child_) self.Requirement.append(obj_) - obj_.original_tagname_ = 'Requirement' - elif nodeName_ == 'InstanceRequirement': + obj_.original_tagname_ = "Requirement" + elif nodeName_ == "InstanceRequirement": obj_ = InstanceRequirement.factory(parent_object_=self) obj_.build(child_) self.InstanceRequirement.append(obj_) - obj_.original_tagname_ = 'InstanceRequirement' - elif nodeName_ == 'Dynamics': + obj_.original_tagname_ = "InstanceRequirement" + elif nodeName_ == "Dynamics": obj_ = Dynamics.factory(parent_object_=self) obj_.build(child_) self.Dynamics.append(obj_) - obj_.original_tagname_ = 'Dynamics' + obj_.original_tagname_ = "Dynamics" + + # end class ComponentType class Constant(GeneratedsSuper): """LEMS ComponentType for Constant.""" + member_data_items_ = [ - MemberSpec_('name', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('dimension', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('value', 'Nml2Quantity', 0, 0, {'use': u'required'}), - MemberSpec_('description', 'xs:string', 0, 1, {'use': u'optional'}), + MemberSpec_("name", "xs:string", 0, 0, {"use": u"required"}), + MemberSpec_("dimension", "xs:string", 0, 0, {"use": u"required"}), + MemberSpec_("value", "Nml2Quantity", 0, 0, {"use": u"required"}), + MemberSpec_("description", "xs:string", 0, 1, {"use": u"optional"}), ] subclass = None superclass = None - def __init__(self, name=None, dimension=None, value=None, description=None, **kwargs_): + + def __init__( + self, name=None, dimension=None, value=None, description=None, **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") self.name = _cast(None, name) self.dimension = _cast(None, dimension) self.value = _cast(None, value) self.description = _cast(None, description) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, Constant) + subclass = getSubclassFromModule_(CurrentSubclassModule_, Constant) if subclass is not None: return subclass(*args_, **kwargs_) if Constant.subclass: return Constant.subclass(*args_, **kwargs_) else: return Constant(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity(self, value): # Validate type Nml2Quantity, a restriction on xs:string. 
if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_patterns_, )) - validate_Nml2Quantity_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*([_a-zA-Z0-9])*$']] - def hasContent_(self): - if ( + self.validate_Nml2Quantity_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_patterns_, + ) + ) - ): + validate_Nml2Quantity_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*([_a-zA-Z0-9])*$"] + ] + + def hasContent_(self): + if (): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Constant', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('Constant') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Constant", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("Constant") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Constant') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Constant" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Constant', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Constant'): - if self.name is not None and 'name' not in already_processed: - already_processed.add('name') - outfile.write(' name=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.name), input_name='name')), )) - if self.dimension is not None and 'dimension' not in already_processed: - already_processed.add('dimension') - outfile.write(' dimension=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.dimension), input_name='dimension')), )) - if self.value is not None and 'value' not in already_processed: - already_processed.add('value') - outfile.write(' value=%s' % (quote_attrib(self.value), )) - if self.description is not None and 'description' not in already_processed: - already_processed.add('description') - outfile.write(' description=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.description), input_name='description')), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Constant', fromsubclass_=False, pretty_print=True): + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="Constant", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def 
exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="Constant" + ): + if self.name is not None and "name" not in already_processed: + already_processed.add("name") + outfile.write( + " name=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.name), input_name="name" + ) + ), + ) + ) + if self.dimension is not None and "dimension" not in already_processed: + already_processed.add("dimension") + outfile.write( + " dimension=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.dimension), input_name="dimension" + ) + ), + ) + ) + if self.value is not None and "value" not in already_processed: + already_processed.add("value") + outfile.write(" value=%s" % (quote_attrib(self.value),)) + if self.description is not None and "description" not in already_processed: + already_processed.add("description") + outfile.write( + " description=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.description), input_name="description" + ) + ), + ) + ) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Constant", + fromsubclass_=False, + pretty_print=True, + ): pass + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -1204,94 +1731,166 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('name', node) - if value is not None and 'name' not in already_processed: - already_processed.add('name') + value = find_attr_value_("name", node) + if value is not None and "name" not in already_processed: + already_processed.add("name") self.name = value - value = find_attr_value_('dimension', node) - if value is not None and 'dimension' not in already_processed: - already_processed.add('dimension') + value = find_attr_value_("dimension", node) + if value is not None and "dimension" not in already_processed: + already_processed.add("dimension") self.dimension = value - value = find_attr_value_('value', node) - if value is not None and 'value' not in already_processed: - already_processed.add('value') + value = find_attr_value_("value", node) + if value is not None and "value" not in already_processed: + already_processed.add("value") self.value = value - self.validate_Nml2Quantity(self.value) # validate type Nml2Quantity - value = find_attr_value_('description', node) - if value is not None and 'description' not in already_processed: - already_processed.add('description') + self.validate_Nml2Quantity(self.value) # validate type Nml2Quantity + value = find_attr_value_("description", node) + if value is not None and "description" not in already_processed: + already_processed.add("description") self.description = value + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): pass + + # end class Constant class Exposure(GeneratedsSuper): """LEMS Exposure (ComponentType property)""" + member_data_items_ = [ - MemberSpec_('name', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('dimension', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('description', 'xs:string', 0, 1, {'use': u'optional'}), + MemberSpec_("name", "xs:string", 0, 0, {"use": u"required"}), + MemberSpec_("dimension", "xs:string", 0, 0, {"use": u"required"}), + MemberSpec_("description", "xs:string", 0, 1, {"use": u"optional"}), ] subclass = None 
superclass = None + def __init__(self, name=None, dimension=None, description=None, **kwargs_): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") self.name = _cast(None, name) self.dimension = _cast(None, dimension) self.description = _cast(None, description) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, Exposure) + subclass = getSubclassFromModule_(CurrentSubclassModule_, Exposure) if subclass is not None: return subclass(*args_, **kwargs_) if Exposure.subclass: return Exposure.subclass(*args_, **kwargs_) else: return Exposure(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - ): + def hasContent_(self): + if (): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Exposure', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('Exposure') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Exposure", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("Exposure") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Exposure') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Exposure" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Exposure', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Exposure'): - if self.name is not None and 'name' not in already_processed: - already_processed.add('name') - outfile.write(' name=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.name), input_name='name')), )) - if self.dimension is not None and 'dimension' not in already_processed: - already_processed.add('dimension') - outfile.write(' dimension=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.dimension), input_name='dimension')), )) - if self.description is not None and 'description' not in already_processed: - already_processed.add('description') - outfile.write(' description=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.description), input_name='description')), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Exposure', fromsubclass_=False, pretty_print=True): + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="Exposure", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="Exposure" + ): + if 
self.name is not None and "name" not in already_processed: + already_processed.add("name") + outfile.write( + " name=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.name), input_name="name" + ) + ), + ) + ) + if self.dimension is not None and "dimension" not in already_processed: + already_processed.add("dimension") + outfile.write( + " dimension=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.dimension), input_name="dimension" + ) + ), + ) + ) + if self.description is not None and "description" not in already_processed: + already_processed.add("description") + outfile.write( + " description=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.description), input_name="description" + ) + ), + ) + ) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Exposure", + fromsubclass_=False, + pretty_print=True, + ): pass + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -1299,93 +1898,182 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('name', node) - if value is not None and 'name' not in already_processed: - already_processed.add('name') + value = find_attr_value_("name", node) + if value is not None and "name" not in already_processed: + already_processed.add("name") self.name = value - value = find_attr_value_('dimension', node) - if value is not None and 'dimension' not in already_processed: - already_processed.add('dimension') + value = find_attr_value_("dimension", node) + if value is not None and "dimension" not in already_processed: + already_processed.add("dimension") self.dimension = value - value = find_attr_value_('description', node) - if value is not None and 'description' not in already_processed: - already_processed.add('description') + value = find_attr_value_("description", node) + if value is not None and "description" not in already_processed: + already_processed.add("description") self.description = value + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): pass + + # end class Exposure class NamedDimensionalType(GeneratedsSuper): member_data_items_ = [ - MemberSpec_('name', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('dimension', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('description', 'xs:string', 0, 1, {'use': u'optional'}), + MemberSpec_("name", "xs:string", 0, 0, {"use": u"required"}), + MemberSpec_("dimension", "xs:string", 0, 0, {"use": u"required"}), + MemberSpec_("description", "xs:string", 0, 1, {"use": u"optional"}), ] subclass = None superclass = None - def __init__(self, name=None, dimension=None, description=None, extensiontype_=None, **kwargs_): + + def __init__( + self, + name=None, + dimension=None, + description=None, + extensiontype_=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") self.name = _cast(None, name) self.dimension = _cast(None, dimension) self.description = _cast(None, description) self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, NamedDimensionalType) + CurrentSubclassModule_, NamedDimensionalType + ) if 
subclass is not None: return subclass(*args_, **kwargs_) if NamedDimensionalType.subclass: return NamedDimensionalType.subclass(*args_, **kwargs_) else: return NamedDimensionalType(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - ): + def hasContent_(self): + if (): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='NamedDimensionalType', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('NamedDimensionalType') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="NamedDimensionalType", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("NamedDimensionalType") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='NamedDimensionalType') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="NamedDimensionalType", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='NamedDimensionalType', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='NamedDimensionalType'): - if self.name is not None and 'name' not in already_processed: - already_processed.add('name') - outfile.write(' name=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.name), input_name='name')), )) - if self.dimension is not None and 'dimension' not in already_processed: - already_processed.add('dimension') - outfile.write(' dimension=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.dimension), input_name='dimension')), )) - if self.description is not None and 'description' not in already_processed: - already_processed.add('description') - outfile.write(' description=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.description), input_name='description')), )) - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="NamedDimensionalType", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="NamedDimensionalType", + ): + if self.name is not None and "name" not in already_processed: + already_processed.add("name") + outfile.write( + " name=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.name), input_name="name" + ) + ), + ) + ) + if self.dimension is not None and "dimension" not in already_processed: + already_processed.add("dimension") + outfile.write( + " dimension=%s" + % ( + 
self.gds_encode( + self.gds_format_string( + quote_attrib(self.dimension), input_name="dimension" + ) + ), + ) + ) + if self.description is not None and "description" not in already_processed: + already_processed.add("description") + outfile.write( + " description=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.description), input_name="description" + ) + ), + ) + ) + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='NamedDimensionalType', fromsubclass_=False, pretty_print=True): + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="NamedDimensionalType", + fromsubclass_=False, + pretty_print=True, + ): pass + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -1393,102 +2081,201 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('name', node) - if value is not None and 'name' not in already_processed: - already_processed.add('name') + value = find_attr_value_("name", node) + if value is not None and "name" not in already_processed: + already_processed.add("name") self.name = value - value = find_attr_value_('dimension', node) - if value is not None and 'dimension' not in already_processed: - already_processed.add('dimension') + value = find_attr_value_("dimension", node) + if value is not None and "dimension" not in already_processed: + already_processed.add("dimension") self.dimension = value - value = find_attr_value_('description', node) - if value is not None and 'description' not in already_processed: - already_processed.add('description') + value = find_attr_value_("description", node) + if value is not None and "description" not in already_processed: + already_processed.add("description") self.description = value - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): pass + + # end class NamedDimensionalType class NamedDimensionalVariable(GeneratedsSuper): member_data_items_ = [ - MemberSpec_('name', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('dimension', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('description', 'xs:string', 0, 1, {'use': u'optional'}), - MemberSpec_('exposure', 'xs:string', 0, 1, {'use': u'optional'}), + MemberSpec_("name", "xs:string", 0, 0, {"use": u"required"}), + MemberSpec_("dimension", "xs:string", 0, 0, {"use": u"required"}), + MemberSpec_("description", "xs:string", 0, 1, {"use": u"optional"}), + MemberSpec_("exposure", "xs:string", 0, 1, {"use": u"optional"}), ] subclass = None superclass = None - def __init__(self, name=None, dimension=None, description=None, exposure=None, extensiontype_=None, **kwargs_): + + def __init__( + self, + name=None, + dimension=None, + description=None, + exposure=None, + extensiontype_=None, + 
**kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") self.name = _cast(None, name) self.dimension = _cast(None, dimension) self.description = _cast(None, description) self.exposure = _cast(None, exposure) self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, NamedDimensionalVariable) + CurrentSubclassModule_, NamedDimensionalVariable + ) if subclass is not None: return subclass(*args_, **kwargs_) if NamedDimensionalVariable.subclass: return NamedDimensionalVariable.subclass(*args_, **kwargs_) else: return NamedDimensionalVariable(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - ): + def hasContent_(self): + if (): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='NamedDimensionalVariable', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('NamedDimensionalVariable') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="NamedDimensionalVariable", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("NamedDimensionalVariable") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='NamedDimensionalVariable') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="NamedDimensionalVariable", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='NamedDimensionalVariable', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='NamedDimensionalVariable'): - if self.name is not None and 'name' not in already_processed: - already_processed.add('name') - outfile.write(' name=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.name), input_name='name')), )) - if self.dimension is not None and 'dimension' not in already_processed: - already_processed.add('dimension') - outfile.write(' dimension=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.dimension), input_name='dimension')), )) - if self.description is not None and 'description' not in already_processed: - already_processed.add('description') - outfile.write(' description=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.description), input_name='description')), )) - if self.exposure is not None and 'exposure' not in already_processed: - already_processed.add('exposure') - outfile.write(' exposure=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.exposure), input_name='exposure')), )) - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - 
already_processed.add('xsi:type') + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="NamedDimensionalVariable", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="NamedDimensionalVariable", + ): + if self.name is not None and "name" not in already_processed: + already_processed.add("name") + outfile.write( + " name=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.name), input_name="name" + ) + ), + ) + ) + if self.dimension is not None and "dimension" not in already_processed: + already_processed.add("dimension") + outfile.write( + " dimension=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.dimension), input_name="dimension" + ) + ), + ) + ) + if self.description is not None and "description" not in already_processed: + already_processed.add("description") + outfile.write( + " description=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.description), input_name="description" + ) + ), + ) + ) + if self.exposure is not None and "exposure" not in already_processed: + already_processed.add("exposure") + outfile.write( + " exposure=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.exposure), input_name="exposure" + ) + ), + ) + ) + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='NamedDimensionalVariable', fromsubclass_=False, pretty_print=True): + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="NamedDimensionalVariable", + fromsubclass_=False, + pretty_print=True, + ): pass + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -1496,84 +2283,131 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('name', node) - if value is not None and 'name' not in already_processed: - already_processed.add('name') + value = find_attr_value_("name", node) + if value is not None and "name" not in already_processed: + already_processed.add("name") self.name = value - value = find_attr_value_('dimension', node) - if value is not None and 'dimension' not in already_processed: - already_processed.add('dimension') + value = find_attr_value_("dimension", node) + if value is not None and "dimension" not in already_processed: + already_processed.add("dimension") self.dimension = value - value = find_attr_value_('description', node) - if value is not None and 'description' not in already_processed: - already_processed.add('description') + value = find_attr_value_("description", node) + if value is not None and "description" not in already_processed: + already_processed.add("description") self.description = value - value = find_attr_value_('exposure', node) - if value is not None and 'exposure' not in already_processed: - already_processed.add('exposure') + value = find_attr_value_("exposure", node) + if 
value is not None and "exposure" not in already_processed: + already_processed.add("exposure") self.exposure = value - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): pass + + # end class NamedDimensionalVariable class Parameter(NamedDimensionalType): - member_data_items_ = [ - ] + member_data_items_ = [] subclass = None superclass = NamedDimensionalType + def __init__(self, name=None, dimension=None, description=None, **kwargs_): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(Parameter, self).__init__(name, dimension, description, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(Parameter, self).__init__(name, dimension, description, **kwargs_) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, Parameter) + subclass = getSubclassFromModule_(CurrentSubclassModule_, Parameter) if subclass is not None: return subclass(*args_, **kwargs_) if Parameter.subclass: return Parameter.subclass(*args_, **kwargs_) else: return Parameter(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): - if ( - super(Parameter, self).hasContent_() - ): + if super(Parameter, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Parameter', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('Parameter') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Parameter", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("Parameter") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Parameter') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Parameter" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Parameter', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Parameter'): - super(Parameter, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Parameter') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Parameter', fromsubclass_=False, pretty_print=True): - super(Parameter, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + 
namespaceprefix_, + namespacedef_, + name_="Parameter", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="Parameter" + ): + super(Parameter, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Parameter" + ) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Parameter", + fromsubclass_=False, + pretty_print=True, + ): + super(Parameter, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) pass + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -1581,71 +2415,129 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): super(Parameter, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(Parameter, self).buildChildren(child_, node, nodeName_, True) pass + + # end class Parameter class LEMS_Property(NamedDimensionalType): member_data_items_ = [ - MemberSpec_('default_value', 'xs:double', 0, 1, {'use': u'optional'}), + MemberSpec_("default_value", "xs:double", 0, 1, {"use": u"optional"}), ] subclass = None superclass = NamedDimensionalType - def __init__(self, name=None, dimension=None, description=None, default_value=None, **kwargs_): + + def __init__( + self, name=None, dimension=None, description=None, default_value=None, **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(LEMS_Property, self).__init__(name, dimension, description, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(LEMS_Property, self).__init__(name, dimension, description, **kwargs_) self.default_value = _cast(float, default_value) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, LEMS_Property) + subclass = getSubclassFromModule_(CurrentSubclassModule_, LEMS_Property) if subclass is not None: return subclass(*args_, **kwargs_) if LEMS_Property.subclass: return LEMS_Property.subclass(*args_, **kwargs_) else: return LEMS_Property(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): - if ( - super(LEMS_Property, self).hasContent_() - ): + if super(LEMS_Property, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='LEMS_Property', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('LEMS_Property') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="LEMS_Property", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("LEMS_Property") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + 
) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='LEMS_Property') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="LEMS_Property" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='LEMS_Property', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='LEMS_Property'): - super(LEMS_Property, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='LEMS_Property') - if self.default_value is not None and 'default_value' not in already_processed: - already_processed.add('default_value') - outfile.write(' defaultValue="%s"' % self.gds_format_double(self.default_value, input_name='defaultValue')) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='LEMS_Property', fromsubclass_=False, pretty_print=True): - super(LEMS_Property, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="LEMS_Property", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="LEMS_Property", + ): + super(LEMS_Property, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="LEMS_Property" + ) + if self.default_value is not None and "default_value" not in already_processed: + already_processed.add("default_value") + outfile.write( + ' defaultValue="%s"' + % self.gds_format_double(self.default_value, input_name="defaultValue") + ) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="LEMS_Property", + fromsubclass_=False, + pretty_print=True, + ): + super(LEMS_Property, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) pass + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -1653,73 +2545,125 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('defaultValue', node) - if value is not None and 'defaultValue' not in already_processed: - already_processed.add('defaultValue') + value = find_attr_value_("defaultValue", node) + if value is not None and "defaultValue" not in already_processed: + already_processed.add("defaultValue") try: self.default_value = float(value) except ValueError as exp: - raise ValueError('Bad float/double attribute (defaultValue): %s' % exp) + raise ValueError("Bad float/double attribute (defaultValue): %s" % exp) super(LEMS_Property, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(LEMS_Property, self).buildChildren(child_, node, nodeName_, True) pass + + # end class LEMS_Property class Requirement(NamedDimensionalType): - member_data_items_ = [ - ] + member_data_items_ = [] subclass = None superclass = 
NamedDimensionalType + def __init__(self, name=None, dimension=None, description=None, **kwargs_): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(Requirement, self).__init__(name, dimension, description, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(Requirement, self).__init__(name, dimension, description, **kwargs_) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, Requirement) + subclass = getSubclassFromModule_(CurrentSubclassModule_, Requirement) if subclass is not None: return subclass(*args_, **kwargs_) if Requirement.subclass: return Requirement.subclass(*args_, **kwargs_) else: return Requirement(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): - if ( - super(Requirement, self).hasContent_() - ): + if super(Requirement, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Requirement', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('Requirement') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Requirement", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("Requirement") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Requirement') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Requirement" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Requirement', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Requirement'): - super(Requirement, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Requirement') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Requirement', fromsubclass_=False, pretty_print=True): - super(Requirement, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="Requirement", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="Requirement", + ): + super(Requirement, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Requirement" + ) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Requirement", + fromsubclass_=False, + pretty_print=True, + ): + super(Requirement, self).exportChildren( 
+ outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) pass + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -1727,73 +2671,146 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): super(Requirement, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(Requirement, self).buildChildren(child_, node, nodeName_, True) pass + + # end class Requirement class InstanceRequirement(GeneratedsSuper): member_data_items_ = [ - MemberSpec_('name', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('type', 'xs:string', 0, 0, {'use': u'required'}), + MemberSpec_("name", "xs:string", 0, 0, {"use": u"required"}), + MemberSpec_("type", "xs:string", 0, 0, {"use": u"required"}), ] subclass = None superclass = None + def __init__(self, name=None, type=None, **kwargs_): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") self.name = _cast(None, name) self.type = _cast(None, type) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, InstanceRequirement) + CurrentSubclassModule_, InstanceRequirement + ) if subclass is not None: return subclass(*args_, **kwargs_) if InstanceRequirement.subclass: return InstanceRequirement.subclass(*args_, **kwargs_) else: return InstanceRequirement(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - ): + def hasContent_(self): + if (): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='InstanceRequirement', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('InstanceRequirement') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="InstanceRequirement", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("InstanceRequirement") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='InstanceRequirement') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="InstanceRequirement", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='InstanceRequirement', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='InstanceRequirement'): - if self.name is not None and 'name' not in already_processed: - already_processed.add('name') - outfile.write(' name=%s' % 
(self.gds_encode(self.gds_format_string(quote_attrib(self.name), input_name='name')), )) - if self.type is not None and 'type' not in already_processed: - already_processed.add('type') - outfile.write(' type=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.type), input_name='type')), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='InstanceRequirement', fromsubclass_=False, pretty_print=True): + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="InstanceRequirement", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="InstanceRequirement", + ): + if self.name is not None and "name" not in already_processed: + already_processed.add("name") + outfile.write( + " name=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.name), input_name="name" + ) + ), + ) + ) + if self.type is not None and "type" not in already_processed: + already_processed.add("type") + outfile.write( + " type=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.type), input_name="type" + ) + ), + ) + ) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="InstanceRequirement", + fromsubclass_=False, + pretty_print=True, + ): pass + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -1801,33 +2818,94 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('name', node) - if value is not None and 'name' not in already_processed: - already_processed.add('name') + value = find_attr_value_("name", node) + if value is not None and "name" not in already_processed: + already_processed.add("name") self.name = value - value = find_attr_value_('type', node) - if value is not None and 'type' not in already_processed: - already_processed.add('type') + value = find_attr_value_("type", node) + if value is not None and "type" not in already_processed: + already_processed.add("type") self.type = value + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): pass + + # end class InstanceRequirement class Dynamics(GeneratedsSuper): """LEMS ComponentType for Dynamics""" + member_data_items_ = [ - MemberSpec_('StateVariable', 'StateVariable', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'StateVariable', u'name': u'StateVariable', u'minOccurs': u'0'}, None), - MemberSpec_('DerivedVariable', 'DerivedVariable', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'DerivedVariable', u'name': u'DerivedVariable', u'minOccurs': u'0'}, None), - MemberSpec_('ConditionalDerivedVariable', 'ConditionalDerivedVariable', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ConditionalDerivedVariable', u'name': u'ConditionalDerivedVariable', u'minOccurs': u'0'}, None), - MemberSpec_('TimeDerivative', 'TimeDerivative', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'TimeDerivative', u'name': u'TimeDerivative', u'minOccurs': u'0'}, None), + MemberSpec_( + "StateVariable", + "StateVariable", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"StateVariable", + u"name": u"StateVariable", + u"minOccurs": u"0", + }, + None, + ), + 
MemberSpec_( + "DerivedVariable", + "DerivedVariable", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"DerivedVariable", + u"name": u"DerivedVariable", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "ConditionalDerivedVariable", + "ConditionalDerivedVariable", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"ConditionalDerivedVariable", + u"name": u"ConditionalDerivedVariable", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "TimeDerivative", + "TimeDerivative", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"TimeDerivative", + u"name": u"TimeDerivative", + u"minOccurs": u"0", + }, + None, + ), ] subclass = None superclass = None - def __init__(self, StateVariable=None, DerivedVariable=None, ConditionalDerivedVariable=None, TimeDerivative=None, **kwargs_): + + def __init__( + self, + StateVariable=None, + DerivedVariable=None, + ConditionalDerivedVariable=None, + TimeDerivative=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") if StateVariable is None: self.StateVariable = [] else: @@ -1844,63 +2922,132 @@ def __init__(self, StateVariable=None, DerivedVariable=None, ConditionalDerivedV self.TimeDerivative = [] else: self.TimeDerivative = TimeDerivative + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, Dynamics) + subclass = getSubclassFromModule_(CurrentSubclassModule_, Dynamics) if subclass is not None: return subclass(*args_, **kwargs_) if Dynamics.subclass: return Dynamics.subclass(*args_, **kwargs_) else: return Dynamics(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): if ( - self.StateVariable or - self.DerivedVariable or - self.ConditionalDerivedVariable or - self.TimeDerivative + self.StateVariable + or self.DerivedVariable + or self.ConditionalDerivedVariable + or self.TimeDerivative ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Dynamics', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('Dynamics') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Dynamics", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("Dynamics") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Dynamics') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Dynamics" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Dynamics', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="Dynamics", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) + outfile.write("%s" 
% (namespaceprefix_, name_, eol_)) else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Dynamics'): + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="Dynamics" + ): pass - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Dynamics', fromsubclass_=False, pretty_print=True): + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Dynamics", + fromsubclass_=False, + pretty_print=True, + ): if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" for StateVariable_ in self.StateVariable: - StateVariable_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='StateVariable', pretty_print=pretty_print) + StateVariable_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="StateVariable", + pretty_print=pretty_print, + ) for DerivedVariable_ in self.DerivedVariable: - DerivedVariable_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='DerivedVariable', pretty_print=pretty_print) + DerivedVariable_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="DerivedVariable", + pretty_print=pretty_print, + ) for ConditionalDerivedVariable_ in self.ConditionalDerivedVariable: - ConditionalDerivedVariable_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ConditionalDerivedVariable', pretty_print=pretty_print) + ConditionalDerivedVariable_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="ConditionalDerivedVariable", + pretty_print=pretty_print, + ) for TimeDerivative_ in self.TimeDerivative: - TimeDerivative_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='TimeDerivative', pretty_print=pretty_print) + TimeDerivative_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="TimeDerivative", + pretty_print=pretty_print, + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -1908,95 +3055,178 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'StateVariable': + if nodeName_ == "StateVariable": obj_ = StateVariable.factory(parent_object_=self) obj_.build(child_) self.StateVariable.append(obj_) - obj_.original_tagname_ = 'StateVariable' - elif nodeName_ == 'DerivedVariable': + obj_.original_tagname_ = "StateVariable" + elif nodeName_ == "DerivedVariable": obj_ = DerivedVariable.factory(parent_object_=self) obj_.build(child_) self.DerivedVariable.append(obj_) - obj_.original_tagname_ = 'DerivedVariable' - elif nodeName_ == 'ConditionalDerivedVariable': + obj_.original_tagname_ = "DerivedVariable" + elif nodeName_ == "ConditionalDerivedVariable": obj_ = ConditionalDerivedVariable.factory(parent_object_=self) obj_.build(child_) self.ConditionalDerivedVariable.append(obj_) - obj_.original_tagname_ = 'ConditionalDerivedVariable' - elif nodeName_ == 'TimeDerivative': + obj_.original_tagname_ = "ConditionalDerivedVariable" + elif nodeName_ == "TimeDerivative": obj_ = TimeDerivative.factory(parent_object_=self) obj_.build(child_) self.TimeDerivative.append(obj_) - obj_.original_tagname_ = 'TimeDerivative' + 
obj_.original_tagname_ = "TimeDerivative" + + # end class Dynamics class DerivedVariable(NamedDimensionalVariable): """LEMS ComponentType for DerivedVariable""" + member_data_items_ = [ - MemberSpec_('value', 'xs:string', 0, 1, {'use': u'optional'}), - MemberSpec_('select', 'xs:string', 0, 1, {'use': u'optional'}), + MemberSpec_("value", "xs:string", 0, 1, {"use": u"optional"}), + MemberSpec_("select", "xs:string", 0, 1, {"use": u"optional"}), ] subclass = None superclass = NamedDimensionalVariable - def __init__(self, name=None, dimension=None, description=None, exposure=None, value=None, select=None, **kwargs_): + + def __init__( + self, + name=None, + dimension=None, + description=None, + exposure=None, + value=None, + select=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(DerivedVariable, self).__init__(name, dimension, description, exposure, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(DerivedVariable, self).__init__( + name, dimension, description, exposure, **kwargs_ + ) self.value = _cast(None, value) self.select = _cast(None, select) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, DerivedVariable) + subclass = getSubclassFromModule_(CurrentSubclassModule_, DerivedVariable) if subclass is not None: return subclass(*args_, **kwargs_) if DerivedVariable.subclass: return DerivedVariable.subclass(*args_, **kwargs_) else: return DerivedVariable(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): - if ( - super(DerivedVariable, self).hasContent_() - ): + if super(DerivedVariable, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='DerivedVariable', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('DerivedVariable') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="DerivedVariable", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("DerivedVariable") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='DerivedVariable') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="DerivedVariable" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='DerivedVariable', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='DerivedVariable'): - super(DerivedVariable, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='DerivedVariable') - if self.value is not None and 'value' not in already_processed: - already_processed.add('value') - outfile.write(' value=%s' % 
(self.gds_encode(self.gds_format_string(quote_attrib(self.value), input_name='value')), )) - if self.select is not None and 'select' not in already_processed: - already_processed.add('select') - outfile.write(' select=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.select), input_name='select')), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='DerivedVariable', fromsubclass_=False, pretty_print=True): - super(DerivedVariable, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="DerivedVariable", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="DerivedVariable", + ): + super(DerivedVariable, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="DerivedVariable" + ) + if self.value is not None and "value" not in already_processed: + already_processed.add("value") + outfile.write( + " value=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.value), input_name="value" + ) + ), + ) + ) + if self.select is not None and "select" not in already_processed: + already_processed.add("select") + outfile.write( + " select=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.select), input_name="select" + ) + ), + ) + ) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="DerivedVariable", + fromsubclass_=False, + pretty_print=True, + ): + super(DerivedVariable, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) pass + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -2004,74 +3234,130 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('value', node) - if value is not None and 'value' not in already_processed: - already_processed.add('value') + value = find_attr_value_("value", node) + if value is not None and "value" not in already_processed: + already_processed.add("value") self.value = value - value = find_attr_value_('select', node) - if value is not None and 'select' not in already_processed: - already_processed.add('select') + value = find_attr_value_("select", node) + if value is not None and "select" not in already_processed: + already_processed.add("select") self.select = value super(DerivedVariable, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(DerivedVariable, self).buildChildren(child_, node, nodeName_, True) pass + + # end class DerivedVariable class StateVariable(NamedDimensionalVariable): - member_data_items_ = [ - ] + member_data_items_ = [] subclass = None superclass = NamedDimensionalVariable - def __init__(self, name=None, dimension=None, description=None, exposure=None, **kwargs_): + + def __init__( + self, name=None, dimension=None, description=None, exposure=None, **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - 
super(StateVariable, self).__init__(name, dimension, description, exposure, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(StateVariable, self).__init__( + name, dimension, description, exposure, **kwargs_ + ) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, StateVariable) + subclass = getSubclassFromModule_(CurrentSubclassModule_, StateVariable) if subclass is not None: return subclass(*args_, **kwargs_) if StateVariable.subclass: return StateVariable.subclass(*args_, **kwargs_) else: return StateVariable(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): - if ( - super(StateVariable, self).hasContent_() - ): + if super(StateVariable, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='StateVariable', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('StateVariable') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="StateVariable", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("StateVariable") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='StateVariable') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="StateVariable" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='StateVariable', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='StateVariable'): - super(StateVariable, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='StateVariable') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='StateVariable', fromsubclass_=False, pretty_print=True): - super(StateVariable, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="StateVariable", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="StateVariable", + ): + super(StateVariable, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="StateVariable" + ) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="StateVariable", + fromsubclass_=False, + pretty_print=True, + ): + super(StateVariable, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) pass + def build(self, 
node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -2079,79 +3365,171 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): super(StateVariable, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(StateVariable, self).buildChildren(child_, node, nodeName_, True) pass + + # end class StateVariable class ConditionalDerivedVariable(NamedDimensionalVariable): """LEMS ComponentType for ConditionalDerivedVariable""" + member_data_items_ = [ - MemberSpec_('Case', 'Case', 1, 0, {u'maxOccurs': u'unbounded', u'type': u'Case', u'name': u'Case', u'minOccurs': u'1'}, None), + MemberSpec_( + "Case", + "Case", + 1, + 0, + { + u"maxOccurs": u"unbounded", + u"type": u"Case", + u"name": u"Case", + u"minOccurs": u"1", + }, + None, + ), ] subclass = None superclass = NamedDimensionalVariable - def __init__(self, name=None, dimension=None, description=None, exposure=None, Case=None, **kwargs_): + + def __init__( + self, + name=None, + dimension=None, + description=None, + exposure=None, + Case=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(ConditionalDerivedVariable, self).__init__(name, dimension, description, exposure, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(ConditionalDerivedVariable, self).__init__( + name, dimension, description, exposure, **kwargs_ + ) if Case is None: self.Case = [] else: self.Case = Case + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, ConditionalDerivedVariable) + CurrentSubclassModule_, ConditionalDerivedVariable + ) if subclass is not None: return subclass(*args_, **kwargs_) if ConditionalDerivedVariable.subclass: return ConditionalDerivedVariable.subclass(*args_, **kwargs_) else: return ConditionalDerivedVariable(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): - if ( - self.Case or - super(ConditionalDerivedVariable, self).hasContent_() - ): + if self.Case or super(ConditionalDerivedVariable, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ConditionalDerivedVariable', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('ConditionalDerivedVariable') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ConditionalDerivedVariable", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ConditionalDerivedVariable") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ConditionalDerivedVariable') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + 
name_="ConditionalDerivedVariable", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ConditionalDerivedVariable', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ConditionalDerivedVariable", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ConditionalDerivedVariable'): - super(ConditionalDerivedVariable, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ConditionalDerivedVariable') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ConditionalDerivedVariable', fromsubclass_=False, pretty_print=True): - super(ConditionalDerivedVariable, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ConditionalDerivedVariable", + ): + super(ConditionalDerivedVariable, self).exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ConditionalDerivedVariable", + ) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ConditionalDerivedVariable", + fromsubclass_=False, + pretty_print=True, + ): + super(ConditionalDerivedVariable, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" for Case_ in self.Case: - Case_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Case', pretty_print=pretty_print) + Case_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="Case", + pretty_print=pretty_print, + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -2159,77 +3537,143 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - super(ConditionalDerivedVariable, self).buildAttributes(node, attrs, already_processed) + super(ConditionalDerivedVariable, self).buildAttributes( + node, attrs, already_processed + ) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'Case': + if nodeName_ == "Case": obj_ = Case.factory(parent_object_=self) obj_.build(child_) self.Case.append(obj_) - obj_.original_tagname_ = 'Case' - super(ConditionalDerivedVariable, self).buildChildren(child_, node, nodeName_, True) + obj_.original_tagname_ = "Case" + super(ConditionalDerivedVariable, self).buildChildren( + child_, node, nodeName_, True + ) + + # end class ConditionalDerivedVariable class Case(GeneratedsSuper): member_data_items_ = [ - MemberSpec_('condition', 'xs:string', 0, 1, {'use': u'optional'}), - MemberSpec_('value', 'xs:string', 0, 0, {'use': u'required'}), + MemberSpec_("condition", "xs:string", 0, 1, {"use": u"optional"}), + MemberSpec_("value", "xs:string", 0, 0, {"use": u"required"}), ] subclass = None superclass = None + def 
__init__(self, condition=None, value=None, **kwargs_): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") self.condition = _cast(None, condition) self.value = _cast(None, value) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, Case) + subclass = getSubclassFromModule_(CurrentSubclassModule_, Case) if subclass is not None: return subclass(*args_, **kwargs_) if Case.subclass: return Case.subclass(*args_, **kwargs_) else: return Case(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - ): + def hasContent_(self): + if (): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Case', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('Case') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Case", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("Case") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Case') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Case" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Case', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Case'): - if self.condition is not None and 'condition' not in already_processed: - already_processed.add('condition') - outfile.write(' condition=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.condition), input_name='condition')), )) - if self.value is not None and 'value' not in already_processed: - already_processed.add('value') - outfile.write(' value=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.value), input_name='value')), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Case', fromsubclass_=False, pretty_print=True): + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="Case", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="Case" + ): + if self.condition is not None and "condition" not in already_processed: + already_processed.add("condition") + outfile.write( + " condition=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.condition), input_name="condition" + ) + ), + ) + ) + if self.value is not None and "value" not in already_processed: + already_processed.add("value") + outfile.write( + " value=%s" + % ( 
+ self.gds_encode( + self.gds_format_string( + quote_attrib(self.value), input_name="value" + ) + ), + ) + ) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Case", + fromsubclass_=False, + pretty_print=True, + ): pass + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -2237,79 +3681,146 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('condition', node) - if value is not None and 'condition' not in already_processed: - already_processed.add('condition') + value = find_attr_value_("condition", node) + if value is not None and "condition" not in already_processed: + already_processed.add("condition") self.condition = value - value = find_attr_value_('value', node) - if value is not None and 'value' not in already_processed: - already_processed.add('value') + value = find_attr_value_("value", node) + if value is not None and "value" not in already_processed: + already_processed.add("value") self.value = value + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): pass + + # end class Case class TimeDerivative(GeneratedsSuper): member_data_items_ = [ - MemberSpec_('variable', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('value', 'xs:string', 0, 0, {'use': u'required'}), + MemberSpec_("variable", "xs:string", 0, 0, {"use": u"required"}), + MemberSpec_("value", "xs:string", 0, 0, {"use": u"required"}), ] subclass = None superclass = None + def __init__(self, variable=None, value=None, **kwargs_): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") self.variable = _cast(None, variable) self.value = _cast(None, value) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, TimeDerivative) + subclass = getSubclassFromModule_(CurrentSubclassModule_, TimeDerivative) if subclass is not None: return subclass(*args_, **kwargs_) if TimeDerivative.subclass: return TimeDerivative.subclass(*args_, **kwargs_) else: return TimeDerivative(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - ): + def hasContent_(self): + if (): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='TimeDerivative', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('TimeDerivative') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="TimeDerivative", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("TimeDerivative") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='TimeDerivative') + self.exportAttributes( + outfile, level, already_processed, 
namespaceprefix_, name_="TimeDerivative" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='TimeDerivative', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='TimeDerivative'): - if self.variable is not None and 'variable' not in already_processed: - already_processed.add('variable') - outfile.write(' variable=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.variable), input_name='variable')), )) - if self.value is not None and 'value' not in already_processed: - already_processed.add('value') - outfile.write(' value=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.value), input_name='value')), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='TimeDerivative', fromsubclass_=False, pretty_print=True): + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="TimeDerivative", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="TimeDerivative", + ): + if self.variable is not None and "variable" not in already_processed: + already_processed.add("variable") + outfile.write( + " variable=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.variable), input_name="variable" + ) + ), + ) + ) + if self.value is not None and "value" not in already_processed: + already_processed.add("value") + outfile.write( + " value=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.value), input_name="value" + ) + ), + ) + ) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="TimeDerivative", + fromsubclass_=False, + pretty_print=True, + ): pass + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -2317,74 +3828,132 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('variable', node) - if value is not None and 'variable' not in already_processed: - already_processed.add('variable') + value = find_attr_value_("variable", node) + if value is not None and "variable" not in already_processed: + already_processed.add("variable") self.variable = value - value = find_attr_value_('value', node) - if value is not None and 'value' not in already_processed: - already_processed.add('value') + value = find_attr_value_("value", node) + if value is not None and "value" not in already_processed: + already_processed.add("value") self.value = value + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): pass + + # end class TimeDerivative class IncludeType(GeneratedsSuper): member_data_items_ = [ - MemberSpec_('href', 'xs:anyURI', 0, 0, {'use': u'required'}), + MemberSpec_("href", "xs:anyURI", 0, 0, {"use": u"required"}), ] subclass = None superclass = None + def __init__(self, href=None, **kwargs_): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = 
kwargs_.get("parent_object_") self.href = _cast(None, href) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, IncludeType) + subclass = getSubclassFromModule_(CurrentSubclassModule_, IncludeType) if subclass is not None: return subclass(*args_, **kwargs_) if IncludeType.subclass: return IncludeType.subclass(*args_, **kwargs_) else: return IncludeType(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - ): + def hasContent_(self): + if (): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IncludeType', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('IncludeType') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="IncludeType", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("IncludeType") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IncludeType') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="IncludeType" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='IncludeType', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='IncludeType'): - if self.href is not None and 'href' not in already_processed: - already_processed.add('href') - outfile.write(' href=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.href), input_name='href')), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IncludeType', fromsubclass_=False, pretty_print=True): + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="IncludeType", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="IncludeType", + ): + if self.href is not None and "href" not in already_processed: + already_processed.add("href") + outfile.write( + " href=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.href), input_name="href" + ) + ), + ) + ) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="IncludeType", + fromsubclass_=False, + pretty_print=True, + ): pass + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -2392,89 +3961,173 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, 
already_processed): - value = find_attr_value_('href', node) - if value is not None and 'href' not in already_processed: - already_processed.add('href') + value = find_attr_value_("href", node) + if value is not None and "href" not in already_processed: + already_processed.add("href") self.href = value + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): pass + + # end class IncludeType class Q10ConductanceScaling(GeneratedsSuper): member_data_items_ = [ - MemberSpec_('q10_factor', 'Nml2Quantity_none', 0, 0, {'use': u'required'}), - MemberSpec_('experimental_temp', 'Nml2Quantity_temperature', 0, 0, {'use': u'required'}), + MemberSpec_("q10_factor", "Nml2Quantity_none", 0, 0, {"use": u"required"}), + MemberSpec_( + "experimental_temp", "Nml2Quantity_temperature", 0, 0, {"use": u"required"} + ), ] subclass = None superclass = None + def __init__(self, q10_factor=None, experimental_temp=None, **kwargs_): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") self.q10_factor = _cast(None, q10_factor) self.experimental_temp = _cast(None, experimental_temp) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, Q10ConductanceScaling) + CurrentSubclassModule_, Q10ConductanceScaling + ) if subclass is not None: return subclass(*args_, **kwargs_) if Q10ConductanceScaling.subclass: return Q10ConductanceScaling.subclass(*args_, **kwargs_) else: return Q10ConductanceScaling(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_none(self, value): # Validate type Nml2Quantity_none, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_none_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_none_patterns_, )) - validate_Nml2Quantity_none_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?$']] + self.validate_Nml2Quantity_none_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_none_patterns_, + ) + ) + + validate_Nml2Quantity_none_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?$"] + ] + def validate_Nml2Quantity_temperature(self, value): # Validate type Nml2Quantity_temperature, a restriction on xs:string. 
if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_temperature_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_temperature_patterns_, )) - validate_Nml2Quantity_temperature_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(degC)$']] - def hasContent_(self): - if ( + self.validate_Nml2Quantity_temperature_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_temperature_patterns_, + ) + ) - ): + validate_Nml2Quantity_temperature_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(degC)$"] + ] + + def hasContent_(self): + if (): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Q10ConductanceScaling', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('Q10ConductanceScaling') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Q10ConductanceScaling", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("Q10ConductanceScaling") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Q10ConductanceScaling') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="Q10ConductanceScaling", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Q10ConductanceScaling', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Q10ConductanceScaling'): - if self.q10_factor is not None and 'q10_factor' not in already_processed: - already_processed.add('q10_factor') - outfile.write(' q10Factor=%s' % (quote_attrib(self.q10_factor), )) - if self.experimental_temp is not None and 'experimental_temp' not in already_processed: - already_processed.add('experimental_temp') - outfile.write(' experimentalTemp=%s' % (quote_attrib(self.experimental_temp), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Q10ConductanceScaling', fromsubclass_=False, pretty_print=True): + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="Q10ConductanceScaling", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="Q10ConductanceScaling", + ): + if self.q10_factor is not None and "q10_factor" not in already_processed: + already_processed.add("q10_factor") + outfile.write(" 
q10Factor=%s" % (quote_attrib(self.q10_factor),)) + if ( + self.experimental_temp is not None + and "experimental_temp" not in already_processed + ): + already_processed.add("experimental_temp") + outfile.write( + " experimentalTemp=%s" % (quote_attrib(self.experimental_temp),) + ) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Q10ConductanceScaling", + fromsubclass_=False, + pretty_print=True, + ): pass + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -2482,112 +4135,210 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('q10Factor', node) - if value is not None and 'q10Factor' not in already_processed: - already_processed.add('q10Factor') + value = find_attr_value_("q10Factor", node) + if value is not None and "q10Factor" not in already_processed: + already_processed.add("q10Factor") self.q10_factor = value - self.validate_Nml2Quantity_none(self.q10_factor) # validate type Nml2Quantity_none - value = find_attr_value_('experimentalTemp', node) - if value is not None and 'experimentalTemp' not in already_processed: - already_processed.add('experimentalTemp') + self.validate_Nml2Quantity_none( + self.q10_factor + ) # validate type Nml2Quantity_none + value = find_attr_value_("experimentalTemp", node) + if value is not None and "experimentalTemp" not in already_processed: + already_processed.add("experimentalTemp") self.experimental_temp = value - self.validate_Nml2Quantity_temperature(self.experimental_temp) # validate type Nml2Quantity_temperature + self.validate_Nml2Quantity_temperature( + self.experimental_temp + ) # validate type Nml2Quantity_temperature + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): pass + + # end class Q10ConductanceScaling class Q10Settings(GeneratedsSuper): member_data_items_ = [ - MemberSpec_('type', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('fixed_q10', 'Nml2Quantity_none', 0, 1, {'use': u'optional'}), - MemberSpec_('q10_factor', 'Nml2Quantity_none', 0, 1, {'use': u'optional'}), - MemberSpec_('experimental_temp', 'Nml2Quantity_temperature', 0, 1, {'use': u'optional'}), + MemberSpec_("type", "NmlId", 0, 0, {"use": u"required"}), + MemberSpec_("fixed_q10", "Nml2Quantity_none", 0, 1, {"use": u"optional"}), + MemberSpec_("q10_factor", "Nml2Quantity_none", 0, 1, {"use": u"optional"}), + MemberSpec_( + "experimental_temp", "Nml2Quantity_temperature", 0, 1, {"use": u"optional"} + ), ] subclass = None superclass = None - def __init__(self, type=None, fixed_q10=None, q10_factor=None, experimental_temp=None, **kwargs_): + + def __init__( + self, + type=None, + fixed_q10=None, + q10_factor=None, + experimental_temp=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") self.type = _cast(None, type) self.fixed_q10 = _cast(None, fixed_q10) self.q10_factor = _cast(None, q10_factor) self.experimental_temp = _cast(None, experimental_temp) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, Q10Settings) + subclass = getSubclassFromModule_(CurrentSubclassModule_, Q10Settings) if subclass is not None: return subclass(*args_, **kwargs_) if Q10Settings.subclass: return 
Q10Settings.subclass(*args_, **kwargs_) else: return Q10Settings(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] + self.validate_NmlId_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [[u"^[a-zA-Z_][a-zA-Z0-9_]*$"]] + def validate_Nml2Quantity_none(self, value): # Validate type Nml2Quantity_none, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_none_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_none_patterns_, )) - validate_Nml2Quantity_none_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?$']] + self.validate_Nml2Quantity_none_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_none_patterns_, + ) + ) + + validate_Nml2Quantity_none_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?$"] + ] + def validate_Nml2Quantity_temperature(self, value): # Validate type Nml2Quantity_temperature, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_temperature_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_temperature_patterns_, )) - validate_Nml2Quantity_temperature_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(degC)$']] - def hasContent_(self): - if ( + self.validate_Nml2Quantity_temperature_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_temperature_patterns_, + ) + ) - ): + validate_Nml2Quantity_temperature_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(degC)$"] + ] + + def hasContent_(self): + if (): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Q10Settings', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('Q10Settings') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Q10Settings", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("Q10Settings") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Q10Settings') + 
self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Q10Settings" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Q10Settings', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Q10Settings'): - if self.type is not None and 'type' not in already_processed: - already_processed.add('type') - outfile.write(' type=%s' % (quote_attrib(self.type), )) - if self.fixed_q10 is not None and 'fixed_q10' not in already_processed: - already_processed.add('fixed_q10') - outfile.write(' fixedQ10=%s' % (quote_attrib(self.fixed_q10), )) - if self.q10_factor is not None and 'q10_factor' not in already_processed: - already_processed.add('q10_factor') - outfile.write(' q10Factor=%s' % (quote_attrib(self.q10_factor), )) - if self.experimental_temp is not None and 'experimental_temp' not in already_processed: - already_processed.add('experimental_temp') - outfile.write(' experimentalTemp=%s' % (quote_attrib(self.experimental_temp), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Q10Settings', fromsubclass_=False, pretty_print=True): + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="Q10Settings", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="Q10Settings", + ): + if self.type is not None and "type" not in already_processed: + already_processed.add("type") + outfile.write(" type=%s" % (quote_attrib(self.type),)) + if self.fixed_q10 is not None and "fixed_q10" not in already_processed: + already_processed.add("fixed_q10") + outfile.write(" fixedQ10=%s" % (quote_attrib(self.fixed_q10),)) + if self.q10_factor is not None and "q10_factor" not in already_processed: + already_processed.add("q10_factor") + outfile.write(" q10Factor=%s" % (quote_attrib(self.q10_factor),)) + if ( + self.experimental_temp is not None + and "experimental_temp" not in already_processed + ): + already_processed.add("experimental_temp") + outfile.write( + " experimentalTemp=%s" % (quote_attrib(self.experimental_temp),) + ) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Q10Settings", + fromsubclass_=False, + pretty_print=True, + ): pass + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -2595,122 +4346,203 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('type', node) - if value is not None and 'type' not in already_processed: - already_processed.add('type') + value = find_attr_value_("type", node) + if value is not None and "type" not in already_processed: + already_processed.add("type") self.type = value - self.validate_NmlId(self.type) # validate type NmlId - value = find_attr_value_('fixedQ10', node) - if value is not None and 'fixedQ10' not in already_processed: - already_processed.add('fixedQ10') + self.validate_NmlId(self.type) # validate type NmlId + 
value = find_attr_value_("fixedQ10", node) + if value is not None and "fixedQ10" not in already_processed: + already_processed.add("fixedQ10") self.fixed_q10 = value - self.validate_Nml2Quantity_none(self.fixed_q10) # validate type Nml2Quantity_none - value = find_attr_value_('q10Factor', node) - if value is not None and 'q10Factor' not in already_processed: - already_processed.add('q10Factor') + self.validate_Nml2Quantity_none( + self.fixed_q10 + ) # validate type Nml2Quantity_none + value = find_attr_value_("q10Factor", node) + if value is not None and "q10Factor" not in already_processed: + already_processed.add("q10Factor") self.q10_factor = value - self.validate_Nml2Quantity_none(self.q10_factor) # validate type Nml2Quantity_none - value = find_attr_value_('experimentalTemp', node) - if value is not None and 'experimentalTemp' not in already_processed: - already_processed.add('experimentalTemp') + self.validate_Nml2Quantity_none( + self.q10_factor + ) # validate type Nml2Quantity_none + value = find_attr_value_("experimentalTemp", node) + if value is not None and "experimentalTemp" not in already_processed: + already_processed.add("experimentalTemp") self.experimental_temp = value - self.validate_Nml2Quantity_temperature(self.experimental_temp) # validate type Nml2Quantity_temperature + self.validate_Nml2Quantity_temperature( + self.experimental_temp + ) # validate type Nml2Quantity_temperature + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): pass + + # end class Q10Settings class HHRate(GeneratedsSuper): member_data_items_ = [ - MemberSpec_('type', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('rate', 'Nml2Quantity_pertime', 0, 1, {'use': u'optional'}), - MemberSpec_('midpoint', 'Nml2Quantity_voltage', 0, 1, {'use': u'optional'}), - MemberSpec_('scale', 'Nml2Quantity_voltage', 0, 1, {'use': u'optional'}), + MemberSpec_("type", "NmlId", 0, 0, {"use": u"required"}), + MemberSpec_("rate", "Nml2Quantity_pertime", 0, 1, {"use": u"optional"}), + MemberSpec_("midpoint", "Nml2Quantity_voltage", 0, 1, {"use": u"optional"}), + MemberSpec_("scale", "Nml2Quantity_voltage", 0, 1, {"use": u"optional"}), ] subclass = None superclass = None + def __init__(self, type=None, rate=None, midpoint=None, scale=None, **kwargs_): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") self.type = _cast(None, type) self.rate = _cast(None, rate) self.midpoint = _cast(None, midpoint) self.scale = _cast(None, scale) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, HHRate) + subclass = getSubclassFromModule_(CurrentSubclassModule_, HHRate) if subclass is not None: return subclass(*args_, **kwargs_) if HHRate.subclass: return HHRate.subclass(*args_, **kwargs_) else: return HHRate(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. 
if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] + self.validate_NmlId_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [[u"^[a-zA-Z_][a-zA-Z0-9_]*$"]] + def validate_Nml2Quantity_pertime(self, value): # Validate type Nml2Quantity_pertime, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_pertime_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_pertime_patterns_, )) - validate_Nml2Quantity_pertime_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(per_s|per_ms|Hz)$']] + self.validate_Nml2Quantity_pertime_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_pertime_patterns_, + ) + ) + + validate_Nml2Quantity_pertime_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(per_s|per_ms|Hz)$"] + ] + def validate_Nml2Quantity_voltage(self, value): # Validate type Nml2Quantity_voltage, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_voltage_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_voltage_patterns_, )) - validate_Nml2Quantity_voltage_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$']] - def hasContent_(self): - if ( + self.validate_Nml2Quantity_voltage_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_voltage_patterns_, + ) + ) - ): + validate_Nml2Quantity_voltage_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$"] + ] + + def hasContent_(self): + if (): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='HHRate', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('HHRate') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="HHRate", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("HHRate") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='HHRate') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="HHRate" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, 
namespaceprefix_, namespacedef_, name_='HHRate', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='HHRate'): - if self.type is not None and 'type' not in already_processed: - already_processed.add('type') - outfile.write(' type=%s' % (quote_attrib(self.type), )) - if self.rate is not None and 'rate' not in already_processed: - already_processed.add('rate') - outfile.write(' rate=%s' % (quote_attrib(self.rate), )) - if self.midpoint is not None and 'midpoint' not in already_processed: - already_processed.add('midpoint') - outfile.write(' midpoint=%s' % (quote_attrib(self.midpoint), )) - if self.scale is not None and 'scale' not in already_processed: - already_processed.add('scale') - outfile.write(' scale=%s' % (quote_attrib(self.scale), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='HHRate', fromsubclass_=False, pretty_print=True): + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="HHRate", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="HHRate" + ): + if self.type is not None and "type" not in already_processed: + already_processed.add("type") + outfile.write(" type=%s" % (quote_attrib(self.type),)) + if self.rate is not None and "rate" not in already_processed: + already_processed.add("rate") + outfile.write(" rate=%s" % (quote_attrib(self.rate),)) + if self.midpoint is not None and "midpoint" not in already_processed: + already_processed.add("midpoint") + outfile.write(" midpoint=%s" % (quote_attrib(self.midpoint),)) + if self.scale is not None and "scale" not in already_processed: + already_processed.add("scale") + outfile.write(" scale=%s" % (quote_attrib(self.scale),)) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="HHRate", + fromsubclass_=False, + pretty_print=True, + ): pass + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -2718,115 +4550,187 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('type', node) - if value is not None and 'type' not in already_processed: - already_processed.add('type') + value = find_attr_value_("type", node) + if value is not None and "type" not in already_processed: + already_processed.add("type") self.type = value - self.validate_NmlId(self.type) # validate type NmlId - value = find_attr_value_('rate', node) - if value is not None and 'rate' not in already_processed: - already_processed.add('rate') + self.validate_NmlId(self.type) # validate type NmlId + value = find_attr_value_("rate", node) + if value is not None and "rate" not in already_processed: + already_processed.add("rate") self.rate = value - self.validate_Nml2Quantity_pertime(self.rate) # validate type Nml2Quantity_pertime - value = find_attr_value_('midpoint', node) - if value is not None and 'midpoint' not in already_processed: - already_processed.add('midpoint') + self.validate_Nml2Quantity_pertime( + self.rate + ) # validate type 
Nml2Quantity_pertime + value = find_attr_value_("midpoint", node) + if value is not None and "midpoint" not in already_processed: + already_processed.add("midpoint") self.midpoint = value - self.validate_Nml2Quantity_voltage(self.midpoint) # validate type Nml2Quantity_voltage - value = find_attr_value_('scale', node) - if value is not None and 'scale' not in already_processed: - already_processed.add('scale') + self.validate_Nml2Quantity_voltage( + self.midpoint + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("scale", node) + if value is not None and "scale" not in already_processed: + already_processed.add("scale") self.scale = value - self.validate_Nml2Quantity_voltage(self.scale) # validate type Nml2Quantity_voltage + self.validate_Nml2Quantity_voltage( + self.scale + ) # validate type Nml2Quantity_voltage + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): pass + + # end class HHRate class HHVariable(GeneratedsSuper): member_data_items_ = [ - MemberSpec_('type', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('rate', 'xs:float', 0, 1, {'use': u'optional'}), - MemberSpec_('midpoint', 'Nml2Quantity_voltage', 0, 1, {'use': u'optional'}), - MemberSpec_('scale', 'Nml2Quantity_voltage', 0, 1, {'use': u'optional'}), + MemberSpec_("type", "NmlId", 0, 0, {"use": u"required"}), + MemberSpec_("rate", "xs:float", 0, 1, {"use": u"optional"}), + MemberSpec_("midpoint", "Nml2Quantity_voltage", 0, 1, {"use": u"optional"}), + MemberSpec_("scale", "Nml2Quantity_voltage", 0, 1, {"use": u"optional"}), ] subclass = None superclass = None + def __init__(self, type=None, rate=None, midpoint=None, scale=None, **kwargs_): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") self.type = _cast(None, type) self.rate = _cast(float, rate) self.midpoint = _cast(None, midpoint) self.scale = _cast(None, scale) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, HHVariable) + subclass = getSubclassFromModule_(CurrentSubclassModule_, HHVariable) if subclass is not None: return subclass(*args_, **kwargs_) if HHVariable.subclass: return HHVariable.subclass(*args_, **kwargs_) else: return HHVariable(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] + self.validate_NmlId_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [[u"^[a-zA-Z_][a-zA-Z0-9_]*$"]] + def validate_Nml2Quantity_voltage(self, value): # Validate type Nml2Quantity_voltage, a restriction on xs:string. 
if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_voltage_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_voltage_patterns_, )) - validate_Nml2Quantity_voltage_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$']] - def hasContent_(self): - if ( + self.validate_Nml2Quantity_voltage_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_voltage_patterns_, + ) + ) - ): + validate_Nml2Quantity_voltage_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$"] + ] + + def hasContent_(self): + if (): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='HHVariable', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('HHVariable') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="HHVariable", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("HHVariable") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='HHVariable') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="HHVariable" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='HHVariable', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='HHVariable'): - if self.type is not None and 'type' not in already_processed: - already_processed.add('type') - outfile.write(' type=%s' % (quote_attrib(self.type), )) - if self.rate is not None and 'rate' not in already_processed: - already_processed.add('rate') - outfile.write(' rate="%s"' % self.gds_format_float(self.rate, input_name='rate')) - if self.midpoint is not None and 'midpoint' not in already_processed: - already_processed.add('midpoint') - outfile.write(' midpoint=%s' % (quote_attrib(self.midpoint), )) - if self.scale is not None and 'scale' not in already_processed: - already_processed.add('scale') - outfile.write(' scale=%s' % (quote_attrib(self.scale), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='HHVariable', fromsubclass_=False, pretty_print=True): + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="HHVariable", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="HHVariable" + ): + if self.type is not None and "type" not in already_processed: + 
already_processed.add("type") + outfile.write(" type=%s" % (quote_attrib(self.type),)) + if self.rate is not None and "rate" not in already_processed: + already_processed.add("rate") + outfile.write( + ' rate="%s"' % self.gds_format_float(self.rate, input_name="rate") + ) + if self.midpoint is not None and "midpoint" not in already_processed: + already_processed.add("midpoint") + outfile.write(" midpoint=%s" % (quote_attrib(self.midpoint),)) + if self.scale is not None and "scale" not in already_processed: + already_processed.add("scale") + outfile.write(" scale=%s" % (quote_attrib(self.scale),)) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="HHVariable", + fromsubclass_=False, + pretty_print=True, + ): pass + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -2834,129 +4738,210 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('type', node) - if value is not None and 'type' not in already_processed: - already_processed.add('type') + value = find_attr_value_("type", node) + if value is not None and "type" not in already_processed: + already_processed.add("type") self.type = value - self.validate_NmlId(self.type) # validate type NmlId - value = find_attr_value_('rate', node) - if value is not None and 'rate' not in already_processed: - already_processed.add('rate') + self.validate_NmlId(self.type) # validate type NmlId + value = find_attr_value_("rate", node) + if value is not None and "rate" not in already_processed: + already_processed.add("rate") try: self.rate = float(value) except ValueError as exp: - raise ValueError('Bad float/double attribute (rate): %s' % exp) - value = find_attr_value_('midpoint', node) - if value is not None and 'midpoint' not in already_processed: - already_processed.add('midpoint') + raise ValueError("Bad float/double attribute (rate): %s" % exp) + value = find_attr_value_("midpoint", node) + if value is not None and "midpoint" not in already_processed: + already_processed.add("midpoint") self.midpoint = value - self.validate_Nml2Quantity_voltage(self.midpoint) # validate type Nml2Quantity_voltage - value = find_attr_value_('scale', node) - if value is not None and 'scale' not in already_processed: - already_processed.add('scale') + self.validate_Nml2Quantity_voltage( + self.midpoint + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("scale", node) + if value is not None and "scale" not in already_processed: + already_processed.add("scale") self.scale = value - self.validate_Nml2Quantity_voltage(self.scale) # validate type Nml2Quantity_voltage + self.validate_Nml2Quantity_voltage( + self.scale + ) # validate type Nml2Quantity_voltage + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): pass + + # end class HHVariable class HHTime(GeneratedsSuper): member_data_items_ = [ - MemberSpec_('type', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('rate', 'Nml2Quantity_time', 0, 1, {'use': u'optional'}), - MemberSpec_('midpoint', 'Nml2Quantity_voltage', 0, 1, {'use': u'optional'}), - MemberSpec_('scale', 'Nml2Quantity_voltage', 0, 1, {'use': u'optional'}), - MemberSpec_('tau', 'Nml2Quantity_time', 0, 1, {'use': u'optional'}), + MemberSpec_("type", "NmlId", 0, 0, {"use": u"required"}), + MemberSpec_("rate", "Nml2Quantity_time", 0, 1, {"use": 
u"optional"}), + MemberSpec_("midpoint", "Nml2Quantity_voltage", 0, 1, {"use": u"optional"}), + MemberSpec_("scale", "Nml2Quantity_voltage", 0, 1, {"use": u"optional"}), + MemberSpec_("tau", "Nml2Quantity_time", 0, 1, {"use": u"optional"}), ] subclass = None superclass = None - def __init__(self, type=None, rate=None, midpoint=None, scale=None, tau=None, **kwargs_): + + def __init__( + self, type=None, rate=None, midpoint=None, scale=None, tau=None, **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") self.type = _cast(None, type) self.rate = _cast(None, rate) self.midpoint = _cast(None, midpoint) self.scale = _cast(None, scale) self.tau = _cast(None, tau) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, HHTime) + subclass = getSubclassFromModule_(CurrentSubclassModule_, HHTime) if subclass is not None: return subclass(*args_, **kwargs_) if HHTime.subclass: return HHTime.subclass(*args_, **kwargs_) else: return HHTime(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] + self.validate_NmlId_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [[u"^[a-zA-Z_][a-zA-Z0-9_]*$"]] + def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] + self.validate_Nml2Quantity_time_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_time_patterns_, + ) + ) + + validate_Nml2Quantity_time_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$"] + ] + def validate_Nml2Quantity_voltage(self, value): # Validate type Nml2Quantity_voltage, a restriction on xs:string. 
if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_voltage_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_voltage_patterns_, )) - validate_Nml2Quantity_voltage_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$']] - def hasContent_(self): - if ( + self.validate_Nml2Quantity_voltage_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_voltage_patterns_, + ) + ) - ): + validate_Nml2Quantity_voltage_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$"] + ] + + def hasContent_(self): + if (): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='HHTime', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('HHTime') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="HHTime", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("HHTime") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='HHTime') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="HHTime" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='HHTime', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='HHTime'): - if self.type is not None and 'type' not in already_processed: - already_processed.add('type') - outfile.write(' type=%s' % (quote_attrib(self.type), )) - if self.rate is not None and 'rate' not in already_processed: - already_processed.add('rate') - outfile.write(' rate=%s' % (quote_attrib(self.rate), )) - if self.midpoint is not None and 'midpoint' not in already_processed: - already_processed.add('midpoint') - outfile.write(' midpoint=%s' % (quote_attrib(self.midpoint), )) - if self.scale is not None and 'scale' not in already_processed: - already_processed.add('scale') - outfile.write(' scale=%s' % (quote_attrib(self.scale), )) - if self.tau is not None and 'tau' not in already_processed: - already_processed.add('tau') - outfile.write(' tau=%s' % (quote_attrib(self.tau), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='HHTime', fromsubclass_=False, pretty_print=True): + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="HHTime", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", 
name_="HHTime" + ): + if self.type is not None and "type" not in already_processed: + already_processed.add("type") + outfile.write(" type=%s" % (quote_attrib(self.type),)) + if self.rate is not None and "rate" not in already_processed: + already_processed.add("rate") + outfile.write(" rate=%s" % (quote_attrib(self.rate),)) + if self.midpoint is not None and "midpoint" not in already_processed: + already_processed.add("midpoint") + outfile.write(" midpoint=%s" % (quote_attrib(self.midpoint),)) + if self.scale is not None and "scale" not in already_processed: + already_processed.add("scale") + outfile.write(" scale=%s" % (quote_attrib(self.scale),)) + if self.tau is not None and "tau" not in already_processed: + already_processed.add("tau") + outfile.write(" tau=%s" % (quote_attrib(self.tau),)) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="HHTime", + fromsubclass_=False, + pretty_print=True, + ): pass + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -2964,144 +4949,255 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('type', node) - if value is not None and 'type' not in already_processed: - already_processed.add('type') + value = find_attr_value_("type", node) + if value is not None and "type" not in already_processed: + already_processed.add("type") self.type = value - self.validate_NmlId(self.type) # validate type NmlId - value = find_attr_value_('rate', node) - if value is not None and 'rate' not in already_processed: - already_processed.add('rate') + self.validate_NmlId(self.type) # validate type NmlId + value = find_attr_value_("rate", node) + if value is not None and "rate" not in already_processed: + already_processed.add("rate") self.rate = value - self.validate_Nml2Quantity_time(self.rate) # validate type Nml2Quantity_time - value = find_attr_value_('midpoint', node) - if value is not None and 'midpoint' not in already_processed: - already_processed.add('midpoint') + self.validate_Nml2Quantity_time( + self.rate + ) # validate type Nml2Quantity_time + value = find_attr_value_("midpoint", node) + if value is not None and "midpoint" not in already_processed: + already_processed.add("midpoint") self.midpoint = value - self.validate_Nml2Quantity_voltage(self.midpoint) # validate type Nml2Quantity_voltage - value = find_attr_value_('scale', node) - if value is not None and 'scale' not in already_processed: - already_processed.add('scale') + self.validate_Nml2Quantity_voltage( + self.midpoint + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("scale", node) + if value is not None and "scale" not in already_processed: + already_processed.add("scale") self.scale = value - self.validate_Nml2Quantity_voltage(self.scale) # validate type Nml2Quantity_voltage - value = find_attr_value_('tau', node) - if value is not None and 'tau' not in already_processed: - already_processed.add('tau') + self.validate_Nml2Quantity_voltage( + self.scale + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("tau", node) + if value is not None and "tau" not in already_processed: + already_processed.add("tau") self.tau = value - self.validate_Nml2Quantity_time(self.tau) # validate type Nml2Quantity_time + self.validate_Nml2Quantity_time(self.tau) # validate type Nml2Quantity_time + def 
buildChildren(self, child_, node, nodeName_, fromsubclass_=False): pass + + # end class HHTime class BlockMechanism(GeneratedsSuper): member_data_items_ = [ - MemberSpec_('type', 'BlockTypes', 0, 0, {'use': u'required'}), - MemberSpec_('species', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('block_concentration', 'Nml2Quantity_concentration', 0, 0, {'use': u'required'}), - MemberSpec_('scaling_conc', 'Nml2Quantity_concentration', 0, 0, {'use': u'required'}), - MemberSpec_('scaling_volt', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), + MemberSpec_("type", "BlockTypes", 0, 0, {"use": u"required"}), + MemberSpec_("species", "NmlId", 0, 0, {"use": u"required"}), + MemberSpec_( + "block_concentration", + "Nml2Quantity_concentration", + 0, + 0, + {"use": u"required"}, + ), + MemberSpec_( + "scaling_conc", "Nml2Quantity_concentration", 0, 0, {"use": u"required"} + ), + MemberSpec_("scaling_volt", "Nml2Quantity_voltage", 0, 0, {"use": u"required"}), ] subclass = None superclass = None - def __init__(self, type=None, species=None, block_concentration=None, scaling_conc=None, scaling_volt=None, **kwargs_): + + def __init__( + self, + type=None, + species=None, + block_concentration=None, + scaling_conc=None, + scaling_volt=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") self.type = _cast(None, type) self.species = _cast(None, species) self.block_concentration = _cast(None, block_concentration) self.scaling_conc = _cast(None, scaling_conc) self.scaling_volt = _cast(None, scaling_volt) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, BlockMechanism) + subclass = getSubclassFromModule_(CurrentSubclassModule_, BlockMechanism) if subclass is not None: return subclass(*args_, **kwargs_) if BlockMechanism.subclass: return BlockMechanism.subclass(*args_, **kwargs_) else: return BlockMechanism(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_BlockTypes(self, value): # Validate type BlockTypes, a restriction on xs:string. if value is not None and Validate_simpletypes_: value = str(value) - enumerations = ['voltageConcDepBlockMechanism'] + enumerations = ["voltageConcDepBlockMechanism"] enumeration_respectee = False for enum in enumerations: if value == enum: enumeration_respectee = True break if not enumeration_respectee: - warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on BlockTypes' % {"value" : value.encode("utf-8")} ) + warnings_.warn( + 'Value "%(value)s" does not match xsd enumeration restriction on BlockTypes' + % {"value": value.encode("utf-8")} + ) + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] + self.validate_NmlId_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [[u"^[a-zA-Z_][a-zA-Z0-9_]*$"]] + def validate_Nml2Quantity_concentration(self, value): # Validate type Nml2Quantity_concentration, a restriction on xs:string. 
if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_concentration_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_concentration_patterns_, )) - validate_Nml2Quantity_concentration_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(mol_per_m3|mol_per_cm3|M|mM)$']] + self.validate_Nml2Quantity_concentration_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_concentration_patterns_, + ) + ) + + validate_Nml2Quantity_concentration_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(mol_per_m3|mol_per_cm3|M|mM)$"] + ] + def validate_Nml2Quantity_voltage(self, value): # Validate type Nml2Quantity_voltage, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_voltage_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_voltage_patterns_, )) - validate_Nml2Quantity_voltage_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$']] - def hasContent_(self): - if ( + self.validate_Nml2Quantity_voltage_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_voltage_patterns_, + ) + ) - ): + validate_Nml2Quantity_voltage_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$"] + ] + + def hasContent_(self): + if (): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BlockMechanism', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('BlockMechanism') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="BlockMechanism", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("BlockMechanism") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BlockMechanism') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="BlockMechanism" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BlockMechanism', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BlockMechanism'): - if self.type is not None and 'type' not in already_processed: - already_processed.add('type') - outfile.write(' type=%s' % (quote_attrib(self.type), )) - if self.species is not None and 'species' not in already_processed: - already_processed.add('species') - outfile.write(' species=%s' % 
(quote_attrib(self.species), )) - if self.block_concentration is not None and 'block_concentration' not in already_processed: - already_processed.add('block_concentration') - outfile.write(' blockConcentration=%s' % (quote_attrib(self.block_concentration), )) - if self.scaling_conc is not None and 'scaling_conc' not in already_processed: - already_processed.add('scaling_conc') - outfile.write(' scalingConc=%s' % (quote_attrib(self.scaling_conc), )) - if self.scaling_volt is not None and 'scaling_volt' not in already_processed: - already_processed.add('scaling_volt') - outfile.write(' scalingVolt=%s' % (quote_attrib(self.scaling_volt), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BlockMechanism', fromsubclass_=False, pretty_print=True): + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="BlockMechanism", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="BlockMechanism", + ): + if self.type is not None and "type" not in already_processed: + already_processed.add("type") + outfile.write(" type=%s" % (quote_attrib(self.type),)) + if self.species is not None and "species" not in already_processed: + already_processed.add("species") + outfile.write(" species=%s" % (quote_attrib(self.species),)) + if ( + self.block_concentration is not None + and "block_concentration" not in already_processed + ): + already_processed.add("block_concentration") + outfile.write( + " blockConcentration=%s" % (quote_attrib(self.block_concentration),) + ) + if self.scaling_conc is not None and "scaling_conc" not in already_processed: + already_processed.add("scaling_conc") + outfile.write(" scalingConc=%s" % (quote_attrib(self.scaling_conc),)) + if self.scaling_volt is not None and "scaling_volt" not in already_processed: + already_processed.add("scaling_volt") + outfile.write(" scalingVolt=%s" % (quote_attrib(self.scaling_volt),)) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="BlockMechanism", + fromsubclass_=False, + pretty_print=True, + ): pass + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -3109,132 +5205,225 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('type', node) - if value is not None and 'type' not in already_processed: - already_processed.add('type') + value = find_attr_value_("type", node) + if value is not None and "type" not in already_processed: + already_processed.add("type") self.type = value - self.validate_BlockTypes(self.type) # validate type BlockTypes - value = find_attr_value_('species', node) - if value is not None and 'species' not in already_processed: - already_processed.add('species') + self.validate_BlockTypes(self.type) # validate type BlockTypes + value = find_attr_value_("species", node) + if value is not None and "species" not in already_processed: + already_processed.add("species") self.species = value - self.validate_NmlId(self.species) # validate type NmlId - value = find_attr_value_('blockConcentration', node) - if value is not None and 'blockConcentration' not in already_processed: 
- already_processed.add('blockConcentration') + self.validate_NmlId(self.species) # validate type NmlId + value = find_attr_value_("blockConcentration", node) + if value is not None and "blockConcentration" not in already_processed: + already_processed.add("blockConcentration") self.block_concentration = value - self.validate_Nml2Quantity_concentration(self.block_concentration) # validate type Nml2Quantity_concentration - value = find_attr_value_('scalingConc', node) - if value is not None and 'scalingConc' not in already_processed: - already_processed.add('scalingConc') + self.validate_Nml2Quantity_concentration( + self.block_concentration + ) # validate type Nml2Quantity_concentration + value = find_attr_value_("scalingConc", node) + if value is not None and "scalingConc" not in already_processed: + already_processed.add("scalingConc") self.scaling_conc = value - self.validate_Nml2Quantity_concentration(self.scaling_conc) # validate type Nml2Quantity_concentration - value = find_attr_value_('scalingVolt', node) - if value is not None and 'scalingVolt' not in already_processed: - already_processed.add('scalingVolt') + self.validate_Nml2Quantity_concentration( + self.scaling_conc + ) # validate type Nml2Quantity_concentration + value = find_attr_value_("scalingVolt", node) + if value is not None and "scalingVolt" not in already_processed: + already_processed.add("scalingVolt") self.scaling_volt = value - self.validate_Nml2Quantity_voltage(self.scaling_volt) # validate type Nml2Quantity_voltage + self.validate_Nml2Quantity_voltage( + self.scaling_volt + ) # validate type Nml2Quantity_voltage + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): pass + + # end class BlockMechanism class PlasticityMechanism(GeneratedsSuper): member_data_items_ = [ - MemberSpec_('type', 'PlasticityTypes', 0, 0, {'use': u'required'}), - MemberSpec_('init_release_prob', 'ZeroToOne', 0, 0, {'use': u'required'}), - MemberSpec_('tau_rec', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('tau_fac', 'Nml2Quantity_time', 0, 1, {'use': u'optional'}), + MemberSpec_("type", "PlasticityTypes", 0, 0, {"use": u"required"}), + MemberSpec_("init_release_prob", "ZeroToOne", 0, 0, {"use": u"required"}), + MemberSpec_("tau_rec", "Nml2Quantity_time", 0, 0, {"use": u"required"}), + MemberSpec_("tau_fac", "Nml2Quantity_time", 0, 1, {"use": u"optional"}), ] subclass = None superclass = None - def __init__(self, type=None, init_release_prob=None, tau_rec=None, tau_fac=None, **kwargs_): + + def __init__( + self, type=None, init_release_prob=None, tau_rec=None, tau_fac=None, **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") self.type = _cast(None, type) self.init_release_prob = _cast(float, init_release_prob) self.tau_rec = _cast(None, tau_rec) self.tau_fac = _cast(None, tau_fac) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, PlasticityMechanism) + CurrentSubclassModule_, PlasticityMechanism + ) if subclass is not None: return subclass(*args_, **kwargs_) if PlasticityMechanism.subclass: return PlasticityMechanism.subclass(*args_, **kwargs_) else: return PlasticityMechanism(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_PlasticityTypes(self, value): # Validate type PlasticityTypes, a restriction on xs:string. 
if value is not None and Validate_simpletypes_: value = str(value) - enumerations = ['tsodyksMarkramDepMechanism', 'tsodyksMarkramDepFacMechanism'] + enumerations = [ + "tsodyksMarkramDepMechanism", + "tsodyksMarkramDepFacMechanism", + ] enumeration_respectee = False for enum in enumerations: if value == enum: enumeration_respectee = True break if not enumeration_respectee: - warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on PlasticityTypes' % {"value" : value.encode("utf-8")} ) + warnings_.warn( + 'Value "%(value)s" does not match xsd enumeration restriction on PlasticityTypes' + % {"value": value.encode("utf-8")} + ) + def validate_ZeroToOne(self, value): # Validate type ZeroToOne, a restriction on xs:float. if value is not None and Validate_simpletypes_: if value < 0: - warnings_.warn('Value "%(value)s" does not match xsd minInclusive restriction on ZeroToOne' % {"value" : value} ) + warnings_.warn( + 'Value "%(value)s" does not match xsd minInclusive restriction on ZeroToOne' + % {"value": value} + ) if value > 1: - warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on ZeroToOne' % {"value" : value} ) + warnings_.warn( + 'Value "%(value)s" does not match xsd maxInclusive restriction on ZeroToOne' + % {"value": value} + ) + def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] - def hasContent_(self): - if ( + self.validate_Nml2Quantity_time_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_time_patterns_, + ) + ) - ): + validate_Nml2Quantity_time_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$"] + ] + + def hasContent_(self): + if (): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PlasticityMechanism', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('PlasticityMechanism') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="PlasticityMechanism", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("PlasticityMechanism") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PlasticityMechanism') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="PlasticityMechanism", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, 
name_='PlasticityMechanism', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PlasticityMechanism'): - if self.type is not None and 'type' not in already_processed: - already_processed.add('type') - outfile.write(' type=%s' % (quote_attrib(self.type), )) - if self.init_release_prob is not None and 'init_release_prob' not in already_processed: - already_processed.add('init_release_prob') - outfile.write(' initReleaseProb=%s' % (quote_attrib(self.init_release_prob), )) - if self.tau_rec is not None and 'tau_rec' not in already_processed: - already_processed.add('tau_rec') - outfile.write(' tauRec=%s' % (quote_attrib(self.tau_rec), )) - if self.tau_fac is not None and 'tau_fac' not in already_processed: - already_processed.add('tau_fac') - outfile.write(' tauFac=%s' % (quote_attrib(self.tau_fac), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PlasticityMechanism', fromsubclass_=False, pretty_print=True): + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="PlasticityMechanism", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="PlasticityMechanism", + ): + if self.type is not None and "type" not in already_processed: + already_processed.add("type") + outfile.write(" type=%s" % (quote_attrib(self.type),)) + if ( + self.init_release_prob is not None + and "init_release_prob" not in already_processed + ): + already_processed.add("init_release_prob") + outfile.write( + " initReleaseProb=%s" % (quote_attrib(self.init_release_prob),) + ) + if self.tau_rec is not None and "tau_rec" not in already_processed: + already_processed.add("tau_rec") + outfile.write(" tauRec=%s" % (quote_attrib(self.tau_rec),)) + if self.tau_fac is not None and "tau_fac" not in already_processed: + already_processed.add("tau_fac") + outfile.write(" tauFac=%s" % (quote_attrib(self.tau_fac),)) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="PlasticityMechanism", + fromsubclass_=False, + pretty_print=True, + ): pass + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -3242,105 +5431,168 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('type', node) - if value is not None and 'type' not in already_processed: - already_processed.add('type') + value = find_attr_value_("type", node) + if value is not None and "type" not in already_processed: + already_processed.add("type") self.type = value - self.validate_PlasticityTypes(self.type) # validate type PlasticityTypes - value = find_attr_value_('initReleaseProb', node) - if value is not None and 'initReleaseProb' not in already_processed: - already_processed.add('initReleaseProb') + self.validate_PlasticityTypes(self.type) # validate type PlasticityTypes + value = find_attr_value_("initReleaseProb", node) + if value is not None and "initReleaseProb" not in already_processed: + already_processed.add("initReleaseProb") try: 
self.init_release_prob = float(value) except ValueError as exp: - raise ValueError('Bad float/double attribute (initReleaseProb): %s' % exp) - self.validate_ZeroToOne(self.init_release_prob) # validate type ZeroToOne - value = find_attr_value_('tauRec', node) - if value is not None and 'tauRec' not in already_processed: - already_processed.add('tauRec') + raise ValueError( + "Bad float/double attribute (initReleaseProb): %s" % exp + ) + self.validate_ZeroToOne(self.init_release_prob) # validate type ZeroToOne + value = find_attr_value_("tauRec", node) + if value is not None and "tauRec" not in already_processed: + already_processed.add("tauRec") self.tau_rec = value - self.validate_Nml2Quantity_time(self.tau_rec) # validate type Nml2Quantity_time - value = find_attr_value_('tauFac', node) - if value is not None and 'tauFac' not in already_processed: - already_processed.add('tauFac') + self.validate_Nml2Quantity_time( + self.tau_rec + ) # validate type Nml2Quantity_time + value = find_attr_value_("tauFac", node) + if value is not None and "tauFac" not in already_processed: + already_processed.add("tauFac") self.tau_fac = value - self.validate_Nml2Quantity_time(self.tau_fac) # validate type Nml2Quantity_time + self.validate_Nml2Quantity_time( + self.tau_fac + ) # validate type Nml2Quantity_time + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): pass + + # end class PlasticityMechanism class SegmentParent(GeneratedsSuper): member_data_items_ = [ - MemberSpec_('segments', 'NonNegativeInteger', 0, 0, {'use': u'required'}), - MemberSpec_('fraction_along', 'ZeroToOne', 0, 1, {'use': u'optional'}), + MemberSpec_("segments", "NonNegativeInteger", 0, 0, {"use": u"required"}), + MemberSpec_("fraction_along", "ZeroToOne", 0, 1, {"use": u"optional"}), ] subclass = None superclass = None - def __init__(self, segments=None, fraction_along='1', **kwargs_): + + def __init__(self, segments=None, fraction_along="1", **kwargs_): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") self.segments = _cast(int, segments) self.fraction_along = _cast(float, fraction_along) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, SegmentParent) + subclass = getSubclassFromModule_(CurrentSubclassModule_, SegmentParent) if subclass is not None: return subclass(*args_, **kwargs_) if SegmentParent.subclass: return SegmentParent.subclass(*args_, **kwargs_) else: return SegmentParent(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NonNegativeInteger(self, value): # Validate type NonNegativeInteger, a restriction on xs:nonNegativeInteger. if value is not None and Validate_simpletypes_: pass + def validate_ZeroToOne(self, value): # Validate type ZeroToOne, a restriction on xs:float. 
if value is not None and Validate_simpletypes_: if value < 0: - warnings_.warn('Value "%(value)s" does not match xsd minInclusive restriction on ZeroToOne' % {"value" : value} ) + warnings_.warn( + 'Value "%(value)s" does not match xsd minInclusive restriction on ZeroToOne' + % {"value": value} + ) if value > 1: - warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on ZeroToOne' % {"value" : value} ) - def hasContent_(self): - if ( + warnings_.warn( + 'Value "%(value)s" does not match xsd maxInclusive restriction on ZeroToOne' + % {"value": value} + ) - ): + def hasContent_(self): + if (): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SegmentParent', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('SegmentParent') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="SegmentParent", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("SegmentParent") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SegmentParent') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="SegmentParent" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SegmentParent', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SegmentParent'): - if self.segments is not None and 'segments' not in already_processed: - already_processed.add('segments') - outfile.write(' segment=%s' % (quote_attrib(self.segments), )) - if self.fraction_along != 1 and 'fraction_along' not in already_processed: - already_processed.add('fraction_along') - outfile.write(' fractionAlong=%s' % (quote_attrib(self.fraction_along), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SegmentParent', fromsubclass_=False, pretty_print=True): + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="SegmentParent", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="SegmentParent", + ): + if self.segments is not None and "segments" not in already_processed: + already_processed.add("segments") + outfile.write(" segment=%s" % (quote_attrib(self.segments),)) + if self.fraction_along != 1 and "fraction_along" not in already_processed: + already_processed.add("fraction_along") + outfile.write(" fractionAlong=%s" % (quote_attrib(self.fraction_along),)) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="SegmentParent", + 
fromsubclass_=False, + pretty_print=True, + ): pass + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -3348,105 +5600,161 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('segment', node) - if value is not None and 'segment' not in already_processed: - already_processed.add('segment') + value = find_attr_value_("segment", node) + if value is not None and "segment" not in already_processed: + already_processed.add("segment") try: self.segments = int(value) except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + raise_parse_error(node, "Bad integer attribute: %s" % exp) if self.segments < 0: - raise_parse_error(node, 'Invalid NonNegativeInteger') - self.validate_NonNegativeInteger(self.segments) # validate type NonNegativeInteger - value = find_attr_value_('fractionAlong', node) - if value is not None and 'fractionAlong' not in already_processed: - already_processed.add('fractionAlong') + raise_parse_error(node, "Invalid NonNegativeInteger") + self.validate_NonNegativeInteger( + self.segments + ) # validate type NonNegativeInteger + value = find_attr_value_("fractionAlong", node) + if value is not None and "fractionAlong" not in already_processed: + already_processed.add("fractionAlong") try: self.fraction_along = float(value) except ValueError as exp: - raise ValueError('Bad float/double attribute (fractionAlong): %s' % exp) - self.validate_ZeroToOne(self.fraction_along) # validate type ZeroToOne + raise ValueError("Bad float/double attribute (fractionAlong): %s" % exp) + self.validate_ZeroToOne(self.fraction_along) # validate type ZeroToOne + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): pass + + # end class SegmentParent class Point3DWithDiam(GeneratedsSuper): """A 3D point with diameter.""" + member_data_items_ = [ - MemberSpec_('x', 'xs:double', 0, 0, {'use': u'required'}), - MemberSpec_('y', 'xs:double', 0, 0, {'use': u'required'}), - MemberSpec_('z', 'xs:double', 0, 0, {'use': u'required'}), - MemberSpec_('diameter', 'DoubleGreaterThanZero', 0, 0, {'use': u'required'}), + MemberSpec_("x", "xs:double", 0, 0, {"use": u"required"}), + MemberSpec_("y", "xs:double", 0, 0, {"use": u"required"}), + MemberSpec_("z", "xs:double", 0, 0, {"use": u"required"}), + MemberSpec_("diameter", "DoubleGreaterThanZero", 0, 0, {"use": u"required"}), ] subclass = None superclass = None + def __init__(self, x=None, y=None, z=None, diameter=None, **kwargs_): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") self.x = _cast(float, x) self.y = _cast(float, y) self.z = _cast(float, z) self.diameter = _cast(float, diameter) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, Point3DWithDiam) + subclass = getSubclassFromModule_(CurrentSubclassModule_, Point3DWithDiam) if subclass is not None: return subclass(*args_, **kwargs_) if Point3DWithDiam.subclass: return Point3DWithDiam.subclass(*args_, **kwargs_) else: return Point3DWithDiam(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_DoubleGreaterThanZero(self, value): # Validate type DoubleGreaterThanZero, a restriction on xs:double. 
if value is not None and Validate_simpletypes_: if value <= 0: - warnings_.warn('Value "%(value)s" does not match xsd minExclusive restriction on DoubleGreaterThanZero' % {"value" : value} ) - def hasContent_(self): - if ( + warnings_.warn( + 'Value "%(value)s" does not match xsd minExclusive restriction on DoubleGreaterThanZero' + % {"value": value} + ) - ): + def hasContent_(self): + if (): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Point3DWithDiam', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('Point3DWithDiam') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Point3DWithDiam", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("Point3DWithDiam") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Point3DWithDiam') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Point3DWithDiam" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Point3DWithDiam', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Point3DWithDiam'): - if self.x is not None and 'x' not in already_processed: - already_processed.add('x') - outfile.write(' x="%s"' % self.gds_format_double(self.x, input_name='x')) - if self.y is not None and 'y' not in already_processed: - already_processed.add('y') - outfile.write(' y="%s"' % self.gds_format_double(self.y, input_name='y')) - if self.z is not None and 'z' not in already_processed: - already_processed.add('z') - outfile.write(' z="%s"' % self.gds_format_double(self.z, input_name='z')) - if self.diameter is not None and 'diameter' not in already_processed: - already_processed.add('diameter') - outfile.write(' diameter=%s' % (quote_attrib(self.diameter), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Point3DWithDiam', fromsubclass_=False, pretty_print=True): + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="Point3DWithDiam", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="Point3DWithDiam", + ): + if self.x is not None and "x" not in already_processed: + already_processed.add("x") + outfile.write(' x="%s"' % self.gds_format_double(self.x, input_name="x")) + if self.y is not None and "y" not in already_processed: + already_processed.add("y") + outfile.write(' y="%s"' % self.gds_format_double(self.y, input_name="y")) + if self.z is not None and "z" not in already_processed: + 
already_processed.add("z") + outfile.write(' z="%s"' % self.gds_format_double(self.z, input_name="z")) + if self.diameter is not None and "diameter" not in already_processed: + already_processed.add("diameter") + outfile.write(" diameter=%s" % (quote_attrib(self.diameter),)) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Point3DWithDiam", + fromsubclass_=False, + pretty_print=True, + ): pass + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -3454,42 +5762,56 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('x', node) - if value is not None and 'x' not in already_processed: - already_processed.add('x') + value = find_attr_value_("x", node) + if value is not None and "x" not in already_processed: + already_processed.add("x") try: self.x = float(value) except ValueError as exp: - raise ValueError('Bad float/double attribute (x): %s' % exp) - value = find_attr_value_('y', node) - if value is not None and 'y' not in already_processed: - already_processed.add('y') + raise ValueError("Bad float/double attribute (x): %s" % exp) + value = find_attr_value_("y", node) + if value is not None and "y" not in already_processed: + already_processed.add("y") try: self.y = float(value) except ValueError as exp: - raise ValueError('Bad float/double attribute (y): %s' % exp) - value = find_attr_value_('z', node) - if value is not None and 'z' not in already_processed: - already_processed.add('z') + raise ValueError("Bad float/double attribute (y): %s" % exp) + value = find_attr_value_("z", node) + if value is not None and "z" not in already_processed: + already_processed.add("z") try: self.z = float(value) except ValueError as exp: - raise ValueError('Bad float/double attribute (z): %s' % exp) - value = find_attr_value_('diameter', node) - if value is not None and 'diameter' not in already_processed: - already_processed.add('diameter') + raise ValueError("Bad float/double attribute (z): %s" % exp) + value = find_attr_value_("diameter", node) + if value is not None and "diameter" not in already_processed: + already_processed.add("diameter") try: self.diameter = float(value) except ValueError as exp: - raise ValueError('Bad float/double attribute (diameter): %s' % exp) - self.validate_DoubleGreaterThanZero(self.diameter) # validate type DoubleGreaterThanZero + raise ValueError("Bad float/double attribute (diameter): %s" % exp) + self.validate_DoubleGreaterThanZero( + self.diameter + ) # validate type DoubleGreaterThanZero + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): pass def __str__(self): - return "("+str(self.x)+", "+str(self.y)+", "+str(self.z)+"), diam "+str(self.diameter)+"um" + return ( + "(" + + str(self.x) + + ", " + + str(self.y) + + ", " + + str(self.z) + + "), diam " + + str(self.diameter) + + "um" + ) def __repr__(self): @@ -3511,140 +5833,253 @@ def distance_to(self, other_3d_point): b_y = other_3d_point.y b_z = other_3d_point.z - distance = ((a_x-b_x)**2 + (a_y-b_y)**2 + (a_z-b_z)**2)**(0.5) + distance = ((a_x - b_x) ** 2 + (a_y - b_y) ** 2 + (a_z - b_z) ** 2) ** (0.5) return distance + # end class Point3DWithDiam class ProximalDetails(GeneratedsSuper): member_data_items_ = [ - MemberSpec_('translation_start', 'xs:double', 0, 0, {'use': u'required'}), + 
MemberSpec_("translation_start", "xs:double", 0, 0, {"use": u"required"}), ] subclass = None superclass = None + def __init__(self, translation_start=None, **kwargs_): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") self.translation_start = _cast(float, translation_start) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, ProximalDetails) + subclass = getSubclassFromModule_(CurrentSubclassModule_, ProximalDetails) if subclass is not None: return subclass(*args_, **kwargs_) if ProximalDetails.subclass: return ProximalDetails.subclass(*args_, **kwargs_) else: return ProximalDetails(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - ): + def hasContent_(self): + if (): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ProximalDetails', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('ProximalDetails') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ProximalDetails", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ProximalDetails") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ProximalDetails') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="ProximalDetails" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ProximalDetails', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ProximalDetails'): - if self.translation_start is not None and 'translation_start' not in already_processed: - already_processed.add('translation_start') - outfile.write(' translationStart="%s"' % self.gds_format_double(self.translation_start, input_name='translationStart')) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ProximalDetails', fromsubclass_=False, pretty_print=True): - pass - def build(self, node): - already_processed = set() + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ProximalDetails", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ProximalDetails", + ): + if ( + self.translation_start is not None + and "translation_start" not in already_processed + ): + already_processed.add("translation_start") + outfile.write( + ' translationStart="%s"' + % self.gds_format_double( + self.translation_start, 
input_name="translationStart" + ) + ) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ProximalDetails", + fromsubclass_=False, + pretty_print=True, + ): + pass + + def build(self, node): + already_processed = set() self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('translationStart', node) - if value is not None and 'translationStart' not in already_processed: - already_processed.add('translationStart') + value = find_attr_value_("translationStart", node) + if value is not None and "translationStart" not in already_processed: + already_processed.add("translationStart") try: self.translation_start = float(value) except ValueError as exp: - raise ValueError('Bad float/double attribute (translationStart): %s' % exp) + raise ValueError( + "Bad float/double attribute (translationStart): %s" % exp + ) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): pass + + # end class ProximalDetails class DistalDetails(GeneratedsSuper): member_data_items_ = [ - MemberSpec_('normalization_end', 'xs:double', 0, 0, {'use': u'required'}), + MemberSpec_("normalization_end", "xs:double", 0, 0, {"use": u"required"}), ] subclass = None superclass = None + def __init__(self, normalization_end=None, **kwargs_): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") self.normalization_end = _cast(float, normalization_end) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, DistalDetails) + subclass = getSubclassFromModule_(CurrentSubclassModule_, DistalDetails) if subclass is not None: return subclass(*args_, **kwargs_) if DistalDetails.subclass: return DistalDetails.subclass(*args_, **kwargs_) else: return DistalDetails(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - ): + def hasContent_(self): + if (): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='DistalDetails', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('DistalDetails') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="DistalDetails", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("DistalDetails") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='DistalDetails') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="DistalDetails" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='DistalDetails', pretty_print=pretty_print) - outfile.write('%s' % 
(namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='DistalDetails'): - if self.normalization_end is not None and 'normalization_end' not in already_processed: - already_processed.add('normalization_end') - outfile.write(' normalizationEnd="%s"' % self.gds_format_double(self.normalization_end, input_name='normalizationEnd')) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='DistalDetails', fromsubclass_=False, pretty_print=True): + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="DistalDetails", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="DistalDetails", + ): + if ( + self.normalization_end is not None + and "normalization_end" not in already_processed + ): + already_processed.add("normalization_end") + outfile.write( + ' normalizationEnd="%s"' + % self.gds_format_double( + self.normalization_end, input_name="normalizationEnd" + ) + ) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="DistalDetails", + fromsubclass_=False, + pretty_print=True, + ): pass + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -3652,77 +6087,124 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('normalizationEnd', node) - if value is not None and 'normalizationEnd' not in already_processed: - already_processed.add('normalizationEnd') + value = find_attr_value_("normalizationEnd", node) + if value is not None and "normalizationEnd" not in already_processed: + already_processed.add("normalizationEnd") try: self.normalization_end = float(value) except ValueError as exp: - raise ValueError('Bad float/double attribute (normalizationEnd): %s' % exp) + raise ValueError( + "Bad float/double attribute (normalizationEnd): %s" % exp + ) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): pass + + # end class DistalDetails class Member(GeneratedsSuper): member_data_items_ = [ - MemberSpec_('segments', 'NonNegativeInteger', 0, 0, {'use': u'required'}), + MemberSpec_("segments", "NonNegativeInteger", 0, 0, {"use": u"required"}), ] subclass = None superclass = None + def __init__(self, segments=None, **kwargs_): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") self.segments = _cast(int, segments) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, Member) + subclass = getSubclassFromModule_(CurrentSubclassModule_, Member) if subclass is not None: return subclass(*args_, **kwargs_) if Member.subclass: return Member.subclass(*args_, **kwargs_) else: return Member(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NonNegativeInteger(self, value): # Validate type NonNegativeInteger, a restriction on xs:nonNegativeInteger. 
if value is not None and Validate_simpletypes_: pass - def hasContent_(self): - if ( - ): + def hasContent_(self): + if (): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Member', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('Member') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Member", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("Member") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Member') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Member" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Member', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Member'): - if self.segments is not None and 'segments' not in already_processed: - already_processed.add('segments') - outfile.write(' segment=%s' % (quote_attrib(self.segments), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Member', fromsubclass_=False, pretty_print=True): + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="Member", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="Member" + ): + if self.segments is not None and "segments" not in already_processed: + already_processed.add("segments") + outfile.write(" segment=%s" % (quote_attrib(self.segments),)) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Member", + fromsubclass_=False, + pretty_print=True, + ): pass + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -3730,83 +6212,141 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('segment', node) - if value is not None and 'segment' not in already_processed: - already_processed.add('segment') + value = find_attr_value_("segment", node) + if value is not None and "segment" not in already_processed: + already_processed.add("segment") try: self.segments = int(value) except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + raise_parse_error(node, "Bad integer attribute: %s" % exp) if self.segments < 0: - raise_parse_error(node, 'Invalid NonNegativeInteger') - self.validate_NonNegativeInteger(self.segments) # validate type 
NonNegativeInteger + raise_parse_error(node, "Invalid NonNegativeInteger") + self.validate_NonNegativeInteger( + self.segments + ) # validate type NonNegativeInteger + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): pass + + # end class Member class Include(GeneratedsSuper): member_data_items_ = [ - MemberSpec_('segment_groups', 'NmlId', 0, 0, {'use': u'required'}), + MemberSpec_("segment_groups", "NmlId", 0, 0, {"use": u"required"}), ] subclass = None superclass = None + def __init__(self, segment_groups=None, **kwargs_): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") self.segment_groups = _cast(None, segment_groups) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, Include) + subclass = getSubclassFromModule_(CurrentSubclassModule_, Include) if subclass is not None: return subclass(*args_, **kwargs_) if Include.subclass: return Include.subclass(*args_, **kwargs_) else: return Include(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] - def hasContent_(self): - if ( + self.validate_NmlId_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_NmlId_patterns_, + ) + ) - ): + validate_NmlId_patterns_ = [[u"^[a-zA-Z_][a-zA-Z0-9_]*$"]] + + def hasContent_(self): + if (): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Include', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('Include') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Include", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("Include") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Include') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Include" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Include', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Include'): - if self.segment_groups is not None and 'segment_groups' not in already_processed: - already_processed.add('segment_groups') - outfile.write(' segmentGroup=%s' % 
(quote_attrib(self.segment_groups), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Include', fromsubclass_=False, pretty_print=True): + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="Include", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="Include" + ): + if ( + self.segment_groups is not None + and "segment_groups" not in already_processed + ): + already_processed.add("segment_groups") + outfile.write(" segmentGroup=%s" % (quote_attrib(self.segment_groups),)) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Include", + fromsubclass_=False, + pretty_print=True, + ): pass + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -3814,80 +6354,151 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('segmentGroup', node) - if value is not None and 'segmentGroup' not in already_processed: - already_processed.add('segmentGroup') + value = find_attr_value_("segmentGroup", node) + if value is not None and "segmentGroup" not in already_processed: + already_processed.add("segmentGroup") self.segment_groups = value - self.validate_NmlId(self.segment_groups) # validate type NmlId + self.validate_NmlId(self.segment_groups) # validate type NmlId + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): pass + + # end class Include class Path(GeneratedsSuper): member_data_items_ = [ - MemberSpec_('from_', 'SegmentEndPoint', 0, 1, {u'type': u'SegmentEndPoint', u'name': u'from', u'minOccurs': u'0'}, None), - MemberSpec_('to', 'SegmentEndPoint', 0, 1, {u'type': u'SegmentEndPoint', u'name': u'to', u'minOccurs': u'0'}, None), + MemberSpec_( + "from_", + "SegmentEndPoint", + 0, + 1, + {u"type": u"SegmentEndPoint", u"name": u"from", u"minOccurs": u"0"}, + None, + ), + MemberSpec_( + "to", + "SegmentEndPoint", + 0, + 1, + {u"type": u"SegmentEndPoint", u"name": u"to", u"minOccurs": u"0"}, + None, + ), ] subclass = None superclass = None + def __init__(self, from_=None, to=None, **kwargs_): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") self.from_ = from_ self.to = to + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, Path) + subclass = getSubclassFromModule_(CurrentSubclassModule_, Path) if subclass is not None: return subclass(*args_, **kwargs_) if Path.subclass: return Path.subclass(*args_, **kwargs_) else: return Path(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): - if ( - self.from_ is not None or - self.to is not None - ): + if self.from_ is not None or self.to is not None: return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Path', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('Path') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Path", + pretty_print=True, + ): + imported_ns_def_ = 
GenerateDSNamespaceDefs_.get("Path") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Path') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Path" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Path', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="Path", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Path'): + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="Path" + ): pass - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Path', fromsubclass_=False, pretty_print=True): + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Path", + fromsubclass_=False, + pretty_print=True, + ): if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.from_ is not None: - self.from_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='from', pretty_print=pretty_print) + self.from_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="from", + pretty_print=pretty_print, + ) if self.to is not None: - self.to.export(outfile, level, namespaceprefix_, namespacedef_='', name_='to', pretty_print=pretty_print) + self.to.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="to", + pretty_print=pretty_print, + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -3895,85 +6506,156 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'from': + if nodeName_ == "from": obj_ = SegmentEndPoint.factory(parent_object_=self) obj_.build(child_) self.from_ = obj_ - obj_.original_tagname_ = 'from' - elif nodeName_ == 'to': + obj_.original_tagname_ = "from" + elif nodeName_ == "to": obj_ = SegmentEndPoint.factory(parent_object_=self) obj_.build(child_) self.to = obj_ - obj_.original_tagname_ = 'to' + obj_.original_tagname_ = "to" + + # end class Path class SubTree(GeneratedsSuper): member_data_items_ = [ - MemberSpec_('from_', 'SegmentEndPoint', 0, 1, {u'type': u'SegmentEndPoint', u'name': u'from', u'minOccurs': u'0'}, 3), - MemberSpec_('to', 'SegmentEndPoint', 0, 1, {u'type': u'SegmentEndPoint', u'name': u'to', u'minOccurs': u'0'}, 3), + MemberSpec_( + 
"from_", + "SegmentEndPoint", + 0, + 1, + {u"type": u"SegmentEndPoint", u"name": u"from", u"minOccurs": u"0"}, + 3, + ), + MemberSpec_( + "to", + "SegmentEndPoint", + 0, + 1, + {u"type": u"SegmentEndPoint", u"name": u"to", u"minOccurs": u"0"}, + 3, + ), ] subclass = None superclass = None + def __init__(self, from_=None, to=None, **kwargs_): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") self.from_ = from_ self.to = to + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, SubTree) + subclass = getSubclassFromModule_(CurrentSubclassModule_, SubTree) if subclass is not None: return subclass(*args_, **kwargs_) if SubTree.subclass: return SubTree.subclass(*args_, **kwargs_) else: return SubTree(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): - if ( - self.from_ is not None or - self.to is not None - ): + if self.from_ is not None or self.to is not None: return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SubTree', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('SubTree') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="SubTree", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("SubTree") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SubTree') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="SubTree" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SubTree', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="SubTree", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SubTree'): + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="SubTree" + ): pass - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SubTree', fromsubclass_=False, pretty_print=True): + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="SubTree", + fromsubclass_=False, + pretty_print=True, + ): if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.from_ is not None: - self.from_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='from', pretty_print=pretty_print) + self.from_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + 
name_="from", + pretty_print=pretty_print, + ) if self.to is not None: - self.to.export(outfile, level, namespaceprefix_, namespacedef_='', name_='to', pretty_print=pretty_print) + self.to.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="to", + pretty_print=pretty_print, + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -3981,80 +6663,130 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'from': + if nodeName_ == "from": obj_ = SegmentEndPoint.factory(parent_object_=self) obj_.build(child_) self.from_ = obj_ - obj_.original_tagname_ = 'from' - elif nodeName_ == 'to': + obj_.original_tagname_ = "from" + elif nodeName_ == "to": obj_ = SegmentEndPoint.factory(parent_object_=self) obj_.build(child_) self.to = obj_ - obj_.original_tagname_ = 'to' + obj_.original_tagname_ = "to" + + # end class SubTree class SegmentEndPoint(GeneratedsSuper): member_data_items_ = [ - MemberSpec_('segments', 'NonNegativeInteger', 0, 0, {'use': u'required'}), + MemberSpec_("segments", "NonNegativeInteger", 0, 0, {"use": u"required"}), ] subclass = None superclass = None + def __init__(self, segments=None, **kwargs_): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") self.segments = _cast(int, segments) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, SegmentEndPoint) + subclass = getSubclassFromModule_(CurrentSubclassModule_, SegmentEndPoint) if subclass is not None: return subclass(*args_, **kwargs_) if SegmentEndPoint.subclass: return SegmentEndPoint.subclass(*args_, **kwargs_) else: return SegmentEndPoint(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NonNegativeInteger(self, value): # Validate type NonNegativeInteger, a restriction on xs:nonNegativeInteger. 
if value is not None and Validate_simpletypes_: pass - def hasContent_(self): - if ( - ): + def hasContent_(self): + if (): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SegmentEndPoint', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('SegmentEndPoint') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="SegmentEndPoint", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("SegmentEndPoint") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SegmentEndPoint') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="SegmentEndPoint" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SegmentEndPoint', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SegmentEndPoint'): - if self.segments is not None and 'segments' not in already_processed: - already_processed.add('segments') - outfile.write(' segment=%s' % (quote_attrib(self.segments), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SegmentEndPoint', fromsubclass_=False, pretty_print=True): + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="SegmentEndPoint", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="SegmentEndPoint", + ): + if self.segments is not None and "segments" not in already_processed: + already_processed.add("segments") + outfile.write(" segment=%s" % (quote_attrib(self.segments),)) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="SegmentEndPoint", + fromsubclass_=False, + pretty_print=True, + ): pass + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -4062,42 +6794,209 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('segment', node) - if value is not None and 'segment' not in already_processed: - already_processed.add('segment') + value = find_attr_value_("segment", node) + if value is not None and "segment" not in already_processed: + already_processed.add("segment") try: self.segments = int(value) except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + raise_parse_error(node, "Bad integer attribute: %s" % exp) if self.segments < 0: - 
raise_parse_error(node, 'Invalid NonNegativeInteger') - self.validate_NonNegativeInteger(self.segments) # validate type NonNegativeInteger + raise_parse_error(node, "Invalid NonNegativeInteger") + self.validate_NonNegativeInteger( + self.segments + ) # validate type NonNegativeInteger + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): pass + + # end class SegmentEndPoint class MembraneProperties(GeneratedsSuper): member_data_items_ = [ - MemberSpec_('channel_populations', 'ChannelPopulation', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ChannelPopulation', u'name': u'channelPopulation', u'minOccurs': u'0'}, None), - MemberSpec_('channel_densities', 'ChannelDensity', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ChannelDensity', u'name': u'channelDensity', u'minOccurs': u'0'}, None), - MemberSpec_('channel_density_v_shifts', 'ChannelDensityVShift', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ChannelDensityVShift', u'name': u'channelDensityVShift', u'minOccurs': u'0'}, None), - MemberSpec_('channel_density_nernsts', 'ChannelDensityNernst', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ChannelDensityNernst', u'name': u'channelDensityNernst', u'minOccurs': u'0'}, None), - MemberSpec_('channel_density_ghks', 'ChannelDensityGHK', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ChannelDensityGHK', u'name': u'channelDensityGHK', u'minOccurs': u'0'}, None), - MemberSpec_('channel_density_ghk2s', 'ChannelDensityGHK2', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ChannelDensityGHK2', u'name': u'channelDensityGHK2', u'minOccurs': u'0'}, None), - MemberSpec_('channel_density_non_uniforms', 'ChannelDensityNonUniform', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ChannelDensityNonUniform', u'name': u'channelDensityNonUniform', u'minOccurs': u'0'}, None), - MemberSpec_('channel_density_non_uniform_nernsts', 'ChannelDensityNonUniformNernst', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ChannelDensityNonUniformNernst', u'name': u'channelDensityNonUniformNernst', u'minOccurs': u'0'}, None), - MemberSpec_('channel_density_non_uniform_ghks', 'ChannelDensityNonUniformGHK', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ChannelDensityNonUniformGHK', u'name': u'channelDensityNonUniformGHK', u'minOccurs': u'0'}, None), - MemberSpec_('spike_threshes', 'SpikeThresh', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'SpikeThresh', u'name': u'spikeThresh', u'minOccurs': u'0'}, None), - MemberSpec_('specific_capacitances', 'SpecificCapacitance', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'SpecificCapacitance', u'name': u'specificCapacitance', u'minOccurs': u'0'}, None), - MemberSpec_('init_memb_potentials', 'InitMembPotential', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'InitMembPotential', u'name': u'initMembPotential', u'minOccurs': u'0'}, None), + MemberSpec_( + "channel_populations", + "ChannelPopulation", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"ChannelPopulation", + u"name": u"channelPopulation", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "channel_densities", + "ChannelDensity", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"ChannelDensity", + u"name": u"channelDensity", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "channel_density_v_shifts", + "ChannelDensityVShift", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"ChannelDensityVShift", + u"name": u"channelDensityVShift", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "channel_density_nernsts", + "ChannelDensityNernst", + 1, + 1, + { + u"maxOccurs": u"unbounded", + 
u"type": u"ChannelDensityNernst", + u"name": u"channelDensityNernst", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "channel_density_ghks", + "ChannelDensityGHK", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"ChannelDensityGHK", + u"name": u"channelDensityGHK", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "channel_density_ghk2s", + "ChannelDensityGHK2", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"ChannelDensityGHK2", + u"name": u"channelDensityGHK2", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "channel_density_non_uniforms", + "ChannelDensityNonUniform", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"ChannelDensityNonUniform", + u"name": u"channelDensityNonUniform", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "channel_density_non_uniform_nernsts", + "ChannelDensityNonUniformNernst", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"ChannelDensityNonUniformNernst", + u"name": u"channelDensityNonUniformNernst", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "channel_density_non_uniform_ghks", + "ChannelDensityNonUniformGHK", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"ChannelDensityNonUniformGHK", + u"name": u"channelDensityNonUniformGHK", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "spike_threshes", + "SpikeThresh", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"SpikeThresh", + u"name": u"spikeThresh", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "specific_capacitances", + "SpecificCapacitance", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"SpecificCapacitance", + u"name": u"specificCapacitance", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "init_memb_potentials", + "InitMembPotential", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"InitMembPotential", + u"name": u"initMembPotential", + u"minOccurs": u"0", + }, + None, + ), ] subclass = None superclass = None - def __init__(self, channel_populations=None, channel_densities=None, channel_density_v_shifts=None, channel_density_nernsts=None, channel_density_ghks=None, channel_density_ghk2s=None, channel_density_non_uniforms=None, channel_density_non_uniform_nernsts=None, channel_density_non_uniform_ghks=None, spike_threshes=None, specific_capacitances=None, init_memb_potentials=None, extensiontype_=None, **kwargs_): + + def __init__( + self, + channel_populations=None, + channel_densities=None, + channel_density_v_shifts=None, + channel_density_nernsts=None, + channel_density_ghks=None, + channel_density_ghk2s=None, + channel_density_non_uniforms=None, + channel_density_non_uniform_nernsts=None, + channel_density_non_uniform_ghks=None, + spike_threshes=None, + specific_capacitances=None, + init_memb_potentials=None, + extensiontype_=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") if channel_populations is None: self.channel_populations = [] else: @@ -4129,7 +7028,9 @@ def __init__(self, channel_populations=None, channel_densities=None, channel_den if channel_density_non_uniform_nernsts is None: self.channel_density_non_uniform_nernsts = [] else: - self.channel_density_non_uniform_nernsts = channel_density_non_uniform_nernsts + self.channel_density_non_uniform_nernsts = ( + channel_density_non_uniform_nernsts + ) if channel_density_non_uniform_ghks is None: self.channel_density_non_uniform_ghks = [] else: @@ -4147,91 +7048,227 @@ def __init__(self, 
channel_populations=None, channel_densities=None, channel_den else: self.init_memb_potentials = init_memb_potentials self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, MembraneProperties) + CurrentSubclassModule_, MembraneProperties + ) if subclass is not None: return subclass(*args_, **kwargs_) if MembraneProperties.subclass: return MembraneProperties.subclass(*args_, **kwargs_) else: return MembraneProperties(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): if ( - self.channel_populations or - self.channel_densities or - self.channel_density_v_shifts or - self.channel_density_nernsts or - self.channel_density_ghks or - self.channel_density_ghk2s or - self.channel_density_non_uniforms or - self.channel_density_non_uniform_nernsts or - self.channel_density_non_uniform_ghks or - self.spike_threshes or - self.specific_capacitances or - self.init_memb_potentials + self.channel_populations + or self.channel_densities + or self.channel_density_v_shifts + or self.channel_density_nernsts + or self.channel_density_ghks + or self.channel_density_ghk2s + or self.channel_density_non_uniforms + or self.channel_density_non_uniform_nernsts + or self.channel_density_non_uniform_ghks + or self.spike_threshes + or self.specific_capacitances + or self.init_memb_potentials ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='MembraneProperties', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('MembraneProperties') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="MembraneProperties", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("MembraneProperties") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='MembraneProperties') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="MembraneProperties", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='MembraneProperties', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="MembraneProperties", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='MembraneProperties'): - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + 
name_="MembraneProperties", + ): + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') outfile.write(' xsi:type="%s"' % self.extensiontype_) pass - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='MembraneProperties', fromsubclass_=False, pretty_print=True): + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="MembraneProperties", + fromsubclass_=False, + pretty_print=True, + ): if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" for channelPopulation_ in self.channel_populations: - channelPopulation_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='channelPopulation', pretty_print=pretty_print) + channelPopulation_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="channelPopulation", + pretty_print=pretty_print, + ) for channelDensity_ in self.channel_densities: - channelDensity_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='channelDensity', pretty_print=pretty_print) + channelDensity_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="channelDensity", + pretty_print=pretty_print, + ) for channelDensityVShift_ in self.channel_density_v_shifts: - channelDensityVShift_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='channelDensityVShift', pretty_print=pretty_print) + channelDensityVShift_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="channelDensityVShift", + pretty_print=pretty_print, + ) for channelDensityNernst_ in self.channel_density_nernsts: - channelDensityNernst_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='channelDensityNernst', pretty_print=pretty_print) + channelDensityNernst_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="channelDensityNernst", + pretty_print=pretty_print, + ) for channelDensityGHK_ in self.channel_density_ghks: - channelDensityGHK_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='channelDensityGHK', pretty_print=pretty_print) + channelDensityGHK_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="channelDensityGHK", + pretty_print=pretty_print, + ) for channelDensityGHK2_ in self.channel_density_ghk2s: - channelDensityGHK2_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='channelDensityGHK2', pretty_print=pretty_print) + channelDensityGHK2_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="channelDensityGHK2", + pretty_print=pretty_print, + ) for channelDensityNonUniform_ in self.channel_density_non_uniforms: - channelDensityNonUniform_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='channelDensityNonUniform', pretty_print=pretty_print) + channelDensityNonUniform_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="channelDensityNonUniform", + pretty_print=pretty_print, + ) for channelDensityNonUniformNernst_ in self.channel_density_non_uniform_nernsts: - channelDensityNonUniformNernst_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='channelDensityNonUniformNernst', pretty_print=pretty_print) + channelDensityNonUniformNernst_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="channelDensityNonUniformNernst", + pretty_print=pretty_print, + ) for 
channelDensityNonUniformGHK_ in self.channel_density_non_uniform_ghks: - channelDensityNonUniformGHK_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='channelDensityNonUniformGHK', pretty_print=pretty_print) + channelDensityNonUniformGHK_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="channelDensityNonUniformGHK", + pretty_print=pretty_print, + ) for spikeThresh_ in self.spike_threshes: - spikeThresh_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='spikeThresh', pretty_print=pretty_print) + spikeThresh_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="spikeThresh", + pretty_print=pretty_print, + ) for specificCapacitance_ in self.specific_capacitances: - specificCapacitance_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='specificCapacitance', pretty_print=pretty_print) + specificCapacitance_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="specificCapacitance", + pretty_print=pretty_print, + ) for initMembPotential_ in self.init_memb_potentials: - initMembPotential_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='initMembPotential', pretty_print=pretty_print) + initMembPotential_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="initMembPotential", + pretty_print=pretty_print, + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -4239,141 +7276,255 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'channelPopulation': + if nodeName_ == "channelPopulation": obj_ = ChannelPopulation.factory(parent_object_=self) obj_.build(child_) self.channel_populations.append(obj_) - obj_.original_tagname_ = 'channelPopulation' - elif nodeName_ == 'channelDensity': + obj_.original_tagname_ = "channelPopulation" + elif nodeName_ == "channelDensity": class_obj_ = self.get_class_obj_(child_, ChannelDensity) obj_ = class_obj_.factory(parent_object_=self) obj_.build(child_) self.channel_densities.append(obj_) - obj_.original_tagname_ = 'channelDensity' - elif nodeName_ == 'channelDensityVShift': + obj_.original_tagname_ = "channelDensity" + elif nodeName_ == "channelDensityVShift": obj_ = ChannelDensityVShift.factory(parent_object_=self) obj_.build(child_) self.channel_density_v_shifts.append(obj_) - obj_.original_tagname_ = 'channelDensityVShift' - elif nodeName_ == 'channelDensityNernst': + obj_.original_tagname_ = "channelDensityVShift" + elif nodeName_ == "channelDensityNernst": class_obj_ = self.get_class_obj_(child_, ChannelDensityNernst) obj_ = class_obj_.factory(parent_object_=self) obj_.build(child_) self.channel_density_nernsts.append(obj_) - obj_.original_tagname_ = 'channelDensityNernst' - elif nodeName_ == 'channelDensityGHK': + obj_.original_tagname_ = "channelDensityNernst" + elif nodeName_ == "channelDensityGHK": obj_ = ChannelDensityGHK.factory(parent_object_=self) obj_.build(child_) 
self.channel_density_ghks.append(obj_) - obj_.original_tagname_ = 'channelDensityGHK' - elif nodeName_ == 'channelDensityGHK2': + obj_.original_tagname_ = "channelDensityGHK" + elif nodeName_ == "channelDensityGHK2": obj_ = ChannelDensityGHK2.factory(parent_object_=self) obj_.build(child_) self.channel_density_ghk2s.append(obj_) - obj_.original_tagname_ = 'channelDensityGHK2' - elif nodeName_ == 'channelDensityNonUniform': + obj_.original_tagname_ = "channelDensityGHK2" + elif nodeName_ == "channelDensityNonUniform": obj_ = ChannelDensityNonUniform.factory(parent_object_=self) obj_.build(child_) self.channel_density_non_uniforms.append(obj_) - obj_.original_tagname_ = 'channelDensityNonUniform' - elif nodeName_ == 'channelDensityNonUniformNernst': + obj_.original_tagname_ = "channelDensityNonUniform" + elif nodeName_ == "channelDensityNonUniformNernst": obj_ = ChannelDensityNonUniformNernst.factory(parent_object_=self) obj_.build(child_) self.channel_density_non_uniform_nernsts.append(obj_) - obj_.original_tagname_ = 'channelDensityNonUniformNernst' - elif nodeName_ == 'channelDensityNonUniformGHK': + obj_.original_tagname_ = "channelDensityNonUniformNernst" + elif nodeName_ == "channelDensityNonUniformGHK": obj_ = ChannelDensityNonUniformGHK.factory(parent_object_=self) obj_.build(child_) self.channel_density_non_uniform_ghks.append(obj_) - obj_.original_tagname_ = 'channelDensityNonUniformGHK' - elif nodeName_ == 'spikeThresh': + obj_.original_tagname_ = "channelDensityNonUniformGHK" + elif nodeName_ == "spikeThresh": obj_ = SpikeThresh.factory(parent_object_=self) obj_.build(child_) self.spike_threshes.append(obj_) - obj_.original_tagname_ = 'spikeThresh' - elif nodeName_ == 'specificCapacitance': + obj_.original_tagname_ = "spikeThresh" + elif nodeName_ == "specificCapacitance": obj_ = SpecificCapacitance.factory(parent_object_=self) obj_.build(child_) self.specific_capacitances.append(obj_) - obj_.original_tagname_ = 'specificCapacitance' - elif nodeName_ == 'initMembPotential': + obj_.original_tagname_ = "specificCapacitance" + elif nodeName_ == "initMembPotential": obj_ = InitMembPotential.factory(parent_object_=self) obj_.build(child_) self.init_memb_potentials.append(obj_) - obj_.original_tagname_ = 'initMembPotential' + obj_.original_tagname_ = "initMembPotential" + + # end class MembraneProperties class MembraneProperties2CaPools(MembraneProperties): member_data_items_ = [ - MemberSpec_('channel_density_nernst_ca2s', 'ChannelDensityNernstCa2', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ChannelDensityNernstCa2', u'name': u'channelDensityNernstCa2', u'minOccurs': u'0'}, None), + MemberSpec_( + "channel_density_nernst_ca2s", + "ChannelDensityNernstCa2", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"ChannelDensityNernstCa2", + u"name": u"channelDensityNernstCa2", + u"minOccurs": u"0", + }, + None, + ), ] subclass = None superclass = MembraneProperties - def __init__(self, channel_populations=None, channel_densities=None, channel_density_v_shifts=None, channel_density_nernsts=None, channel_density_ghks=None, channel_density_ghk2s=None, channel_density_non_uniforms=None, channel_density_non_uniform_nernsts=None, channel_density_non_uniform_ghks=None, spike_threshes=None, specific_capacitances=None, init_memb_potentials=None, channel_density_nernst_ca2s=None, **kwargs_): + + def __init__( + self, + channel_populations=None, + channel_densities=None, + channel_density_v_shifts=None, + channel_density_nernsts=None, + channel_density_ghks=None, + channel_density_ghk2s=None, 
+ channel_density_non_uniforms=None, + channel_density_non_uniform_nernsts=None, + channel_density_non_uniform_ghks=None, + spike_threshes=None, + specific_capacitances=None, + init_memb_potentials=None, + channel_density_nernst_ca2s=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(MembraneProperties2CaPools, self).__init__(channel_populations, channel_densities, channel_density_v_shifts, channel_density_nernsts, channel_density_ghks, channel_density_ghk2s, channel_density_non_uniforms, channel_density_non_uniform_nernsts, channel_density_non_uniform_ghks, spike_threshes, specific_capacitances, init_memb_potentials, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(MembraneProperties2CaPools, self).__init__( + channel_populations, + channel_densities, + channel_density_v_shifts, + channel_density_nernsts, + channel_density_ghks, + channel_density_ghk2s, + channel_density_non_uniforms, + channel_density_non_uniform_nernsts, + channel_density_non_uniform_ghks, + spike_threshes, + specific_capacitances, + init_memb_potentials, + **kwargs_ + ) if channel_density_nernst_ca2s is None: self.channel_density_nernst_ca2s = [] else: self.channel_density_nernst_ca2s = channel_density_nernst_ca2s + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, MembraneProperties2CaPools) + CurrentSubclassModule_, MembraneProperties2CaPools + ) if subclass is not None: return subclass(*args_, **kwargs_) if MembraneProperties2CaPools.subclass: return MembraneProperties2CaPools.subclass(*args_, **kwargs_) else: return MembraneProperties2CaPools(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): if ( - self.channel_density_nernst_ca2s or - super(MembraneProperties2CaPools, self).hasContent_() + self.channel_density_nernst_ca2s + or super(MembraneProperties2CaPools, self).hasContent_() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='MembraneProperties2CaPools', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('MembraneProperties2CaPools') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="MembraneProperties2CaPools", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("MembraneProperties2CaPools") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='MembraneProperties2CaPools') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="MembraneProperties2CaPools", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='MembraneProperties2CaPools', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + 
name_="MembraneProperties2CaPools", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='MembraneProperties2CaPools'): - super(MembraneProperties2CaPools, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='MembraneProperties2CaPools') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='MembraneProperties2CaPools', fromsubclass_=False, pretty_print=True): - super(MembraneProperties2CaPools, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="MembraneProperties2CaPools", + ): + super(MembraneProperties2CaPools, self).exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="MembraneProperties2CaPools", + ) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="MembraneProperties2CaPools", + fromsubclass_=False, + pretty_print=True, + ): + super(MembraneProperties2CaPools, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" for channelDensityNernstCa2_ in self.channel_density_nernst_ca2s: - channelDensityNernstCa2_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='channelDensityNernstCa2', pretty_print=pretty_print) + channelDensityNernstCa2_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="channelDensityNernstCa2", + pretty_print=pretty_print, + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -4381,15 +7532,23 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - super(MembraneProperties2CaPools, self).buildAttributes(node, attrs, already_processed) + super(MembraneProperties2CaPools, self).buildAttributes( + node, attrs, already_processed + ) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'channelDensityNernstCa2': + if nodeName_ == "channelDensityNernstCa2": obj_ = ChannelDensityNernstCa2.factory(parent_object_=self) obj_.build(child_) self.channel_density_nernst_ca2s.append(obj_) - obj_.original_tagname_ = 'channelDensityNernstCa2' - super(MembraneProperties2CaPools, self).buildChildren(child_, node, nodeName_, True) + obj_.original_tagname_ = "channelDensityNernstCa2" + super(MembraneProperties2CaPools, self).buildChildren( + child_, node, nodeName_, True + ) + + # end class MembraneProperties2CaPools @@ -4398,78 +7557,144 @@ class SpikeThresh(GeneratedsSuper): the spiking event will not be emitted again until the membrane potential has fallen below this value and rises again to cross it in a positive direction.""" + member_data_items_ = [ - MemberSpec_('value', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('segment_groups', 'NmlId', 0, 1, {'use': u'optional'}), + MemberSpec_("value", "Nml2Quantity_voltage", 0, 0, {"use": u"required"}), + 
MemberSpec_("segment_groups", "NmlId", 0, 1, {"use": u"optional"}), ] subclass = None superclass = None - def __init__(self, value=None, segment_groups='all', **kwargs_): + + def __init__(self, value=None, segment_groups="all", **kwargs_): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") self.value = _cast(None, value) self.segment_groups = _cast(None, segment_groups) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, SpikeThresh) + subclass = getSubclassFromModule_(CurrentSubclassModule_, SpikeThresh) if subclass is not None: return subclass(*args_, **kwargs_) if SpikeThresh.subclass: return SpikeThresh.subclass(*args_, **kwargs_) else: return SpikeThresh(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_voltage(self, value): # Validate type Nml2Quantity_voltage, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_voltage_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_voltage_patterns_, )) - validate_Nml2Quantity_voltage_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$']] + self.validate_Nml2Quantity_voltage_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_voltage_patterns_, + ) + ) + + validate_Nml2Quantity_voltage_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$"] + ] + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. 
if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] - def hasContent_(self): - if ( + self.validate_NmlId_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_NmlId_patterns_, + ) + ) - ): + validate_NmlId_patterns_ = [[u"^[a-zA-Z_][a-zA-Z0-9_]*$"]] + + def hasContent_(self): + if (): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SpikeThresh', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('SpikeThresh') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="SpikeThresh", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("SpikeThresh") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpikeThresh') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="SpikeThresh" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SpikeThresh', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SpikeThresh'): - if self.value is not None and 'value' not in already_processed: - already_processed.add('value') - outfile.write(' value=%s' % (quote_attrib(self.value), )) - if self.segment_groups != "all" and 'segment_groups' not in already_processed: - already_processed.add('segment_groups') - outfile.write(' segmentGroup=%s' % (quote_attrib(self.segment_groups), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SpikeThresh', fromsubclass_=False, pretty_print=True): + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="SpikeThresh", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="SpikeThresh", + ): + if self.value is not None and "value" not in already_processed: + already_processed.add("value") + outfile.write(" value=%s" % (quote_attrib(self.value),)) + if self.segment_groups != "all" and "segment_groups" not in already_processed: + already_processed.add("segment_groups") + outfile.write(" segmentGroup=%s" % (quote_attrib(self.segment_groups),)) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="SpikeThresh", + fromsubclass_=False, + 
pretty_print=True, + ): pass + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -4477,96 +7702,176 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('value', node) - if value is not None and 'value' not in already_processed: - already_processed.add('value') + value = find_attr_value_("value", node) + if value is not None and "value" not in already_processed: + already_processed.add("value") self.value = value - self.validate_Nml2Quantity_voltage(self.value) # validate type Nml2Quantity_voltage - value = find_attr_value_('segmentGroup', node) - if value is not None and 'segmentGroup' not in already_processed: - already_processed.add('segmentGroup') + self.validate_Nml2Quantity_voltage( + self.value + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("segmentGroup", node) + if value is not None and "segmentGroup" not in already_processed: + already_processed.add("segmentGroup") self.segment_groups = value - self.validate_NmlId(self.segment_groups) # validate type NmlId + self.validate_NmlId(self.segment_groups) # validate type NmlId + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): pass + + # end class SpikeThresh class SpecificCapacitance(GeneratedsSuper): """Capacitance per unit area""" + member_data_items_ = [ - MemberSpec_('value', 'Nml2Quantity_specificCapacitance', 0, 0, {'use': u'required'}), - MemberSpec_('segment_groups', 'NmlId', 0, 1, {'use': u'optional'}), + MemberSpec_( + "value", "Nml2Quantity_specificCapacitance", 0, 0, {"use": u"required"} + ), + MemberSpec_("segment_groups", "NmlId", 0, 1, {"use": u"optional"}), ] subclass = None superclass = None - def __init__(self, value=None, segment_groups='all', **kwargs_): + + def __init__(self, value=None, segment_groups="all", **kwargs_): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") self.value = _cast(None, value) self.segment_groups = _cast(None, segment_groups) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, SpecificCapacitance) + CurrentSubclassModule_, SpecificCapacitance + ) if subclass is not None: return subclass(*args_, **kwargs_) if SpecificCapacitance.subclass: return SpecificCapacitance.subclass(*args_, **kwargs_) else: return SpecificCapacitance(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_specificCapacitance(self, value): # Validate type Nml2Quantity_specificCapacitance, a restriction on xs:string. 
if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_specificCapacitance_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_specificCapacitance_patterns_, )) - validate_Nml2Quantity_specificCapacitance_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(F_per_m2|uF_per_cm2)$']] + self.validate_Nml2Quantity_specificCapacitance_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_specificCapacitance_patterns_, + ) + ) + + validate_Nml2Quantity_specificCapacitance_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(F_per_m2|uF_per_cm2)$"] + ] + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] - def hasContent_(self): - if ( + self.validate_NmlId_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_NmlId_patterns_, + ) + ) - ): + validate_NmlId_patterns_ = [[u"^[a-zA-Z_][a-zA-Z0-9_]*$"]] + + def hasContent_(self): + if (): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SpecificCapacitance', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('SpecificCapacitance') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="SpecificCapacitance", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("SpecificCapacitance") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpecificCapacitance') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="SpecificCapacitance", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SpecificCapacitance', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SpecificCapacitance'): - if self.value is not None and 'value' not in already_processed: - already_processed.add('value') - outfile.write(' value=%s' % (quote_attrib(self.value), )) - if self.segment_groups != "all" and 'segment_groups' not in already_processed: - already_processed.add('segment_groups') - outfile.write(' segmentGroup=%s' % (quote_attrib(self.segment_groups), )) - def 
exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SpecificCapacitance', fromsubclass_=False, pretty_print=True): + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="SpecificCapacitance", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="SpecificCapacitance", + ): + if self.value is not None and "value" not in already_processed: + already_processed.add("value") + outfile.write(" value=%s" % (quote_attrib(self.value),)) + if self.segment_groups != "all" and "segment_groups" not in already_processed: + already_processed.add("segment_groups") + outfile.write(" segmentGroup=%s" % (quote_attrib(self.segment_groups),)) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="SpecificCapacitance", + fromsubclass_=False, + pretty_print=True, + ): pass + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -4574,96 +7879,172 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('value', node) - if value is not None and 'value' not in already_processed: - already_processed.add('value') + value = find_attr_value_("value", node) + if value is not None and "value" not in already_processed: + already_processed.add("value") self.value = value - self.validate_Nml2Quantity_specificCapacitance(self.value) # validate type Nml2Quantity_specificCapacitance - value = find_attr_value_('segmentGroup', node) - if value is not None and 'segmentGroup' not in already_processed: - already_processed.add('segmentGroup') + self.validate_Nml2Quantity_specificCapacitance( + self.value + ) # validate type Nml2Quantity_specificCapacitance + value = find_attr_value_("segmentGroup", node) + if value is not None and "segmentGroup" not in already_processed: + already_processed.add("segmentGroup") self.segment_groups = value - self.validate_NmlId(self.segment_groups) # validate type NmlId + self.validate_NmlId(self.segment_groups) # validate type NmlId + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): pass + + # end class SpecificCapacitance class InitMembPotential(GeneratedsSuper): """Explicitly set initial membrane potential for the cell""" + member_data_items_ = [ - MemberSpec_('value', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('segment_groups', 'NmlId', 0, 1, {'use': u'optional'}), + MemberSpec_("value", "Nml2Quantity_voltage", 0, 0, {"use": u"required"}), + MemberSpec_("segment_groups", "NmlId", 0, 1, {"use": u"optional"}), ] subclass = None superclass = None - def __init__(self, value=None, segment_groups='all', **kwargs_): + + def __init__(self, value=None, segment_groups="all", **kwargs_): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") self.value = _cast(None, value) self.segment_groups = _cast(None, segment_groups) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, InitMembPotential) + subclass = getSubclassFromModule_(CurrentSubclassModule_, 
InitMembPotential) if subclass is not None: return subclass(*args_, **kwargs_) if InitMembPotential.subclass: return InitMembPotential.subclass(*args_, **kwargs_) else: return InitMembPotential(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_voltage(self, value): # Validate type Nml2Quantity_voltage, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_voltage_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_voltage_patterns_, )) - validate_Nml2Quantity_voltage_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$']] + self.validate_Nml2Quantity_voltage_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_voltage_patterns_, + ) + ) + + validate_Nml2Quantity_voltage_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$"] + ] + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] - def hasContent_(self): - if ( + self.validate_NmlId_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_NmlId_patterns_, + ) + ) - ): + validate_NmlId_patterns_ = [[u"^[a-zA-Z_][a-zA-Z0-9_]*$"]] + + def hasContent_(self): + if (): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='InitMembPotential', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('InitMembPotential') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="InitMembPotential", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("InitMembPotential") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='InitMembPotential') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="InitMembPotential", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='InitMembPotential', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='InitMembPotential'): - if self.value is not None and 'value' not in already_processed: - already_processed.add('value') - outfile.write(' value=%s' % 
(quote_attrib(self.value), )) - if self.segment_groups != "all" and 'segment_groups' not in already_processed: - already_processed.add('segment_groups') - outfile.write(' segmentGroup=%s' % (quote_attrib(self.segment_groups), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='InitMembPotential', fromsubclass_=False, pretty_print=True): + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="InitMembPotential", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="InitMembPotential", + ): + if self.value is not None and "value" not in already_processed: + already_processed.add("value") + outfile.write(" value=%s" % (quote_attrib(self.value),)) + if self.segment_groups != "all" and "segment_groups" not in already_processed: + already_processed.add("segment_groups") + outfile.write(" segmentGroup=%s" % (quote_attrib(self.segment_groups),)) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="InitMembPotential", + fromsubclass_=False, + pretty_print=True, + ): pass + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -4671,96 +8052,168 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('value', node) - if value is not None and 'value' not in already_processed: - already_processed.add('value') + value = find_attr_value_("value", node) + if value is not None and "value" not in already_processed: + already_processed.add("value") self.value = value - self.validate_Nml2Quantity_voltage(self.value) # validate type Nml2Quantity_voltage - value = find_attr_value_('segmentGroup', node) - if value is not None and 'segmentGroup' not in already_processed: - already_processed.add('segmentGroup') + self.validate_Nml2Quantity_voltage( + self.value + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("segmentGroup", node) + if value is not None and "segmentGroup" not in already_processed: + already_processed.add("segmentGroup") self.segment_groups = value - self.validate_NmlId(self.segment_groups) # validate type NmlId + self.validate_NmlId(self.segment_groups) # validate type NmlId + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): pass + + # end class InitMembPotential class Resistivity(GeneratedsSuper): """The resistivity, or specific axial resistance, of the cytoplasm""" + member_data_items_ = [ - MemberSpec_('value', 'Nml2Quantity_resistivity', 0, 0, {'use': u'required'}), - MemberSpec_('segment_groups', 'NmlId', 0, 1, {'use': u'optional'}), + MemberSpec_("value", "Nml2Quantity_resistivity", 0, 0, {"use": u"required"}), + MemberSpec_("segment_groups", "NmlId", 0, 1, {"use": u"optional"}), ] subclass = None superclass = None - def __init__(self, value=None, segment_groups='all', **kwargs_): + + def __init__(self, value=None, segment_groups="all", **kwargs_): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") self.value = _cast(None, value) self.segment_groups = _cast(None, segment_groups) + def factory(*args_, 
**kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, Resistivity) + subclass = getSubclassFromModule_(CurrentSubclassModule_, Resistivity) if subclass is not None: return subclass(*args_, **kwargs_) if Resistivity.subclass: return Resistivity.subclass(*args_, **kwargs_) else: return Resistivity(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_resistivity(self, value): # Validate type Nml2Quantity_resistivity, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_resistivity_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_resistivity_patterns_, )) - validate_Nml2Quantity_resistivity_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(ohm_cm|kohm_cm|ohm_m)$']] + self.validate_Nml2Quantity_resistivity_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_resistivity_patterns_, + ) + ) + + validate_Nml2Quantity_resistivity_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(ohm_cm|kohm_cm|ohm_m)$"] + ] + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] - def hasContent_(self): - if ( + self.validate_NmlId_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_NmlId_patterns_, + ) + ) - ): + validate_NmlId_patterns_ = [[u"^[a-zA-Z_][a-zA-Z0-9_]*$"]] + + def hasContent_(self): + if (): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Resistivity', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('Resistivity') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Resistivity", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("Resistivity") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Resistivity') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Resistivity" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Resistivity', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', 
name_='Resistivity'): - if self.value is not None and 'value' not in already_processed: - already_processed.add('value') - outfile.write(' value=%s' % (quote_attrib(self.value), )) - if self.segment_groups != "all" and 'segment_groups' not in already_processed: - already_processed.add('segment_groups') - outfile.write(' segmentGroup=%s' % (quote_attrib(self.segment_groups), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Resistivity', fromsubclass_=False, pretty_print=True): + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="Resistivity", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="Resistivity", + ): + if self.value is not None and "value" not in already_processed: + already_processed.add("value") + outfile.write(" value=%s" % (quote_attrib(self.value),)) + if self.segment_groups != "all" and "segment_groups" not in already_processed: + already_processed.add("segment_groups") + outfile.write(" segmentGroup=%s" % (quote_attrib(self.segment_groups),)) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Resistivity", + fromsubclass_=False, + pretty_print=True, + ): pass + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -4768,89 +8221,185 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('value', node) - if value is not None and 'value' not in already_processed: - already_processed.add('value') + value = find_attr_value_("value", node) + if value is not None and "value" not in already_processed: + already_processed.add("value") self.value = value - self.validate_Nml2Quantity_resistivity(self.value) # validate type Nml2Quantity_resistivity - value = find_attr_value_('segmentGroup', node) - if value is not None and 'segmentGroup' not in already_processed: - already_processed.add('segmentGroup') + self.validate_Nml2Quantity_resistivity( + self.value + ) # validate type Nml2Quantity_resistivity + value = find_attr_value_("segmentGroup", node) + if value is not None and "segmentGroup" not in already_processed: + already_processed.add("segmentGroup") self.segment_groups = value - self.validate_NmlId(self.segment_groups) # validate type NmlId + self.validate_NmlId(self.segment_groups) # validate type NmlId + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): pass + + # end class Resistivity class VariableParameter(GeneratedsSuper): member_data_items_ = [ - MemberSpec_('parameter', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('segment_groups', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('inhomogeneous_value', 'InhomogeneousValue', 0, 1, {u'type': u'InhomogeneousValue', u'name': u'inhomogeneousValue', u'minOccurs': u'0'}, None), + MemberSpec_("parameter", "xs:string", 0, 0, {"use": u"required"}), + MemberSpec_("segment_groups", "xs:string", 0, 0, {"use": u"required"}), + MemberSpec_( + "inhomogeneous_value", + "InhomogeneousValue", + 0, + 1, + { + u"type": u"InhomogeneousValue", + u"name": u"inhomogeneousValue", + u"minOccurs": u"0", + }, + None, + ), ] subclass = None 
superclass = None - def __init__(self, parameter=None, segment_groups=None, inhomogeneous_value=None, **kwargs_): + + def __init__( + self, parameter=None, segment_groups=None, inhomogeneous_value=None, **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") self.parameter = _cast(None, parameter) self.segment_groups = _cast(None, segment_groups) self.inhomogeneous_value = inhomogeneous_value + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, VariableParameter) + subclass = getSubclassFromModule_(CurrentSubclassModule_, VariableParameter) if subclass is not None: return subclass(*args_, **kwargs_) if VariableParameter.subclass: return VariableParameter.subclass(*args_, **kwargs_) else: return VariableParameter(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): - if ( - self.inhomogeneous_value is not None - ): + if self.inhomogeneous_value is not None: return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='VariableParameter', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('VariableParameter') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="VariableParameter", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("VariableParameter") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='VariableParameter') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="VariableParameter", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='VariableParameter', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="VariableParameter", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='VariableParameter'): - if self.parameter is not None and 'parameter' not in already_processed: - already_processed.add('parameter') - outfile.write(' parameter=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.parameter), input_name='parameter')), )) - if self.segment_groups is not None and 'segment_groups' not in already_processed: - already_processed.add('segment_groups') - outfile.write(' segmentGroup=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.segment_groups), input_name='segmentGroup')), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='VariableParameter', fromsubclass_=False, pretty_print=True): + 
outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="VariableParameter", + ): + if self.parameter is not None and "parameter" not in already_processed: + already_processed.add("parameter") + outfile.write( + " parameter=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.parameter), input_name="parameter" + ) + ), + ) + ) + if ( + self.segment_groups is not None + and "segment_groups" not in already_processed + ): + already_processed.add("segment_groups") + outfile.write( + " segmentGroup=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.segment_groups), input_name="segmentGroup" + ) + ), + ) + ) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="VariableParameter", + fromsubclass_=False, + pretty_print=True, + ): if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.inhomogeneous_value is not None: - self.inhomogeneous_value.export(outfile, level, namespaceprefix_, namespacedef_='', name_='inhomogeneousValue', pretty_print=pretty_print) + self.inhomogeneous_value.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="inhomogeneousValue", + pretty_print=pretty_print, + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -4858,83 +8407,162 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('parameter', node) - if value is not None and 'parameter' not in already_processed: - already_processed.add('parameter') + value = find_attr_value_("parameter", node) + if value is not None and "parameter" not in already_processed: + already_processed.add("parameter") self.parameter = value - value = find_attr_value_('segmentGroup', node) - if value is not None and 'segmentGroup' not in already_processed: - already_processed.add('segmentGroup') + value = find_attr_value_("segmentGroup", node) + if value is not None and "segmentGroup" not in already_processed: + already_processed.add("segmentGroup") self.segment_groups = value + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'inhomogeneousValue': + if nodeName_ == "inhomogeneousValue": obj_ = InhomogeneousValue.factory(parent_object_=self) obj_.build(child_) self.inhomogeneous_value = obj_ - obj_.original_tagname_ = 'inhomogeneousValue' + obj_.original_tagname_ = "inhomogeneousValue" + + # end class VariableParameter class InhomogeneousValue(GeneratedsSuper): member_data_items_ = [ - MemberSpec_('inhomogeneous_parameters', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('value', 'xs:string', 0, 0, {'use': u'required'}), + MemberSpec_( + "inhomogeneous_parameters", "xs:string", 0, 0, {"use": u"required"} + ), + MemberSpec_("value", "xs:string", 0, 0, {"use": u"required"}), ] subclass = None superclass = None + def __init__(self, inhomogeneous_parameters=None, value=None, **kwargs_): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") self.inhomogeneous_parameters = _cast(None, inhomogeneous_parameters) self.value = _cast(None, value) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is 
not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, InhomogeneousValue) + CurrentSubclassModule_, InhomogeneousValue + ) if subclass is not None: return subclass(*args_, **kwargs_) if InhomogeneousValue.subclass: return InhomogeneousValue.subclass(*args_, **kwargs_) else: return InhomogeneousValue(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - ): + def hasContent_(self): + if (): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='InhomogeneousValue', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('InhomogeneousValue') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="InhomogeneousValue", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("InhomogeneousValue") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='InhomogeneousValue') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="InhomogeneousValue", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='InhomogeneousValue', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='InhomogeneousValue'): - if self.inhomogeneous_parameters is not None and 'inhomogeneous_parameters' not in already_processed: - already_processed.add('inhomogeneous_parameters') - outfile.write(' inhomogeneousParameter=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.inhomogeneous_parameters), input_name='inhomogeneousParameter')), )) - if self.value is not None and 'value' not in already_processed: - already_processed.add('value') - outfile.write(' value=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.value), input_name='value')), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='InhomogeneousValue', fromsubclass_=False, pretty_print=True): + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="InhomogeneousValue", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="InhomogeneousValue", + ): + if ( + self.inhomogeneous_parameters is not None + and "inhomogeneous_parameters" not in already_processed + ): + already_processed.add("inhomogeneous_parameters") + outfile.write( + " inhomogeneousParameter=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.inhomogeneous_parameters), + input_name="inhomogeneousParameter", + ) + ), + ) + ) + if self.value is not None and 
"value" not in already_processed: + already_processed.add("value") + outfile.write( + " value=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.value), input_name="value" + ) + ), + ) + ) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="InhomogeneousValue", + fromsubclass_=False, + pretty_print=True, + ): pass + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -4942,17 +8570,21 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('inhomogeneousParameter', node) - if value is not None and 'inhomogeneousParameter' not in already_processed: - already_processed.add('inhomogeneousParameter') + value = find_attr_value_("inhomogeneousParameter", node) + if value is not None and "inhomogeneousParameter" not in already_processed: + already_processed.add("inhomogeneousParameter") self.inhomogeneous_parameters = value - value = find_attr_value_('value', node) - if value is not None and 'value' not in already_processed: - already_processed.add('value') + value = find_attr_value_("value", node) + if value is not None and "value" not in already_processed: + already_processed.add("value") self.value = value + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): pass + + # end class InhomogeneousValue @@ -4960,98 +8592,196 @@ class Species(GeneratedsSuper): """Specifying the ion here again is redundant, the ion name should be the same as id. Kept for now until LEMS implementation can select by id. TODO: remove.""" + member_data_items_ = [ - MemberSpec_('id', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('concentration_model', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('ion', 'NmlId', 0, 1, {'use': u'optional'}), - MemberSpec_('initial_concentration', 'Nml2Quantity_concentration', 0, 0, {'use': u'required'}), - MemberSpec_('initial_ext_concentration', 'Nml2Quantity_concentration', 0, 0, {'use': u'required'}), - MemberSpec_('segment_groups', 'NmlId', 0, 1, {'use': u'optional'}), + MemberSpec_("id", "NmlId", 0, 0, {"use": u"required"}), + MemberSpec_("concentration_model", "NmlId", 0, 0, {"use": u"required"}), + MemberSpec_("ion", "NmlId", 0, 1, {"use": u"optional"}), + MemberSpec_( + "initial_concentration", + "Nml2Quantity_concentration", + 0, + 0, + {"use": u"required"}, + ), + MemberSpec_( + "initial_ext_concentration", + "Nml2Quantity_concentration", + 0, + 0, + {"use": u"required"}, + ), + MemberSpec_("segment_groups", "NmlId", 0, 1, {"use": u"optional"}), ] subclass = None superclass = None - def __init__(self, id=None, concentration_model=None, ion=None, initial_concentration=None, initial_ext_concentration=None, segment_groups='all', **kwargs_): + + def __init__( + self, + id=None, + concentration_model=None, + ion=None, + initial_concentration=None, + initial_ext_concentration=None, + segment_groups="all", + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") self.id = _cast(None, id) self.concentration_model = _cast(None, concentration_model) self.ion = _cast(None, ion) self.initial_concentration = _cast(None, initial_concentration) self.initial_ext_concentration = _cast(None, initial_ext_concentration) self.segment_groups = _cast(None, segment_groups) + def 
factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, Species) + subclass = getSubclassFromModule_(CurrentSubclassModule_, Species) if subclass is not None: return subclass(*args_, **kwargs_) if Species.subclass: return Species.subclass(*args_, **kwargs_) else: return Species(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] + self.validate_NmlId_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [[u"^[a-zA-Z_][a-zA-Z0-9_]*$"]] + def validate_Nml2Quantity_concentration(self, value): # Validate type Nml2Quantity_concentration, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_concentration_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_concentration_patterns_, )) - validate_Nml2Quantity_concentration_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(mol_per_m3|mol_per_cm3|M|mM)$']] - def hasContent_(self): - if ( + self.validate_Nml2Quantity_concentration_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_concentration_patterns_, + ) + ) - ): + validate_Nml2Quantity_concentration_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(mol_per_m3|mol_per_cm3|M|mM)$"] + ] + + def hasContent_(self): + if (): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Species', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('Species') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Species", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("Species") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Species') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Species" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Species', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Species'): 
- if self.id is not None and 'id' not in already_processed: - already_processed.add('id') - outfile.write(' id=%s' % (quote_attrib(self.id), )) - if self.concentration_model is not None and 'concentration_model' not in already_processed: - already_processed.add('concentration_model') - outfile.write(' concentrationModel=%s' % (quote_attrib(self.concentration_model), )) - if self.ion is not None and 'ion' not in already_processed: - already_processed.add('ion') - outfile.write(' ion=%s' % (quote_attrib(self.ion), )) - if self.initial_concentration is not None and 'initial_concentration' not in already_processed: - already_processed.add('initial_concentration') - outfile.write(' initialConcentration=%s' % (quote_attrib(self.initial_concentration), )) - if self.initial_ext_concentration is not None and 'initial_ext_concentration' not in already_processed: - already_processed.add('initial_ext_concentration') - outfile.write(' initialExtConcentration=%s' % (quote_attrib(self.initial_ext_concentration), )) - if self.segment_groups != "all" and 'segment_groups' not in already_processed: - already_processed.add('segment_groups') - outfile.write(' segmentGroup=%s' % (quote_attrib(self.segment_groups), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Species', fromsubclass_=False, pretty_print=True): + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="Species", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="Species" + ): + if self.id is not None and "id" not in already_processed: + already_processed.add("id") + outfile.write(" id=%s" % (quote_attrib(self.id),)) + if ( + self.concentration_model is not None + and "concentration_model" not in already_processed + ): + already_processed.add("concentration_model") + outfile.write( + " concentrationModel=%s" % (quote_attrib(self.concentration_model),) + ) + if self.ion is not None and "ion" not in already_processed: + already_processed.add("ion") + outfile.write(" ion=%s" % (quote_attrib(self.ion),)) + if ( + self.initial_concentration is not None + and "initial_concentration" not in already_processed + ): + already_processed.add("initial_concentration") + outfile.write( + " initialConcentration=%s" % (quote_attrib(self.initial_concentration),) + ) + if ( + self.initial_ext_concentration is not None + and "initial_ext_concentration" not in already_processed + ): + already_processed.add("initial_ext_concentration") + outfile.write( + " initialExtConcentration=%s" + % (quote_attrib(self.initial_ext_concentration),) + ) + if self.segment_groups != "all" and "segment_groups" not in already_processed: + already_processed.add("segment_groups") + outfile.write(" segmentGroup=%s" % (quote_attrib(self.segment_groups),)) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Species", + fromsubclass_=False, + pretty_print=True, + ): pass + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -5059,52 +8789,87 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('id', node) - if value is not None and 
'id' not in already_processed: - already_processed.add('id') + value = find_attr_value_("id", node) + if value is not None and "id" not in already_processed: + already_processed.add("id") self.id = value - self.validate_NmlId(self.id) # validate type NmlId - value = find_attr_value_('concentrationModel', node) - if value is not None and 'concentrationModel' not in already_processed: - already_processed.add('concentrationModel') + self.validate_NmlId(self.id) # validate type NmlId + value = find_attr_value_("concentrationModel", node) + if value is not None and "concentrationModel" not in already_processed: + already_processed.add("concentrationModel") self.concentration_model = value - self.validate_NmlId(self.concentration_model) # validate type NmlId - value = find_attr_value_('ion', node) - if value is not None and 'ion' not in already_processed: - already_processed.add('ion') + self.validate_NmlId(self.concentration_model) # validate type NmlId + value = find_attr_value_("ion", node) + if value is not None and "ion" not in already_processed: + already_processed.add("ion") self.ion = value - self.validate_NmlId(self.ion) # validate type NmlId - value = find_attr_value_('initialConcentration', node) - if value is not None and 'initialConcentration' not in already_processed: - already_processed.add('initialConcentration') + self.validate_NmlId(self.ion) # validate type NmlId + value = find_attr_value_("initialConcentration", node) + if value is not None and "initialConcentration" not in already_processed: + already_processed.add("initialConcentration") self.initial_concentration = value - self.validate_Nml2Quantity_concentration(self.initial_concentration) # validate type Nml2Quantity_concentration - value = find_attr_value_('initialExtConcentration', node) - if value is not None and 'initialExtConcentration' not in already_processed: - already_processed.add('initialExtConcentration') + self.validate_Nml2Quantity_concentration( + self.initial_concentration + ) # validate type Nml2Quantity_concentration + value = find_attr_value_("initialExtConcentration", node) + if value is not None and "initialExtConcentration" not in already_processed: + already_processed.add("initialExtConcentration") self.initial_ext_concentration = value - self.validate_Nml2Quantity_concentration(self.initial_ext_concentration) # validate type Nml2Quantity_concentration - value = find_attr_value_('segmentGroup', node) - if value is not None and 'segmentGroup' not in already_processed: - already_processed.add('segmentGroup') + self.validate_Nml2Quantity_concentration( + self.initial_ext_concentration + ) # validate type Nml2Quantity_concentration + value = find_attr_value_("segmentGroup", node) + if value is not None and "segmentGroup" not in already_processed: + already_processed.add("segmentGroup") self.segment_groups = value - self.validate_NmlId(self.segment_groups) # validate type NmlId + self.validate_NmlId(self.segment_groups) # validate type NmlId + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): pass + + # end class Species class IntracellularProperties(GeneratedsSuper): member_data_items_ = [ - MemberSpec_('species', 'Species', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Species', u'name': u'species', u'minOccurs': u'0'}, None), - MemberSpec_('resistivities', 'Resistivity', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Resistivity', u'name': u'resistivity', u'minOccurs': u'0'}, None), + MemberSpec_( + "species", + "Species", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": 
u"Species", + u"name": u"species", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "resistivities", + "Resistivity", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"Resistivity", + u"name": u"resistivity", + u"minOccurs": u"0", + }, + None, + ), ] subclass = None superclass = None - def __init__(self, species=None, resistivities=None, extensiontype_=None, **kwargs_): + + def __init__( + self, species=None, resistivities=None, extensiontype_=None, **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") if species is None: self.species = [] else: @@ -5114,61 +8879,124 @@ def __init__(self, species=None, resistivities=None, extensiontype_=None, **kwar else: self.resistivities = resistivities self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, IntracellularProperties) + CurrentSubclassModule_, IntracellularProperties + ) if subclass is not None: return subclass(*args_, **kwargs_) if IntracellularProperties.subclass: return IntracellularProperties.subclass(*args_, **kwargs_) else: return IntracellularProperties(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): - if ( - self.species or - self.resistivities - ): + if self.species or self.resistivities: return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IntracellularProperties', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('IntracellularProperties') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="IntracellularProperties", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("IntracellularProperties") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IntracellularProperties') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="IntracellularProperties", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='IntracellularProperties', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="IntracellularProperties", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='IntracellularProperties'): - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + 
already_processed, + namespaceprefix_="", + name_="IntracellularProperties", + ): + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') outfile.write(' xsi:type="%s"' % self.extensiontype_) pass - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IntracellularProperties', fromsubclass_=False, pretty_print=True): + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="IntracellularProperties", + fromsubclass_=False, + pretty_print=True, + ): if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" for species_ in self.species: - species_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='species', pretty_print=pretty_print) + species_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="species", + pretty_print=pretty_print, + ) for resistivity_ in self.resistivities: - resistivity_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='resistivity', pretty_print=pretty_print) + resistivity_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="resistivity", + pretty_print=pretty_print, + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -5176,77 +9004,143 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'species': + if nodeName_ == "species": obj_ = Species.factory(parent_object_=self) obj_.build(child_) self.species.append(obj_) - obj_.original_tagname_ = 'species' - elif nodeName_ == 'resistivity': + obj_.original_tagname_ = "species" + elif nodeName_ == "resistivity": obj_ = Resistivity.factory(parent_object_=self) obj_.build(child_) self.resistivities.append(obj_) - obj_.original_tagname_ = 'resistivity' + obj_.original_tagname_ = "resistivity" + + # end class IntracellularProperties class IntracellularProperties2CaPools(IntracellularProperties): - member_data_items_ = [ - ] + member_data_items_ = [] subclass = None superclass = IntracellularProperties + def __init__(self, species=None, resistivities=None, **kwargs_): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(IntracellularProperties2CaPools, self).__init__(species, resistivities, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(IntracellularProperties2CaPools, self).__init__( + species, resistivities, **kwargs_ + ) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, IntracellularProperties2CaPools) + CurrentSubclassModule_, IntracellularProperties2CaPools + ) if subclass is not None: return subclass(*args_, **kwargs_) if IntracellularProperties2CaPools.subclass: return IntracellularProperties2CaPools.subclass(*args_, **kwargs_) else: return 
IntracellularProperties2CaPools(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): - if ( - super(IntracellularProperties2CaPools, self).hasContent_() - ): + if super(IntracellularProperties2CaPools, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IntracellularProperties2CaPools', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('IntracellularProperties2CaPools') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="IntracellularProperties2CaPools", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get( + "IntracellularProperties2CaPools" + ) if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IntracellularProperties2CaPools') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="IntracellularProperties2CaPools", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='IntracellularProperties2CaPools', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="IntracellularProperties2CaPools", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='IntracellularProperties2CaPools'): - super(IntracellularProperties2CaPools, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IntracellularProperties2CaPools') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IntracellularProperties2CaPools', fromsubclass_=False, pretty_print=True): - super(IntracellularProperties2CaPools, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="IntracellularProperties2CaPools", + ): + super(IntracellularProperties2CaPools, self).exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="IntracellularProperties2CaPools", + ) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="IntracellularProperties2CaPools", + fromsubclass_=False, + pretty_print=True, + ): + super(IntracellularProperties2CaPools, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -5254,75 +9148,153 @@ def build(self, node): 
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - super(IntracellularProperties2CaPools, self).buildAttributes(node, attrs, already_processed) + super(IntracellularProperties2CaPools, self).buildAttributes( + node, attrs, already_processed + ) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(IntracellularProperties2CaPools, self).buildChildren(child_, node, nodeName_, True) + super(IntracellularProperties2CaPools, self).buildChildren( + child_, node, nodeName_, True + ) pass + + # end class IntracellularProperties2CaPools class ExtracellularPropertiesLocal(GeneratedsSuper): member_data_items_ = [ - MemberSpec_('species', 'Species', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Species', u'name': u'species', u'minOccurs': u'0'}, None), + MemberSpec_( + "species", + "Species", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"Species", + u"name": u"species", + u"minOccurs": u"0", + }, + None, + ), ] subclass = None superclass = None + def __init__(self, species=None, **kwargs_): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") if species is None: self.species = [] else: self.species = species + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, ExtracellularPropertiesLocal) + CurrentSubclassModule_, ExtracellularPropertiesLocal + ) if subclass is not None: return subclass(*args_, **kwargs_) if ExtracellularPropertiesLocal.subclass: return ExtracellularPropertiesLocal.subclass(*args_, **kwargs_) else: return ExtracellularPropertiesLocal(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): - if ( - self.species - ): + if self.species: return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExtracellularPropertiesLocal', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('ExtracellularPropertiesLocal') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ExtracellularPropertiesLocal", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ExtracellularPropertiesLocal") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExtracellularPropertiesLocal') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ExtracellularPropertiesLocal", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ExtracellularPropertiesLocal', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ExtracellularPropertiesLocal", + pretty_print=pretty_print, + ) 
showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ExtracellularPropertiesLocal'): + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ExtracellularPropertiesLocal", + ): pass - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExtracellularPropertiesLocal', fromsubclass_=False, pretty_print=True): + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ExtracellularPropertiesLocal", + fromsubclass_=False, + pretty_print=True, + ): if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" for species_ in self.species: - species_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='species', pretty_print=pretty_print) + species_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="species", + pretty_print=pretty_print, + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -5330,96 +9302,172 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'species': + if nodeName_ == "species": obj_ = Species.factory(parent_object_=self) obj_.build(child_) self.species.append(obj_) - obj_.original_tagname_ = 'species' + obj_.original_tagname_ = "species" + + # end class ExtracellularPropertiesLocal class SpaceStructure(GeneratedsSuper): member_data_items_ = [ - MemberSpec_('x_spacing', 'xs:float', 0, 1, {'use': 'optional'}), - MemberSpec_('y_spacing', 'xs:float', 0, 1, {'use': u'optional'}), - MemberSpec_('z_spacing', 'xs:float', 0, 1, {'use': u'optional'}), - MemberSpec_('x_start', 'xs:float', 0, 1, {'use': u'optional'}), - MemberSpec_('y_start', 'xs:float', 0, 1, {'use': u'optional'}), - MemberSpec_('z_start', 'xs:float', 0, 1, {'use': u'optional'}), + MemberSpec_("x_spacing", "xs:float", 0, 1, {"use": "optional"}), + MemberSpec_("y_spacing", "xs:float", 0, 1, {"use": u"optional"}), + MemberSpec_("z_spacing", "xs:float", 0, 1, {"use": u"optional"}), + MemberSpec_("x_start", "xs:float", 0, 1, {"use": u"optional"}), + MemberSpec_("y_start", "xs:float", 0, 1, {"use": u"optional"}), + MemberSpec_("z_start", "xs:float", 0, 1, {"use": u"optional"}), ] subclass = None superclass = None - def __init__(self, x_spacing=None, y_spacing=None, z_spacing=None, x_start=0, y_start=0, z_start=0, **kwargs_): + + def __init__( + self, + x_spacing=None, + y_spacing=None, + z_spacing=None, + x_start=0, + y_start=0, + z_start=0, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") self.x_spacing = _cast(float, x_spacing) self.y_spacing = _cast(float, y_spacing) self.z_spacing = _cast(float, z_spacing) self.x_start = _cast(float, x_start) self.y_start = _cast(float, y_start) self.z_start = _cast(float, z_start) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, SpaceStructure) + 
subclass = getSubclassFromModule_(CurrentSubclassModule_, SpaceStructure) if subclass is not None: return subclass(*args_, **kwargs_) if SpaceStructure.subclass: return SpaceStructure.subclass(*args_, **kwargs_) else: return SpaceStructure(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - ): + def hasContent_(self): + if (): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SpaceStructure', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('SpaceStructure') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="SpaceStructure", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("SpaceStructure") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpaceStructure') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="SpaceStructure" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SpaceStructure', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SpaceStructure'): - if self.x_spacing is not None and 'x_spacing' not in already_processed: - already_processed.add('x_spacing') - outfile.write(' xSpacing="%s"' % self.gds_format_float(self.x_spacing, input_name='xSpacing')) - if self.y_spacing is not None and 'y_spacing' not in already_processed: - already_processed.add('y_spacing') - outfile.write(' ySpacing="%s"' % self.gds_format_float(self.y_spacing, input_name='ySpacing')) - if self.z_spacing is not None and 'z_spacing' not in already_processed: - already_processed.add('z_spacing') - outfile.write(' zSpacing="%s"' % self.gds_format_float(self.z_spacing, input_name='zSpacing')) - if self.x_start != 0 and 'x_start' not in already_processed: - already_processed.add('x_start') - outfile.write(' xStart="%s"' % self.gds_format_float(self.x_start, input_name='xStart')) - if self.y_start != 0 and 'y_start' not in already_processed: - already_processed.add('y_start') - outfile.write(' yStart="%s"' % self.gds_format_float(self.y_start, input_name='yStart')) - if self.z_start != 0 and 'z_start' not in already_processed: - already_processed.add('z_start') - outfile.write(' zStart="%s"' % self.gds_format_float(self.z_start, input_name='zStart')) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SpaceStructure', fromsubclass_=False, pretty_print=True): + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="SpaceStructure", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + 
already_processed, + namespaceprefix_="", + name_="SpaceStructure", + ): + if self.x_spacing is not None and "x_spacing" not in already_processed: + already_processed.add("x_spacing") + outfile.write( + ' xSpacing="%s"' + % self.gds_format_float(self.x_spacing, input_name="xSpacing") + ) + if self.y_spacing is not None and "y_spacing" not in already_processed: + already_processed.add("y_spacing") + outfile.write( + ' ySpacing="%s"' + % self.gds_format_float(self.y_spacing, input_name="ySpacing") + ) + if self.z_spacing is not None and "z_spacing" not in already_processed: + already_processed.add("z_spacing") + outfile.write( + ' zSpacing="%s"' + % self.gds_format_float(self.z_spacing, input_name="zSpacing") + ) + if self.x_start != 0 and "x_start" not in already_processed: + already_processed.add("x_start") + outfile.write( + ' xStart="%s"' + % self.gds_format_float(self.x_start, input_name="xStart") + ) + if self.y_start != 0 and "y_start" not in already_processed: + already_processed.add("y_start") + outfile.write( + ' yStart="%s"' + % self.gds_format_float(self.y_start, input_name="yStart") + ) + if self.z_start != 0 and "z_start" not in already_processed: + already_processed.add("z_start") + outfile.write( + ' zStart="%s"' + % self.gds_format_float(self.z_start, input_name="zStart") + ) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="SpaceStructure", + fromsubclass_=False, + pretty_print=True, + ): pass + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -5427,133 +9475,227 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('xSpacing', node) - if value is not None and 'xSpacing' not in already_processed: - already_processed.add('xSpacing') + value = find_attr_value_("xSpacing", node) + if value is not None and "xSpacing" not in already_processed: + already_processed.add("xSpacing") try: self.x_spacing = float(value) except ValueError as exp: - raise ValueError('Bad float/double attribute (xSpacing): %s' % exp) - value = find_attr_value_('ySpacing', node) - if value is not None and 'ySpacing' not in already_processed: - already_processed.add('ySpacing') + raise ValueError("Bad float/double attribute (xSpacing): %s" % exp) + value = find_attr_value_("ySpacing", node) + if value is not None and "ySpacing" not in already_processed: + already_processed.add("ySpacing") try: self.y_spacing = float(value) except ValueError as exp: - raise ValueError('Bad float/double attribute (ySpacing): %s' % exp) - value = find_attr_value_('zSpacing', node) - if value is not None and 'zSpacing' not in already_processed: - already_processed.add('zSpacing') + raise ValueError("Bad float/double attribute (ySpacing): %s" % exp) + value = find_attr_value_("zSpacing", node) + if value is not None and "zSpacing" not in already_processed: + already_processed.add("zSpacing") try: self.z_spacing = float(value) except ValueError as exp: - raise ValueError('Bad float/double attribute (zSpacing): %s' % exp) - value = find_attr_value_('xStart', node) - if value is not None and 'xStart' not in already_processed: - already_processed.add('xStart') + raise ValueError("Bad float/double attribute (zSpacing): %s" % exp) + value = find_attr_value_("xStart", node) + if value is not None and "xStart" not in already_processed: + 
already_processed.add("xStart") try: self.x_start = float(value) except ValueError as exp: - raise ValueError('Bad float/double attribute (xStart): %s' % exp) - value = find_attr_value_('yStart', node) - if value is not None and 'yStart' not in already_processed: - already_processed.add('yStart') + raise ValueError("Bad float/double attribute (xStart): %s" % exp) + value = find_attr_value_("yStart", node) + if value is not None and "yStart" not in already_processed: + already_processed.add("yStart") try: self.y_start = float(value) except ValueError as exp: - raise ValueError('Bad float/double attribute (yStart): %s' % exp) - value = find_attr_value_('zStart', node) - if value is not None and 'zStart' not in already_processed: - already_processed.add('zStart') + raise ValueError("Bad float/double attribute (yStart): %s" % exp) + value = find_attr_value_("zStart", node) + if value is not None and "zStart" not in already_processed: + already_processed.add("zStart") try: self.z_start = float(value) except ValueError as exp: - raise ValueError('Bad float/double attribute (zStart): %s' % exp) + raise ValueError("Bad float/double attribute (zStart): %s" % exp) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): pass + + # end class SpaceStructure class Layout(GeneratedsSuper): member_data_items_ = [ - MemberSpec_('spaces', 'NmlId', 0, 1, {'use': 'optional'}), - MemberSpec_('random', 'RandomLayout', 0, 0, {u'type': u'RandomLayout', u'name': u'random'}, 5), - MemberSpec_('grid', 'GridLayout', 0, 0, {u'type': u'GridLayout', u'name': u'grid'}, 5), - MemberSpec_('unstructured', 'UnstructuredLayout', 0, 0, {u'type': u'UnstructuredLayout', u'name': u'unstructured'}, 5), + MemberSpec_("spaces", "NmlId", 0, 1, {"use": "optional"}), + MemberSpec_( + "random", + "RandomLayout", + 0, + 0, + {u"type": u"RandomLayout", u"name": u"random"}, + 5, + ), + MemberSpec_( + "grid", "GridLayout", 0, 0, {u"type": u"GridLayout", u"name": u"grid"}, 5 + ), + MemberSpec_( + "unstructured", + "UnstructuredLayout", + 0, + 0, + {u"type": u"UnstructuredLayout", u"name": u"unstructured"}, + 5, + ), ] subclass = None superclass = None - def __init__(self, spaces=None, random=None, grid=None, unstructured=None, **kwargs_): + + def __init__( + self, spaces=None, random=None, grid=None, unstructured=None, **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") self.spaces = _cast(None, spaces) self.random = random self.grid = grid self.unstructured = unstructured + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, Layout) + subclass = getSubclassFromModule_(CurrentSubclassModule_, Layout) if subclass is not None: return subclass(*args_, **kwargs_) if Layout.subclass: return Layout.subclass(*args_, **kwargs_) else: return Layout(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. 
if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] + self.validate_NmlId_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [[u"^[a-zA-Z_][a-zA-Z0-9_]*$"]] + def hasContent_(self): if ( - self.random is not None or - self.grid is not None or - self.unstructured is not None + self.random is not None + or self.grid is not None + or self.unstructured is not None ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Layout', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('Layout') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Layout", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("Layout") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Layout') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Layout" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Layout', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="Layout", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Layout'): - if self.spaces is not None and 'spaces' not in already_processed: - already_processed.add('spaces') - outfile.write(' space=%s' % (quote_attrib(self.spaces), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Layout', fromsubclass_=False, pretty_print=True): + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="Layout" + ): + if self.spaces is not None and "spaces" not in already_processed: + already_processed.add("spaces") + outfile.write(" space=%s" % (quote_attrib(self.spaces),)) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Layout", + fromsubclass_=False, + pretty_print=True, + ): if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.random is not None: - self.random.export(outfile, level, namespaceprefix_, namespacedef_='', name_='random', pretty_print=pretty_print) + self.random.export( + outfile, + level, 
+ namespaceprefix_, + namespacedef_="", + name_="random", + pretty_print=pretty_print, + ) if self.grid is not None: - self.grid.export(outfile, level, namespaceprefix_, namespacedef_='', name_='grid', pretty_print=pretty_print) + self.grid.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="grid", + pretty_print=pretty_print, + ) if self.unstructured is not None: - self.unstructured.export(outfile, level, namespaceprefix_, namespacedef_='', name_='unstructured', pretty_print=pretty_print) + self.unstructured.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="unstructured", + pretty_print=pretty_print, + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -5561,85 +9703,143 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('space', node) - if value is not None and 'space' not in already_processed: - already_processed.add('space') + value = find_attr_value_("space", node) + if value is not None and "space" not in already_processed: + already_processed.add("space") self.spaces = value - self.validate_NmlId(self.spaces) # validate type NmlId + self.validate_NmlId(self.spaces) # validate type NmlId + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'random': + if nodeName_ == "random": obj_ = RandomLayout.factory(parent_object_=self) obj_.build(child_) self.random = obj_ - obj_.original_tagname_ = 'random' - elif nodeName_ == 'grid': + obj_.original_tagname_ = "random" + elif nodeName_ == "grid": obj_ = GridLayout.factory(parent_object_=self) obj_.build(child_) self.grid = obj_ - obj_.original_tagname_ = 'grid' - elif nodeName_ == 'unstructured': + obj_.original_tagname_ = "grid" + elif nodeName_ == "unstructured": obj_ = UnstructuredLayout.factory(parent_object_=self) obj_.build(child_) self.unstructured = obj_ - obj_.original_tagname_ = 'unstructured' + obj_.original_tagname_ = "unstructured" + + # end class Layout class UnstructuredLayout(GeneratedsSuper): member_data_items_ = [ - MemberSpec_('number', 'xs:nonNegativeInteger', 0, 1, {'use': 'optional'}), + MemberSpec_("number", "xs:nonNegativeInteger", 0, 1, {"use": "optional"}), ] subclass = None superclass = None + def __init__(self, number=None, **kwargs_): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") self.number = _cast(int, number) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, UnstructuredLayout) + CurrentSubclassModule_, UnstructuredLayout + ) if subclass is not None: return subclass(*args_, **kwargs_) if UnstructuredLayout.subclass: return UnstructuredLayout.subclass(*args_, **kwargs_) else: return UnstructuredLayout(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - ): + def hasContent_(self): + if (): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='UnstructuredLayout', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('UnstructuredLayout') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="UnstructuredLayout", + pretty_print=True, + ): + imported_ns_def_ = 
GenerateDSNamespaceDefs_.get("UnstructuredLayout") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='UnstructuredLayout') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="UnstructuredLayout", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='UnstructuredLayout', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='UnstructuredLayout'): - if self.number is not None and 'number' not in already_processed: - already_processed.add('number') - outfile.write(' number="%s"' % self.gds_format_integer(self.number, input_name='number')) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='UnstructuredLayout', fromsubclass_=False, pretty_print=True): + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="UnstructuredLayout", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="UnstructuredLayout", + ): + if self.number is not None and "number" not in already_processed: + already_processed.add("number") + outfile.write( + ' number="%s"' + % self.gds_format_integer(self.number, input_name="number") + ) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="UnstructuredLayout", + fromsubclass_=False, + pretty_print=True, + ): pass + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -5647,87 +9847,148 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('number', node) - if value is not None and 'number' not in already_processed: - already_processed.add('number') + value = find_attr_value_("number", node) + if value is not None and "number" not in already_processed: + already_processed.add("number") try: self.number = int(value) except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + raise_parse_error(node, "Bad integer attribute: %s" % exp) if self.number < 0: - raise_parse_error(node, 'Invalid NonNegativeInteger') + raise_parse_error(node, "Invalid NonNegativeInteger") + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): pass + + # end class UnstructuredLayout class RandomLayout(GeneratedsSuper): member_data_items_ = [ - MemberSpec_('number', 'xs:nonNegativeInteger', 0, 1, {'use': 'optional'}), - MemberSpec_('regions', 'NmlId', 0, 1, 
{'use': 'optional'}), + MemberSpec_("number", "xs:nonNegativeInteger", 0, 1, {"use": "optional"}), + MemberSpec_("regions", "NmlId", 0, 1, {"use": "optional"}), ] subclass = None superclass = None + def __init__(self, number=None, regions=None, **kwargs_): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") self.number = _cast(int, number) self.regions = _cast(None, regions) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, RandomLayout) + subclass = getSubclassFromModule_(CurrentSubclassModule_, RandomLayout) if subclass is not None: return subclass(*args_, **kwargs_) if RandomLayout.subclass: return RandomLayout.subclass(*args_, **kwargs_) else: return RandomLayout(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] - def hasContent_(self): - if ( + self.validate_NmlId_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_NmlId_patterns_, + ) + ) - ): + validate_NmlId_patterns_ = [[u"^[a-zA-Z_][a-zA-Z0-9_]*$"]] + + def hasContent_(self): + if (): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='RandomLayout', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('RandomLayout') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="RandomLayout", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("RandomLayout") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='RandomLayout') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="RandomLayout" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='RandomLayout', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='RandomLayout'): - if self.number is not None and 'number' not in already_processed: - already_processed.add('number') - outfile.write(' number="%s"' % self.gds_format_integer(self.number, input_name='number')) - if self.regions is not None and 'regions' not in already_processed: - already_processed.add('regions') - outfile.write(' region=%s' % (quote_attrib(self.regions), )) - def exportChildren(self, outfile, level, 
namespaceprefix_='', namespacedef_='', name_='RandomLayout', fromsubclass_=False, pretty_print=True): + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="RandomLayout", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="RandomLayout", + ): + if self.number is not None and "number" not in already_processed: + already_processed.add("number") + outfile.write( + ' number="%s"' + % self.gds_format_integer(self.number, input_name="number") + ) + if self.regions is not None and "regions" not in already_processed: + already_processed.add("regions") + outfile.write(" region=%s" % (quote_attrib(self.regions),)) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="RandomLayout", + fromsubclass_=False, + pretty_print=True, + ): pass + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -5735,90 +9996,140 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('number', node) - if value is not None and 'number' not in already_processed: - already_processed.add('number') + value = find_attr_value_("number", node) + if value is not None and "number" not in already_processed: + already_processed.add("number") try: self.number = int(value) except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + raise_parse_error(node, "Bad integer attribute: %s" % exp) if self.number < 0: - raise_parse_error(node, 'Invalid NonNegativeInteger') - value = find_attr_value_('region', node) - if value is not None and 'region' not in already_processed: - already_processed.add('region') + raise_parse_error(node, "Invalid NonNegativeInteger") + value = find_attr_value_("region", node) + if value is not None and "region" not in already_processed: + already_processed.add("region") self.regions = value - self.validate_NmlId(self.regions) # validate type NmlId + self.validate_NmlId(self.regions) # validate type NmlId + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): pass + + # end class RandomLayout class GridLayout(GeneratedsSuper): member_data_items_ = [ - MemberSpec_('x_size', 'xs:nonNegativeInteger', 0, 1, {'use': 'optional'}), - MemberSpec_('y_size', 'xs:nonNegativeInteger', 0, 1, {'use': u'optional'}), - MemberSpec_('z_size', 'xs:nonNegativeInteger', 0, 1, {'use': u'optional'}), + MemberSpec_("x_size", "xs:nonNegativeInteger", 0, 1, {"use": "optional"}), + MemberSpec_("y_size", "xs:nonNegativeInteger", 0, 1, {"use": u"optional"}), + MemberSpec_("z_size", "xs:nonNegativeInteger", 0, 1, {"use": u"optional"}), ] subclass = None superclass = None + def __init__(self, x_size=None, y_size=None, z_size=None, **kwargs_): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") self.x_size = _cast(int, x_size) self.y_size = _cast(int, y_size) self.z_size = _cast(int, z_size) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, GridLayout) + subclass = 
getSubclassFromModule_(CurrentSubclassModule_, GridLayout) if subclass is not None: return subclass(*args_, **kwargs_) if GridLayout.subclass: return GridLayout.subclass(*args_, **kwargs_) else: return GridLayout(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - ): + def hasContent_(self): + if (): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GridLayout', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('GridLayout') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="GridLayout", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("GridLayout") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GridLayout') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="GridLayout" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='GridLayout', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GridLayout'): - if self.x_size is not None and 'x_size' not in already_processed: - already_processed.add('x_size') - outfile.write(' xSize="%s"' % self.gds_format_integer(self.x_size, input_name='xSize')) - if self.y_size is not None and 'y_size' not in already_processed: - already_processed.add('y_size') - outfile.write(' ySize="%s"' % self.gds_format_integer(self.y_size, input_name='ySize')) - if self.z_size is not None and 'z_size' not in already_processed: - already_processed.add('z_size') - outfile.write(' zSize="%s"' % self.gds_format_integer(self.z_size, input_name='zSize')) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GridLayout', fromsubclass_=False, pretty_print=True): + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="GridLayout", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="GridLayout" + ): + if self.x_size is not None and "x_size" not in already_processed: + already_processed.add("x_size") + outfile.write( + ' xSize="%s"' % self.gds_format_integer(self.x_size, input_name="xSize") + ) + if self.y_size is not None and "y_size" not in already_processed: + already_processed.add("y_size") + outfile.write( + ' ySize="%s"' % self.gds_format_integer(self.y_size, input_name="ySize") + ) + if self.z_size is not None and "z_size" not in already_processed: + already_processed.add("z_size") + outfile.write( + ' zSize="%s"' % self.gds_format_integer(self.z_size, input_name="zSize") + ) + + def exportChildren( + 
self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="GridLayout", + fromsubclass_=False, + pretty_print=True, + ): pass + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -5826,116 +10137,176 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('xSize', node) - if value is not None and 'xSize' not in already_processed: - already_processed.add('xSize') + value = find_attr_value_("xSize", node) + if value is not None and "xSize" not in already_processed: + already_processed.add("xSize") try: self.x_size = int(value) except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + raise_parse_error(node, "Bad integer attribute: %s" % exp) if self.x_size < 0: - raise_parse_error(node, 'Invalid NonNegativeInteger') - value = find_attr_value_('ySize', node) - if value is not None and 'ySize' not in already_processed: - already_processed.add('ySize') + raise_parse_error(node, "Invalid NonNegativeInteger") + value = find_attr_value_("ySize", node) + if value is not None and "ySize" not in already_processed: + already_processed.add("ySize") try: self.y_size = int(value) except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + raise_parse_error(node, "Bad integer attribute: %s" % exp) if self.y_size < 0: - raise_parse_error(node, 'Invalid NonNegativeInteger') - value = find_attr_value_('zSize', node) - if value is not None and 'zSize' not in already_processed: - already_processed.add('zSize') + raise_parse_error(node, "Invalid NonNegativeInteger") + value = find_attr_value_("zSize", node) + if value is not None and "zSize" not in already_processed: + already_processed.add("zSize") try: self.z_size = int(value) except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + raise_parse_error(node, "Bad integer attribute: %s" % exp) if self.z_size < 0: - raise_parse_error(node, 'Invalid NonNegativeInteger') + raise_parse_error(node, "Invalid NonNegativeInteger") + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): pass + + # end class GridLayout class Instance(GeneratedsSuper): member_data_items_ = [ - MemberSpec_('id', 'xs:nonNegativeInteger', 0, 1, {'use': 'optional'}), - MemberSpec_('i', 'xs:nonNegativeInteger', 0, 1, {'use': 'optional'}), - MemberSpec_('j', 'xs:nonNegativeInteger', 0, 1, {'use': 'optional'}), - MemberSpec_('k', 'xs:nonNegativeInteger', 0, 1, {'use': 'optional'}), - MemberSpec_('location', 'Location', 0, 0, {u'type': u'Location', u'name': u'location'}, None), + MemberSpec_("id", "xs:nonNegativeInteger", 0, 1, {"use": "optional"}), + MemberSpec_("i", "xs:nonNegativeInteger", 0, 1, {"use": "optional"}), + MemberSpec_("j", "xs:nonNegativeInteger", 0, 1, {"use": "optional"}), + MemberSpec_("k", "xs:nonNegativeInteger", 0, 1, {"use": "optional"}), + MemberSpec_( + "location", + "Location", + 0, + 0, + {u"type": u"Location", u"name": u"location"}, + None, + ), ] subclass = None superclass = None + def __init__(self, id=None, i=None, j=None, k=None, location=None, **kwargs_): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") self.id = _cast(int, id) self.i = _cast(int, i) self.j = _cast(int, j) self.k = _cast(int, k) self.location = location + def 
factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, Instance) + subclass = getSubclassFromModule_(CurrentSubclassModule_, Instance) if subclass is not None: return subclass(*args_, **kwargs_) if Instance.subclass: return Instance.subclass(*args_, **kwargs_) else: return Instance(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): - if ( - self.location is not None - ): + if self.location is not None: return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Instance', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('Instance') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Instance", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("Instance") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Instance') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Instance" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Instance', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="Instance", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Instance'): - if self.id is not None and 'id' not in already_processed: - already_processed.add('id') - outfile.write(' id="%s"' % self.gds_format_integer(self.id, input_name='id')) - if self.i is not None and 'i' not in already_processed: - already_processed.add('i') - outfile.write(' i="%s"' % self.gds_format_integer(self.i, input_name='i')) - if self.j is not None and 'j' not in already_processed: - already_processed.add('j') - outfile.write(' j="%s"' % self.gds_format_integer(self.j, input_name='j')) - if self.k is not None and 'k' not in already_processed: - already_processed.add('k') - outfile.write(' k="%s"' % self.gds_format_integer(self.k, input_name='k')) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Instance', fromsubclass_=False, pretty_print=True): + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="Instance" + ): + if self.id is not None and "id" not in already_processed: + already_processed.add("id") + outfile.write( + ' id="%s"' % self.gds_format_integer(self.id, input_name="id") + ) + if self.i is not None and "i" not in already_processed: + already_processed.add("i") + outfile.write(' i="%s"' % self.gds_format_integer(self.i, input_name="i")) + if 
self.j is not None and "j" not in already_processed: + already_processed.add("j") + outfile.write(' j="%s"' % self.gds_format_integer(self.j, input_name="j")) + if self.k is not None and "k" not in already_processed: + already_processed.add("k") + outfile.write(' k="%s"' % self.gds_format_integer(self.k, input_name="k")) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Instance", + fromsubclass_=False, + pretty_print=True, + ): if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.location is not None: - self.location.export(outfile, level, namespaceprefix_, namespacedef_='', name_='location', pretty_print=pretty_print) + self.location.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="location", + pretty_print=pretty_print, + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -5943,125 +10314,172 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('id', node) - if value is not None and 'id' not in already_processed: - already_processed.add('id') + value = find_attr_value_("id", node) + if value is not None and "id" not in already_processed: + already_processed.add("id") try: self.id = int(value) except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + raise_parse_error(node, "Bad integer attribute: %s" % exp) if self.id < 0: - raise_parse_error(node, 'Invalid NonNegativeInteger') - value = find_attr_value_('i', node) - if value is not None and 'i' not in already_processed: - already_processed.add('i') + raise_parse_error(node, "Invalid NonNegativeInteger") + value = find_attr_value_("i", node) + if value is not None and "i" not in already_processed: + already_processed.add("i") try: self.i = int(value) except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + raise_parse_error(node, "Bad integer attribute: %s" % exp) if self.i < 0: - raise_parse_error(node, 'Invalid NonNegativeInteger') - value = find_attr_value_('j', node) - if value is not None and 'j' not in already_processed: - already_processed.add('j') + raise_parse_error(node, "Invalid NonNegativeInteger") + value = find_attr_value_("j", node) + if value is not None and "j" not in already_processed: + already_processed.add("j") try: self.j = int(value) except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + raise_parse_error(node, "Bad integer attribute: %s" % exp) if self.j < 0: - raise_parse_error(node, 'Invalid NonNegativeInteger') - value = find_attr_value_('k', node) - if value is not None and 'k' not in already_processed: - already_processed.add('k') + raise_parse_error(node, "Invalid NonNegativeInteger") + value = find_attr_value_("k", node) + if value is not None and "k" not in already_processed: + already_processed.add("k") try: self.k = int(value) except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + raise_parse_error(node, "Bad integer attribute: %s" % exp) if self.k < 0: - raise_parse_error(node, 'Invalid NonNegativeInteger') + raise_parse_error(node, "Invalid NonNegativeInteger") + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'location': + if nodeName_ == "location": obj_ = 
Location.factory(parent_object_=self) obj_.build(child_) self.location = obj_ - obj_.original_tagname_ = 'location' + obj_.original_tagname_ = "location" def __str__(self): - return "Instance "+str(self.id)+ (" at location: "+str(self.location) if self.location else "") + return ( + "Instance " + + str(self.id) + + (" at location: " + str(self.location) if self.location else "") + ) def __repr__(self): return str(self) + # end class Instance class Location(GeneratedsSuper): member_data_items_ = [ - MemberSpec_('x', 'xs:float', 0, 0, {'use': u'required'}), - MemberSpec_('y', 'xs:float', 0, 0, {'use': u'required'}), - MemberSpec_('z', 'xs:float', 0, 0, {'use': u'required'}), + MemberSpec_("x", "xs:float", 0, 0, {"use": u"required"}), + MemberSpec_("y", "xs:float", 0, 0, {"use": u"required"}), + MemberSpec_("z", "xs:float", 0, 0, {"use": u"required"}), ] subclass = None superclass = None + def __init__(self, x=None, y=None, z=None, **kwargs_): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") self.x = _cast(float, x) self.y = _cast(float, y) self.z = _cast(float, z) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, Location) + subclass = getSubclassFromModule_(CurrentSubclassModule_, Location) if subclass is not None: return subclass(*args_, **kwargs_) if Location.subclass: return Location.subclass(*args_, **kwargs_) else: return Location(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - ): + def hasContent_(self): + if (): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Location', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('Location') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Location", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("Location") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Location') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Location" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Location', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Location'): - if self.x is not None and 'x' not in already_processed: - already_processed.add('x') - outfile.write(' x="%s"' % self.gds_format_float(self.x, input_name='x')) - if self.y is not None and 'y' not in already_processed: - already_processed.add('y') - outfile.write(' y="%s"' % self.gds_format_float(self.y, input_name='y')) - if self.z is not None and 'z' not in already_processed: - already_processed.add('z') - 
outfile.write(' z="%s"' % self.gds_format_float(self.z, input_name='z')) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Location', fromsubclass_=False, pretty_print=True): + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="Location", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="Location" + ): + if self.x is not None and "x" not in already_processed: + already_processed.add("x") + outfile.write(' x="%s"' % self.gds_format_float(self.x, input_name="x")) + if self.y is not None and "y" not in already_processed: + already_processed.add("y") + outfile.write(' y="%s"' % self.gds_format_float(self.y, input_name="y")) + if self.z is not None and "z" not in already_processed: + already_processed.add("z") + outfile.write(' z="%s"' % self.gds_format_float(self.z, input_name="z")) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Location", + fromsubclass_=False, + pretty_print=True, + ): pass + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -6069,46 +10487,57 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('x', node) - if value is not None and 'x' not in already_processed: - already_processed.add('x') + value = find_attr_value_("x", node) + if value is not None and "x" not in already_processed: + already_processed.add("x") try: self.x = float(value) except ValueError as exp: - raise ValueError('Bad float/double attribute (x): %s' % exp) - value = find_attr_value_('y', node) - if value is not None and 'y' not in already_processed: - already_processed.add('y') + raise ValueError("Bad float/double attribute (x): %s" % exp) + value = find_attr_value_("y", node) + if value is not None and "y" not in already_processed: + already_processed.add("y") try: self.y = float(value) except ValueError as exp: - raise ValueError('Bad float/double attribute (y): %s' % exp) - value = find_attr_value_('z', node) - if value is not None and 'z' not in already_processed: - already_processed.add('z') + raise ValueError("Bad float/double attribute (y): %s" % exp) + value = find_attr_value_("z", node) + if value is not None and "z" not in already_processed: + already_processed.add("z") try: self.z = float(value) except ValueError as exp: - raise ValueError('Bad float/double attribute (z): %s' % exp) + raise ValueError("Bad float/double attribute (z): %s" % exp) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): pass - def _format(self,value): + def _format(self, value): - if int(value)==value: + if int(value) == value: return str(int(value)) else: - return '%.4f' % value + return "%.4f" % value def __str__(self): - return "("+ self._format(self.x) +", "+ self._format(self.y) +", "+ self._format(self.z) +")" + return ( + "(" + + self._format(self.x) + + ", " + + self._format(self.y) + + ", " + + self._format(self.z) + + ")" + ) def __repr__(self): return str(self) + # end class Location @@ -6116,81 +10545,167 @@ class SynapticConnection(GeneratedsSuper): """Single explicit connection. 
Introduced to test connections in LEMS. Will probably be removed in favour of connections wrapped in projection element""" + member_data_items_ = [ - MemberSpec_('from_', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('to', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('synapse', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('destination', 'NmlId', 0, 1, {'use': u'optional'}), + MemberSpec_("from_", "xs:string", 0, 0, {"use": u"required"}), + MemberSpec_("to", "xs:string", 0, 0, {"use": u"required"}), + MemberSpec_("synapse", "xs:string", 0, 0, {"use": u"required"}), + MemberSpec_("destination", "NmlId", 0, 1, {"use": u"optional"}), ] subclass = None superclass = None + def __init__(self, from_=None, to=None, synapse=None, destination=None, **kwargs_): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") self.from_ = _cast(None, from_) self.to = _cast(None, to) self.synapse = _cast(None, synapse) self.destination = _cast(None, destination) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, SynapticConnection) + CurrentSubclassModule_, SynapticConnection + ) if subclass is not None: return subclass(*args_, **kwargs_) if SynapticConnection.subclass: return SynapticConnection.subclass(*args_, **kwargs_) else: return SynapticConnection(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] - def hasContent_(self): - if ( + self.validate_NmlId_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_NmlId_patterns_, + ) + ) - ): + validate_NmlId_patterns_ = [[u"^[a-zA-Z_][a-zA-Z0-9_]*$"]] + + def hasContent_(self): + if (): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SynapticConnection', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('SynapticConnection') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="SynapticConnection", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("SynapticConnection") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SynapticConnection') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="SynapticConnection", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, 
namespacedef_, name_='SynapticConnection', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SynapticConnection'): - if self.from_ is not None and 'from_' not in already_processed: - already_processed.add('from_') - outfile.write(' from=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.from_), input_name='from')), )) - if self.to is not None and 'to' not in already_processed: - already_processed.add('to') - outfile.write(' to=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.to), input_name='to')), )) - if self.synapse is not None and 'synapse' not in already_processed: - already_processed.add('synapse') - outfile.write(' synapse=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.synapse), input_name='synapse')), )) - if self.destination is not None and 'destination' not in already_processed: - already_processed.add('destination') - outfile.write(' destination=%s' % (quote_attrib(self.destination), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SynapticConnection', fromsubclass_=False, pretty_print=True): + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="SynapticConnection", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="SynapticConnection", + ): + if self.from_ is not None and "from_" not in already_processed: + already_processed.add("from_") + outfile.write( + " from=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.from_), input_name="from" + ) + ), + ) + ) + if self.to is not None and "to" not in already_processed: + already_processed.add("to") + outfile.write( + " to=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.to), input_name="to") + ), + ) + ) + if self.synapse is not None and "synapse" not in already_processed: + already_processed.add("synapse") + outfile.write( + " synapse=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.synapse), input_name="synapse" + ) + ), + ) + ) + if self.destination is not None and "destination" not in already_processed: + already_processed.add("destination") + outfile.write(" destination=%s" % (quote_attrib(self.destination),)) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="SynapticConnection", + fromsubclass_=False, + pretty_print=True, + ): pass + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -6198,46 +10713,60 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('from', node) - if value is not None and 'from' not in already_processed: - already_processed.add('from') + value = find_attr_value_("from", node) + if value is not None and "from" not in already_processed: + already_processed.add("from") self.from_ = value - value = find_attr_value_('to', node) - if value is not None and 'to' not in already_processed: - already_processed.add('to') + value = find_attr_value_("to", node) 
+ if value is not None and "to" not in already_processed: + already_processed.add("to") self.to = value - value = find_attr_value_('synapse', node) - if value is not None and 'synapse' not in already_processed: - already_processed.add('synapse') + value = find_attr_value_("synapse", node) + if value is not None and "synapse" not in already_processed: + already_processed.add("synapse") self.synapse = value - value = find_attr_value_('destination', node) - if value is not None and 'destination' not in already_processed: - already_processed.add('destination') + value = find_attr_value_("destination", node) + if value is not None and "destination" not in already_processed: + already_processed.add("destination") self.destination = value - self.validate_NmlId(self.destination) # validate type NmlId + self.validate_NmlId(self.destination) # validate type NmlId + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): pass - def _get_cell_id(self,ref): + def _get_cell_id(self, ref): """Get cell ID""" - if '[' in ref: - return int(ref.split('[')[1].split(']')[0]) + if "[" in ref: + return int(ref.split("[")[1].split("]")[0]) else: - return int(ref.split('/')[2]) + return int(ref.split("/")[2]) - def _get_population(self,ref): + def _get_population(self, ref): """Get population""" - if '[' in ref: - return ref.split('[')[0] + if "[" in ref: + return ref.split("[")[0] else: - return ref.split('/')[0] + return ref.split("/")[0] def __str__(self): - dest = self.destination if self.destination else 'unspecified' - return "Synaptic connection from "+str(self._get_population(self.from_))+"(cell "+str(self._get_cell_id(self.from_))+ ") -> "+str(self._get_population(self.to))+"(cell "+str(self._get_cell_id(self.to))+"), syn: "+self.synapse+", destination: "+dest - + dest = self.destination if self.destination else "unspecified" + return ( + "Synaptic connection from " + + str(self._get_population(self.from_)) + + "(cell " + + str(self._get_cell_id(self.from_)) + + ") -> " + + str(self._get_population(self.to)) + + "(cell " + + str(self._get_cell_id(self.to)) + + "), syn: " + + self.synapse + + ", destination: " + + dest + ) # end class SynapticConnection @@ -6246,69 +10775,142 @@ class ExplicitInput(GeneratedsSuper): """Single explicit input. Introduced to test inputs in LEMS. 
Will probably be removed in favour of inputs wrapped in inputList element""" + member_data_items_ = [ - MemberSpec_('target', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('input', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('destination', 'xs:string', 0, 1, {'use': 'optional'}), + MemberSpec_("target", "xs:string", 0, 0, {"use": u"required"}), + MemberSpec_("input", "xs:string", 0, 0, {"use": u"required"}), + MemberSpec_("destination", "xs:string", 0, 1, {"use": "optional"}), ] subclass = None superclass = None + def __init__(self, target=None, input=None, destination=None, **kwargs_): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") self.target = _cast(None, target) self.input = _cast(None, input) self.destination = _cast(None, destination) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, ExplicitInput) + subclass = getSubclassFromModule_(CurrentSubclassModule_, ExplicitInput) if subclass is not None: return subclass(*args_, **kwargs_) if ExplicitInput.subclass: return ExplicitInput.subclass(*args_, **kwargs_) else: return ExplicitInput(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - ): + def hasContent_(self): + if (): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExplicitInput', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('ExplicitInput') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ExplicitInput", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ExplicitInput") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExplicitInput') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="ExplicitInput" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ExplicitInput', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ExplicitInput'): - if self.target is not None and 'target' not in already_processed: - already_processed.add('target') - outfile.write(' target=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.target), input_name='target')), )) - if self.input is not None and 'input' not in already_processed: - already_processed.add('input') - outfile.write(' input=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.input), input_name='input')), )) - if self.destination is not None and 'destination' not in already_processed: - already_processed.add('destination') - outfile.write(' destination=%s' % 
(self.gds_encode(self.gds_format_string(quote_attrib(self.destination), input_name='destination')), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExplicitInput', fromsubclass_=False, pretty_print=True): + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ExplicitInput", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ExplicitInput", + ): + if self.target is not None and "target" not in already_processed: + already_processed.add("target") + outfile.write( + " target=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.target), input_name="target" + ) + ), + ) + ) + if self.input is not None and "input" not in already_processed: + already_processed.add("input") + outfile.write( + " input=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.input), input_name="input" + ) + ), + ) + ) + if self.destination is not None and "destination" not in already_processed: + already_processed.add("destination") + outfile.write( + " destination=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.destination), input_name="destination" + ) + ), + ) + ) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ExplicitInput", + fromsubclass_=False, + pretty_print=True, + ): pass + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -6316,30 +10918,32 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('target', node) - if value is not None and 'target' not in already_processed: - already_processed.add('target') + value = find_attr_value_("target", node) + if value is not None and "target" not in already_processed: + already_processed.add("target") self.target = value - value = find_attr_value_('input', node) - if value is not None and 'input' not in already_processed: - already_processed.add('input') + value = find_attr_value_("input", node) + if value is not None and "input" not in already_processed: + already_processed.add("input") self.input = value - value = find_attr_value_('destination', node) - if value is not None and 'destination' not in already_processed: - already_processed.add('destination') + value = find_attr_value_("destination", node) + if value is not None and "destination" not in already_processed: + already_processed.add("destination") self.destination = value + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): pass def _get_cell_id(self, id_string): - if '[' in id_string: - return int(id_string.split('[')[1].split(']')[0]) + if "[" in id_string: + return int(id_string.split("[")[1].split("]")[0]) else: - return int(id_string.split('/')[2]) + return int(id_string.split("/")[2]) def get_target_cell_id(self): - """Get ID of target cell. 
""" + """Get ID of target cell.""" return self._get_cell_id(self.target) @@ -6360,130 +10964,230 @@ def get_fraction_along(self): def __str__(self): - return "Input "+str(self.id)+": "+str(self.get_target_cell_id())+":"+str(self.get_segment_id())+"("+'%.6f'%self.get_fraction_along()+")" - - - def get_target_cell_id(self,): + return ( + "Input " + + str(self.id) + + ": " + + str(self.get_target_cell_id()) + + ":" + + str(self.get_segment_id()) + + "(" + + "%.6f" % self.get_fraction_along() + + ")" + ) + + def get_target_cell_id( + self, + ): """Get target cell ID""" - if '[' in self.target: - return int(self.target.split('[')[1].split(']')[0]) + if "[" in self.target: + return int(self.target.split("[")[1].split("]")[0]) else: - return int(self.target.split('/')[2]) + return int(self.target.split("/")[2]) - def get_target_population(self,): + def get_target_population( + self, + ): """Get target population.""" - if '[' in self.target: - return self.target.split('[')[0] + if "[" in self.target: + return self.target.split("[")[0] else: - return self.target.split('/')[0] + return self.target.split("/")[0] def __str__(self): - dest = self.destination if self.destination else 'unspecified' - return "Explicit Input of type "+str(self.input)+" to "+self.get_target_population()+"(cell "+str(self.get_target_cell_id())+ "), destination: "+dest - + dest = self.destination if self.destination else "unspecified" + return ( + "Explicit Input of type " + + str(self.input) + + " to " + + self.get_target_population() + + "(cell " + + str(self.get_target_cell_id()) + + "), destination: " + + dest + ) # end class ExplicitInput class Input(GeneratedsSuper): """Individual input to the cell specified by target""" + member_data_items_ = [ - MemberSpec_('id', 'NonNegativeInteger', 0, 0, {'use': u'required'}), - MemberSpec_('target', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('destination', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('segment_id', 'NonNegativeInteger', 0, 1, {'use': 'optional'}), - MemberSpec_('fraction_along', 'ZeroToOne', 0, 1, {'use': 'optional'}), + MemberSpec_("id", "NonNegativeInteger", 0, 0, {"use": u"required"}), + MemberSpec_("target", "xs:string", 0, 0, {"use": u"required"}), + MemberSpec_("destination", "NmlId", 0, 0, {"use": u"required"}), + MemberSpec_("segment_id", "NonNegativeInteger", 0, 1, {"use": "optional"}), + MemberSpec_("fraction_along", "ZeroToOne", 0, 1, {"use": "optional"}), ] subclass = None superclass = None - def __init__(self, id=None, target=None, destination=None, segment_id=None, fraction_along=None, extensiontype_=None, **kwargs_): + + def __init__( + self, + id=None, + target=None, + destination=None, + segment_id=None, + fraction_along=None, + extensiontype_=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") self.id = _cast(int, id) self.target = _cast(None, target) self.destination = _cast(None, destination) self.segment_id = _cast(int, segment_id) self.fraction_along = _cast(float, fraction_along) self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, Input) + subclass = getSubclassFromModule_(CurrentSubclassModule_, Input) if subclass is not None: return subclass(*args_, **kwargs_) if Input.subclass: return Input.subclass(*args_, **kwargs_) else: return Input(*args_, **kwargs_) + factory = staticmethod(factory) + def 
validate_NonNegativeInteger(self, value): # Validate type NonNegativeInteger, a restriction on xs:nonNegativeInteger. if value is not None and Validate_simpletypes_: pass + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] + self.validate_NmlId_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [[u"^[a-zA-Z_][a-zA-Z0-9_]*$"]] + def validate_ZeroToOne(self, value): # Validate type ZeroToOne, a restriction on xs:float. if value is not None and Validate_simpletypes_: if value < 0: - warnings_.warn('Value "%(value)s" does not match xsd minInclusive restriction on ZeroToOne' % {"value" : value} ) + warnings_.warn( + 'Value "%(value)s" does not match xsd minInclusive restriction on ZeroToOne' + % {"value": value} + ) if value > 1: - warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on ZeroToOne' % {"value" : value} ) - def hasContent_(self): - if ( + warnings_.warn( + 'Value "%(value)s" does not match xsd maxInclusive restriction on ZeroToOne' + % {"value": value} + ) - ): + def hasContent_(self): + if (): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Input', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('Input') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Input", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("Input") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Input') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Input" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Input', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Input'): - if self.id is not None and 'id' not in already_processed: - already_processed.add('id') - outfile.write(' id=%s' % (quote_attrib(self.id), )) - if self.target is not None and 'target' not in already_processed: - already_processed.add('target') - outfile.write(' target=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.target), input_name='target')), )) - if self.destination is not None and 'destination' not in already_processed: - already_processed.add('destination') - outfile.write(' destination=%s' % 
(quote_attrib(self.destination), )) - if self.segment_id is not None and 'segment_id' not in already_processed: - already_processed.add('segment_id') - outfile.write(' segmentId=%s' % (quote_attrib(self.segment_id), )) - if self.fraction_along is not None and 'fraction_along' not in already_processed: - already_processed.add('fraction_along') - outfile.write(' fractionAlong=%s' % (quote_attrib(self.fraction_along), )) - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="Input", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="Input" + ): + if self.id is not None and "id" not in already_processed: + already_processed.add("id") + outfile.write(" id=%s" % (quote_attrib(self.id),)) + if self.target is not None and "target" not in already_processed: + already_processed.add("target") + outfile.write( + " target=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.target), input_name="target" + ) + ), + ) + ) + if self.destination is not None and "destination" not in already_processed: + already_processed.add("destination") + outfile.write(" destination=%s" % (quote_attrib(self.destination),)) + if self.segment_id is not None and "segment_id" not in already_processed: + already_processed.add("segment_id") + outfile.write(" segmentId=%s" % (quote_attrib(self.segment_id),)) + if ( + self.fraction_along is not None + and "fraction_along" not in already_processed + ): + already_processed.add("fraction_along") + outfile.write(" fractionAlong=%s" % (quote_attrib(self.fraction_along),)) + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Input', fromsubclass_=False, pretty_print=True): + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Input", + fromsubclass_=False, + pretty_print=True, + ): pass + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -6491,59 +11195,65 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('id', node) - if value is not None and 'id' not in already_processed: - already_processed.add('id') + value = find_attr_value_("id", node) + if value is not None and "id" not in already_processed: + already_processed.add("id") try: self.id = int(value) except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + raise_parse_error(node, "Bad integer attribute: %s" % exp) if self.id < 0: - raise_parse_error(node, 'Invalid NonNegativeInteger') - self.validate_NonNegativeInteger(self.id) # validate type NonNegativeInteger - value = find_attr_value_('target', node) - if value is not None and 'target' not in already_processed: - already_processed.add('target') + raise_parse_error(node, "Invalid NonNegativeInteger") + 
self.validate_NonNegativeInteger( + self.id + ) # validate type NonNegativeInteger + value = find_attr_value_("target", node) + if value is not None and "target" not in already_processed: + already_processed.add("target") self.target = value - value = find_attr_value_('destination', node) - if value is not None and 'destination' not in already_processed: - already_processed.add('destination') + value = find_attr_value_("destination", node) + if value is not None and "destination" not in already_processed: + already_processed.add("destination") self.destination = value - self.validate_NmlId(self.destination) # validate type NmlId - value = find_attr_value_('segmentId', node) - if value is not None and 'segmentId' not in already_processed: - already_processed.add('segmentId') + self.validate_NmlId(self.destination) # validate type NmlId + value = find_attr_value_("segmentId", node) + if value is not None and "segmentId" not in already_processed: + already_processed.add("segmentId") try: self.segment_id = int(value) except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + raise_parse_error(node, "Bad integer attribute: %s" % exp) if self.segment_id < 0: - raise_parse_error(node, 'Invalid NonNegativeInteger') - self.validate_NonNegativeInteger(self.segment_id) # validate type NonNegativeInteger - value = find_attr_value_('fractionAlong', node) - if value is not None and 'fractionAlong' not in already_processed: - already_processed.add('fractionAlong') + raise_parse_error(node, "Invalid NonNegativeInteger") + self.validate_NonNegativeInteger( + self.segment_id + ) # validate type NonNegativeInteger + value = find_attr_value_("fractionAlong", node) + if value is not None and "fractionAlong" not in already_processed: + already_processed.add("fractionAlong") try: self.fraction_along = float(value) except ValueError as exp: - raise ValueError('Bad float/double attribute (fractionAlong): %s' % exp) - self.validate_ZeroToOne(self.fraction_along) # validate type ZeroToOne - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + raise ValueError("Bad float/double attribute (fractionAlong): %s" % exp) + self.validate_ZeroToOne(self.fraction_along) # validate type ZeroToOne + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): pass def _get_cell_id(self, id_string): - if '[' in id_string: - return int(id_string.split('[')[1].split(']')[0]) + if "[" in id_string: + return int(id_string.split("[")[1].split("]")[0]) else: - return int(id_string.split('/')[2]) + return int(id_string.split("/")[2]) def get_target_cell_id(self): - """Get ID of target cell. """ + """Get ID of target cell.""" return self._get_cell_id(self.target) @@ -6564,7 +11274,17 @@ def get_fraction_along(self): def __str__(self): - return "Input "+str(self.id)+": "+str(self.get_target_cell_id())+":"+str(self.get_segment_id())+"("+'%.6f'%self.get_fraction_along()+")" + return ( + "Input " + + str(self.id) + + ": " + + str(self.get_target_cell_id()) + + ":" + + str(self.get_segment_id()) + + "(" + + "%.6f" % self.get_fraction_along() + + ")" + ) # end class Input @@ -6572,62 +11292,120 @@ def __str__(self): class InputW(Input): """Individual input to the cell specified by target. 
Includes setting of _weight for the connection""" + member_data_items_ = [ - MemberSpec_('weight', 'xs:float', 0, 0, {'use': u'required'}), + MemberSpec_("weight", "xs:float", 0, 0, {"use": u"required"}), ] subclass = None superclass = Input - def __init__(self, id=None, target=None, destination=None, segment_id=None, fraction_along=None, weight=None, **kwargs_): + + def __init__( + self, + id=None, + target=None, + destination=None, + segment_id=None, + fraction_along=None, + weight=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(InputW, self).__init__(id, target, destination, segment_id, fraction_along, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(InputW, self).__init__( + id, target, destination, segment_id, fraction_along, **kwargs_ + ) self.weight = _cast(float, weight) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, InputW) + subclass = getSubclassFromModule_(CurrentSubclassModule_, InputW) if subclass is not None: return subclass(*args_, **kwargs_) if InputW.subclass: return InputW.subclass(*args_, **kwargs_) else: return InputW(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): - if ( - super(InputW, self).hasContent_() - ): + if super(InputW, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='InputW', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('InputW') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="InputW", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("InputW") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='InputW') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="InputW" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='InputW', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='InputW'): - super(InputW, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='InputW') - if self.weight is not None and 'weight' not in already_processed: - already_processed.add('weight') - outfile.write(' weight="%s"' % self.gds_format_float(self.weight, input_name='weight')) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='InputW', fromsubclass_=False, pretty_print=True): - super(InputW, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="InputW", + 
pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="InputW" + ): + super(InputW, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="InputW" + ) + if self.weight is not None and "weight" not in already_processed: + already_processed.add("weight") + outfile.write( + ' weight="%s"' % self.gds_format_float(self.weight, input_name="weight") + ) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="InputW", + fromsubclass_=False, + pretty_print=True, + ): + super(InputW, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) pass + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -6635,15 +11413,17 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('weight', node) - if value is not None and 'weight' not in already_processed: - already_processed.add('weight') + value = find_attr_value_("weight", node) + if value is not None and "weight" not in already_processed: + already_processed.add("weight") try: self.weight = float(value) except ValueError as exp: - raise ValueError('Bad float/double attribute (weight): %s' % exp) + raise ValueError("Bad float/double attribute (weight): %s" % exp) super(InputW, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(InputW, self).buildChildren(child_, node, nodeName_, True) pass @@ -6654,11 +11434,22 @@ def get_weight(self): If weight is not set, the default value of 1.0 is returned. """ - return float(self.weight) if self.weight!=None else 1.0 + return float(self.weight) if self.weight != None else 1.0 def __str__(self): - return "Input (weight) "+str(self.id)+": "+str(self.get_target_cell_id())+":"+str(self.get_segment_id())+"("+'%.6f'%self.get_fraction_along()+"), weight: "+'%.6f'%self.get_weight() + return ( + "Input (weight) " + + str(self.id) + + ": " + + str(self.get_target_cell_id()) + + ":" + + str(self.get_segment_id()) + + "(" + + "%.6f" % self.get_fraction_along() + + "), weight: " + + "%.6f" % self.get_weight() + ) # end class InputW @@ -6667,71 +11458,126 @@ class BaseWithoutId(GeneratedsSuper): """Base element without ID specified *yet*, e.g. for an element with a particular requirement on its id which does not comply with NmlId (e.g. 
Segment needs nonNegativeInteger).""" + member_data_items_ = [ - MemberSpec_('neuro_lex_id', 'NeuroLexId', 0, 1, {'use': u'optional'}), + MemberSpec_("neuro_lex_id", "NeuroLexId", 0, 1, {"use": u"optional"}), ] subclass = None superclass = None + def __init__(self, neuro_lex_id=None, extensiontype_=None, **kwargs_): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") self.neuro_lex_id = _cast(None, neuro_lex_id) self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, BaseWithoutId) + subclass = getSubclassFromModule_(CurrentSubclassModule_, BaseWithoutId) if subclass is not None: return subclass(*args_, **kwargs_) if BaseWithoutId.subclass: return BaseWithoutId.subclass(*args_, **kwargs_) else: return BaseWithoutId(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NeuroLexId(self, value): # Validate type NeuroLexId, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_NeuroLexId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NeuroLexId_patterns_, )) - validate_NeuroLexId_patterns_ = [[u'^[a-zA-Z0-9_:]*$']] - def hasContent_(self): - if ( + self.validate_NeuroLexId_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_NeuroLexId_patterns_, + ) + ) - ): + validate_NeuroLexId_patterns_ = [[u"^[a-zA-Z0-9_:]*$"]] + + def hasContent_(self): + if (): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseWithoutId', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('BaseWithoutId') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="BaseWithoutId", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("BaseWithoutId") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseWithoutId') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="BaseWithoutId" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BaseWithoutId', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BaseWithoutId'): - if self.neuro_lex_id is not None and 'neuro_lex_id' not in already_processed: - already_processed.add('neuro_lex_id') - outfile.write(' neuroLexId=%s' % (quote_attrib(self.neuro_lex_id), )) - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - 
already_processed.add('xsi:type') + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="BaseWithoutId", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="BaseWithoutId", + ): + if self.neuro_lex_id is not None and "neuro_lex_id" not in already_processed: + already_processed.add("neuro_lex_id") + outfile.write(" neuroLexId=%s" % (quote_attrib(self.neuro_lex_id),)) + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseWithoutId', fromsubclass_=False, pretty_print=True): + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="BaseWithoutId", + fromsubclass_=False, + pretty_print=True, + ): pass + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -6739,89 +11585,156 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('neuroLexId', node) - if value is not None and 'neuroLexId' not in already_processed: - already_processed.add('neuroLexId') + value = find_attr_value_("neuroLexId", node) + if value is not None and "neuroLexId" not in already_processed: + already_processed.add("neuroLexId") self.neuro_lex_id = value - self.validate_NeuroLexId(self.neuro_lex_id) # validate type NeuroLexId - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + self.validate_NeuroLexId(self.neuro_lex_id) # validate type NeuroLexId + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): pass + + # end class BaseWithoutId class BaseNonNegativeIntegerId(BaseWithoutId): """Anything which can have a unique (within its parent) id, which must be an integer zero or greater.""" + member_data_items_ = [ - MemberSpec_('id', 'NonNegativeInteger', 0, 0, {'use': u'required'}), + MemberSpec_("id", "NonNegativeInteger", 0, 0, {"use": u"required"}), ] subclass = None superclass = BaseWithoutId + def __init__(self, neuro_lex_id=None, id=None, extensiontype_=None, **kwargs_): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(BaseNonNegativeIntegerId, self).__init__(neuro_lex_id, extensiontype_, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(BaseNonNegativeIntegerId, self).__init__( + neuro_lex_id, extensiontype_, **kwargs_ + ) self.id = _cast(int, id) self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, BaseNonNegativeIntegerId) + CurrentSubclassModule_, BaseNonNegativeIntegerId + ) if subclass is not None: return subclass(*args_, **kwargs_) if 
BaseNonNegativeIntegerId.subclass: return BaseNonNegativeIntegerId.subclass(*args_, **kwargs_) else: return BaseNonNegativeIntegerId(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NonNegativeInteger(self, value): # Validate type NonNegativeInteger, a restriction on xs:nonNegativeInteger. if value is not None and Validate_simpletypes_: pass + def hasContent_(self): - if ( - super(BaseNonNegativeIntegerId, self).hasContent_() - ): + if super(BaseNonNegativeIntegerId, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseNonNegativeIntegerId', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('BaseNonNegativeIntegerId') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="BaseNonNegativeIntegerId", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("BaseNonNegativeIntegerId") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseNonNegativeIntegerId') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="BaseNonNegativeIntegerId", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BaseNonNegativeIntegerId', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BaseNonNegativeIntegerId'): - super(BaseNonNegativeIntegerId, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseNonNegativeIntegerId') - if self.id is not None and 'id' not in already_processed: - already_processed.add('id') - outfile.write(' id=%s' % (quote_attrib(self.id), )) - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="BaseNonNegativeIntegerId", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="BaseNonNegativeIntegerId", + ): + super(BaseNonNegativeIntegerId, self).exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="BaseNonNegativeIntegerId", + ) + if self.id is not None and "id" not in already_processed: + already_processed.add("id") + outfile.write(" id=%s" % (quote_attrib(self.id),)) + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, 
outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseNonNegativeIntegerId', fromsubclass_=False, pretty_print=True): - super(BaseNonNegativeIntegerId, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="BaseNonNegativeIntegerId", + fromsubclass_=False, + pretty_print=True, + ): + super(BaseNonNegativeIntegerId, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) pass + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -6829,25 +11742,35 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('id', node) - if value is not None and 'id' not in already_processed: - already_processed.add('id') + value = find_attr_value_("id", node) + if value is not None and "id" not in already_processed: + already_processed.add("id") try: self.id = int(value) except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + raise_parse_error(node, "Bad integer attribute: %s" % exp) if self.id < 0: - raise_parse_error(node, 'Invalid NonNegativeInteger') - self.validate_NonNegativeInteger(self.id) # validate type NonNegativeInteger - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + raise_parse_error(node, "Invalid NonNegativeInteger") + self.validate_NonNegativeInteger( + self.id + ) # validate type NonNegativeInteger + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value - super(BaseNonNegativeIntegerId, self).buildAttributes(node, attrs, already_processed) + super(BaseNonNegativeIntegerId, self).buildAttributes( + node, attrs, already_processed + ) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(BaseNonNegativeIntegerId, self).buildChildren(child_, node, nodeName_, True) + super(BaseNonNegativeIntegerId, self).buildChildren( + child_, node, nodeName_, True + ) pass + + # end class BaseNonNegativeIntegerId @@ -6855,74 +11778,128 @@ class Base(BaseWithoutId): """Anything which can have a unique (within its parent) id of the form NmlId (spaceless combination of letters, numbers and underscore).""" + member_data_items_ = [ - MemberSpec_('id', 'NmlId', 0, 0, {'use': u'required'}), + MemberSpec_("id", "NmlId", 0, 0, {"use": u"required"}), ] subclass = None superclass = BaseWithoutId + def __init__(self, neuro_lex_id=None, id=None, extensiontype_=None, **kwargs_): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(Base, self).__init__(neuro_lex_id, extensiontype_, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(Base, self).__init__(neuro_lex_id, extensiontype_, **kwargs_) self.id = _cast(None, id) self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, Base) + subclass = getSubclassFromModule_(CurrentSubclassModule_, Base) if subclass is not None: return subclass(*args_, **kwargs_) if Base.subclass: return Base.subclass(*args_, **kwargs_) 
else: return Base(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] + self.validate_NmlId_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [[u"^[a-zA-Z_][a-zA-Z0-9_]*$"]] + def hasContent_(self): - if ( - super(Base, self).hasContent_() - ): + if super(Base, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Base', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('Base') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Base", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("Base") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Base') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Base" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Base', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Base'): - super(Base, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Base') - if self.id is not None and 'id' not in already_processed: - already_processed.add('id') - outfile.write(' id=%s' % (quote_attrib(self.id), )) - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="Base", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="Base" + ): + super(Base, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Base" + ) + if self.id is not None and "id" not in already_processed: + already_processed.add("id") + outfile.write(" id=%s" % (quote_attrib(self.id),)) + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, 
level, namespaceprefix_='', namespacedef_='', name_='Base', fromsubclass_=False, pretty_print=True): - super(Base, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Base", + fromsubclass_=False, + pretty_print=True, + ): + super(Base, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) pass + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -6930,38 +11907,80 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('id', node) - if value is not None and 'id' not in already_processed: - already_processed.add('id') + value = find_attr_value_("id", node) + if value is not None and "id" not in already_processed: + already_processed.add("id") self.id = value - self.validate_NmlId(self.id) # validate type NmlId - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + self.validate_NmlId(self.id) # validate type NmlId + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value super(Base, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(Base, self).buildChildren(child_, node, nodeName_, True) pass + + # end class Base class Standalone(Base): """Elements which can stand alone and be referenced by id, e.g. 
cell, morphology.""" + member_data_items_ = [ - MemberSpec_('metaid', 'MetaId', 0, 1, {'use': u'optional'}), - MemberSpec_('notes', ['Notes', 'xs:string'], 0, 1, {u'type': u'xs:string', u'name': u'notes', u'minOccurs': u'0'}, None), - MemberSpec_('properties', 'Property', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Property', u'name': u'property', u'minOccurs': u'0'}, None), - MemberSpec_('annotation', 'Annotation', 0, 1, {u'type': u'Annotation', u'name': u'annotation', u'minOccurs': u'0'}, None), + MemberSpec_("metaid", "MetaId", 0, 1, {"use": u"optional"}), + MemberSpec_( + "notes", + ["Notes", "xs:string"], + 0, + 1, + {u"type": u"xs:string", u"name": u"notes", u"minOccurs": u"0"}, + None, + ), + MemberSpec_( + "properties", + "Property", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"Property", + u"name": u"property", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "annotation", + "Annotation", + 0, + 1, + {u"type": u"Annotation", u"name": u"annotation", u"minOccurs": u"0"}, + None, + ), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, extensiontype_=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + extensiontype_=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(Standalone, self).__init__(neuro_lex_id, id, extensiontype_, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(Standalone, self).__init__(neuro_lex_id, id, extensiontype_, **kwargs_) self.metaid = _cast(None, metaid) self.notes = notes self.validate_Notes(self.notes) @@ -6971,81 +11990,162 @@ def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properti self.properties = properties self.annotation = annotation self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, Standalone) + subclass = getSubclassFromModule_(CurrentSubclassModule_, Standalone) if subclass is not None: return subclass(*args_, **kwargs_) if Standalone.subclass: return Standalone.subclass(*args_, **kwargs_) else: return Standalone(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Notes(self, value): # Validate type Notes, a restriction on xs:string. if value is not None and Validate_simpletypes_: pass + def validate_MetaId(self, value): # Validate type MetaId, a restriction on xs:string. 
         if value is not None and Validate_simpletypes_:
             if not self.gds_validate_simple_patterns(
-                    self.validate_MetaId_patterns_, value):
-                warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_MetaId_patterns_, ))
-    validate_MetaId_patterns_ = [[u'^[a-zA-Z0-9_]*$']]
+                self.validate_MetaId_patterns_, value
+            ):
+                warnings_.warn(
+                    'Value "%s" does not match xsd pattern restrictions: %s'
+                    % (
+                        value.encode("utf-8"),
+                        self.validate_MetaId_patterns_,
+                    )
+                )
+
+    validate_MetaId_patterns_ = [[u"^[a-zA-Z0-9_]*$"]]
+
     def hasContent_(self):
         if (
-            self.notes is not None or
-            self.properties or
-            self.annotation is not None or
-            super(Standalone, self).hasContent_()
+            self.notes is not None
+            or self.properties
+            or self.annotation is not None
+            or super(Standalone, self).hasContent_()
         ):
             return True
         else:
             return False
-    def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Standalone', pretty_print=True):
-        imported_ns_def_ = GenerateDSNamespaceDefs_.get('Standalone')
+
+    def export(
+        self,
+        outfile,
+        level,
+        namespaceprefix_="",
+        namespacedef_="",
+        name_="Standalone",
+        pretty_print=True,
+    ):
+        imported_ns_def_ = GenerateDSNamespaceDefs_.get("Standalone")
         if imported_ns_def_ is not None:
             namespacedef_ = imported_ns_def_
         if pretty_print:
-            eol_ = '\n'
+            eol_ = "\n"
         else:
-            eol_ = ''
+            eol_ = ""
         if self.original_tagname_ is not None:
             name_ = self.original_tagname_
         showIndent(outfile, level, pretty_print)
-        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
+        outfile.write(
+            "<%s%s%s"
+            % (
+                namespaceprefix_,
+                name_,
+                namespacedef_ and " " + namespacedef_ or "",
+            )
+        )
         already_processed = set()
-        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Standalone')
+        self.exportAttributes(
+            outfile, level, already_processed, namespaceprefix_, name_="Standalone"
+        )
         if self.hasContent_():
-            outfile.write('>%s' % (eol_, ))
-            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Standalone', pretty_print=pretty_print)
+            outfile.write(">%s" % (eol_,))
+            self.exportChildren(
+                outfile,
+                level + 1,
+                namespaceprefix_,
+                namespacedef_,
+                name_="Standalone",
+                pretty_print=pretty_print,
+            )
             showIndent(outfile, level, pretty_print)
-            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
-        else:
-            outfile.write('/>%s' % (eol_, ))
-    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Standalone'):
-        super(Standalone, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Standalone')
-        if self.metaid is not None and 'metaid' not in already_processed:
-            already_processed.add('metaid')
-            outfile.write(' metaid=%s' % (quote_attrib(self.metaid), ))
-        if self.extensiontype_ is not None and 'xsi:type' not in already_processed:
-            already_processed.add('xsi:type')
+            outfile.write("</%s%s>%s" % (namespaceprefix_, name_, eol_))
+        else:
+            outfile.write("/>%s" % (eol_,))
+
+    def exportAttributes(
+        self, outfile, level, already_processed, namespaceprefix_="", name_="Standalone"
+    ):
+        super(Standalone, self).exportAttributes(
+            outfile, level, already_processed, namespaceprefix_, name_="Standalone"
+        )
+        if self.metaid is not None and "metaid" not in already_processed:
+            already_processed.add("metaid")
+            outfile.write(" metaid=%s" % (quote_attrib(self.metaid),))
+        if self.extensiontype_ is not None and "xsi:type" not in already_processed:
+            already_processed.add("xsi:type")
             outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"')
             outfile.write(' xsi:type="%s"' % self.extensiontype_)
-    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Standalone', fromsubclass_=False, pretty_print=True):
-        super(Standalone, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print)
+
+    def exportChildren(
+        self,
+        outfile,
+        level,
+        namespaceprefix_="",
+        namespacedef_="",
+        name_="Standalone",
+        fromsubclass_=False,
+        pretty_print=True,
+    ):
+        super(Standalone, self).exportChildren(
+            outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print
+        )
         if pretty_print:
-            eol_ = '\n'
+            eol_ = "\n"
         else:
-            eol_ = ''
+            eol_ = ""
         if self.notes is not None:
             showIndent(outfile, level, pretty_print)
-            outfile.write('<%snotes>%s</%snotes>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.notes), input_name='notes')), namespaceprefix_ , eol_))
+            outfile.write(
+                "<%snotes>%s</%snotes>%s"
+                % (
+                    namespaceprefix_,
+                    self.gds_encode(
+                        self.gds_format_string(
+                            quote_xml(self.notes), input_name="notes"
+                        )
+                    ),
+                    namespaceprefix_,
+                    eol_,
+                )
+            )
         for property_ in self.properties:
-            property_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='property', pretty_print=pretty_print)
+            property_.export(
+                outfile,
+                level,
+                namespaceprefix_,
+                namespacedef_="",
+                name_="property",
+                pretty_print=pretty_print,
+            )
         if self.annotation is not None:
-            self.annotation.export(outfile, level, namespaceprefix_, namespacedef_='', name_='annotation', pretty_print=pretty_print)
+            self.annotation.export(
+                outfile,
+                level,
+                namespaceprefix_,
+                namespacedef_="",
+                name_="annotation",
+                pretty_print=pretty_print,
+            )
+
     def build(self, node):
         already_processed = set()
         self.buildAttributes(node, node.attrib, already_processed)
@@ -7053,119 +12153,218 @@ def build(self, node):
             nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
             self.buildChildren(child, node, nodeName_)
         return self
+
     def buildAttributes(self, node, attrs, already_processed):
-        value = find_attr_value_('metaid', node)
-        if value is not None and 'metaid' not in already_processed:
-            already_processed.add('metaid')
+        value = find_attr_value_("metaid", node)
+        if value is not None and "metaid" not in already_processed:
+            already_processed.add("metaid")
             self.metaid = value
-            self.validate_MetaId(self.metaid)    # validate type MetaId
-        value = find_attr_value_('xsi:type', node)
-        if value is not None and 'xsi:type' not in already_processed:
-            already_processed.add('xsi:type')
+            self.validate_MetaId(self.metaid)  # validate type MetaId
+        value = find_attr_value_("xsi:type", node)
+        if value is not None and "xsi:type" not in already_processed:
+            already_processed.add("xsi:type")
             self.extensiontype_ = value
         super(Standalone, self).buildAttributes(node, attrs, already_processed)
+
     def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
-        if nodeName_ == 'notes':
+        if nodeName_ == "notes":
             notes_ = child_.text
-            notes_ = self.gds_validate_string(notes_, node, 'notes')
+            notes_ = self.gds_validate_string(notes_, node, "notes")
             self.notes = notes_
             # validate type Notes
             self.validate_Notes(self.notes)
-        elif nodeName_ == 'property':
+        elif nodeName_ == "property":
             obj_ = Property.factory(parent_object_=self)
             obj_.build(child_)
             self.properties.append(obj_)
-            obj_.original_tagname_ = 'property'
-        elif nodeName_ == 'annotation':
+            obj_.original_tagname_ = "property"
+        elif nodeName_ == 
"annotation": obj_ = Annotation.factory(parent_object_=self) obj_.build(child_) self.annotation = obj_ - obj_.original_tagname_ = 'annotation' + obj_.original_tagname_ = "annotation" super(Standalone, self).buildChildren(child_, node, nodeName_, True) + + # end class Standalone class SpikeSourcePoisson(Standalone): member_data_items_ = [ - MemberSpec_('start', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('duration', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('rate', 'Nml2Quantity_pertime', 0, 0, {'use': u'required'}), + MemberSpec_("start", "Nml2Quantity_time", 0, 0, {"use": u"required"}), + MemberSpec_("duration", "Nml2Quantity_time", 0, 0, {"use": u"required"}), + MemberSpec_("rate", "Nml2Quantity_pertime", 0, 0, {"use": u"required"}), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, start=None, duration=None, rate=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + start=None, + duration=None, + rate=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(SpikeSourcePoisson, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(SpikeSourcePoisson, self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_ + ) self.start = _cast(None, start) self.duration = _cast(None, duration) self.rate = _cast(None, rate) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, SpikeSourcePoisson) + CurrentSubclassModule_, SpikeSourcePoisson + ) if subclass is not None: return subclass(*args_, **kwargs_) if SpikeSourcePoisson.subclass: return SpikeSourcePoisson.subclass(*args_, **kwargs_) else: return SpikeSourcePoisson(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] + self.validate_Nml2Quantity_time_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_time_patterns_, + ) + ) + + validate_Nml2Quantity_time_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$"] + ] + def validate_Nml2Quantity_pertime(self, value): # Validate type Nml2Quantity_pertime, a restriction on xs:string. 
if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_pertime_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_pertime_patterns_, )) - validate_Nml2Quantity_pertime_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(per_s|per_ms|Hz)$']] + self.validate_Nml2Quantity_pertime_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_pertime_patterns_, + ) + ) + + validate_Nml2Quantity_pertime_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(per_s|per_ms|Hz)$"] + ] + def hasContent_(self): - if ( - super(SpikeSourcePoisson, self).hasContent_() - ): + if super(SpikeSourcePoisson, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SpikeSourcePoisson', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('SpikeSourcePoisson') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="SpikeSourcePoisson", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("SpikeSourcePoisson") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpikeSourcePoisson') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="SpikeSourcePoisson", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SpikeSourcePoisson', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="SpikeSourcePoisson", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SpikeSourcePoisson'): - super(SpikeSourcePoisson, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpikeSourcePoisson') - if self.start is not None and 'start' not in already_processed: - already_processed.add('start') - outfile.write(' start=%s' % (quote_attrib(self.start), )) - if self.duration is not None and 'duration' not in already_processed: - already_processed.add('duration') - outfile.write(' duration=%s' % (quote_attrib(self.duration), )) - if self.rate is not None and 'rate' not in already_processed: - already_processed.add('rate') - outfile.write(' rate=%s' % (quote_attrib(self.rate), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SpikeSourcePoisson', fromsubclass_=False, pretty_print=True): - super(SpikeSourcePoisson, self).exportChildren(outfile, level, 
namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="SpikeSourcePoisson", + ): + super(SpikeSourcePoisson, self).exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="SpikeSourcePoisson", + ) + if self.start is not None and "start" not in already_processed: + already_processed.add("start") + outfile.write(" start=%s" % (quote_attrib(self.start),)) + if self.duration is not None and "duration" not in already_processed: + already_processed.add("duration") + outfile.write(" duration=%s" % (quote_attrib(self.duration),)) + if self.rate is not None and "rate" not in already_processed: + already_processed.add("rate") + outfile.write(" rate=%s" % (quote_attrib(self.rate),)) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="SpikeSourcePoisson", + fromsubclass_=False, + pretty_print=True, + ): + super(SpikeSourcePoisson, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -7173,44 +12372,89 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('start', node) - if value is not None and 'start' not in already_processed: - already_processed.add('start') + value = find_attr_value_("start", node) + if value is not None and "start" not in already_processed: + already_processed.add("start") self.start = value - self.validate_Nml2Quantity_time(self.start) # validate type Nml2Quantity_time - value = find_attr_value_('duration', node) - if value is not None and 'duration' not in already_processed: - already_processed.add('duration') + self.validate_Nml2Quantity_time( + self.start + ) # validate type Nml2Quantity_time + value = find_attr_value_("duration", node) + if value is not None and "duration" not in already_processed: + already_processed.add("duration") self.duration = value - self.validate_Nml2Quantity_time(self.duration) # validate type Nml2Quantity_time - value = find_attr_value_('rate', node) - if value is not None and 'rate' not in already_processed: - already_processed.add('rate') + self.validate_Nml2Quantity_time( + self.duration + ) # validate type Nml2Quantity_time + value = find_attr_value_("rate", node) + if value is not None and "rate" not in already_processed: + already_processed.add("rate") self.rate = value - self.validate_Nml2Quantity_pertime(self.rate) # validate type Nml2Quantity_pertime + self.validate_Nml2Quantity_pertime( + self.rate + ) # validate type Nml2Quantity_pertime super(SpikeSourcePoisson, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(SpikeSourcePoisson, self).buildChildren(child_, node, nodeName_, True) pass + + # end class SpikeSourcePoisson class InputList(Base): """List of inputs to a population. 
Currents will be provided by the specified component.""" + member_data_items_ = [ - MemberSpec_('populations', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('component', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('input', 'Input', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Input', u'name': u'input', u'minOccurs': u'0'}, None), - MemberSpec_('input_ws', 'InputW', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'InputW', u'name': u'inputW', u'minOccurs': u'0'}, None), + MemberSpec_("populations", "NmlId", 0, 0, {"use": u"required"}), + MemberSpec_("component", "NmlId", 0, 0, {"use": u"required"}), + MemberSpec_( + "input", + "Input", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"Input", + u"name": u"input", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "input_ws", + "InputW", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"InputW", + u"name": u"inputW", + u"minOccurs": u"0", + }, + None, + ), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, populations=None, component=None, input=None, input_ws=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + populations=None, + component=None, + input=None, + input_ws=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(InputList, self).__init__(neuro_lex_id, id, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(InputList, self).__init__(neuro_lex_id, id, **kwargs_) self.populations = _cast(None, populations) self.component = _cast(None, component) if input is None: @@ -7221,72 +12465,136 @@ def __init__(self, neuro_lex_id=None, id=None, populations=None, component=None, self.input_ws = [] else: self.input_ws = input_ws + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, InputList) + subclass = getSubclassFromModule_(CurrentSubclassModule_, InputList) if subclass is not None: return subclass(*args_, **kwargs_) if InputList.subclass: return InputList.subclass(*args_, **kwargs_) else: return InputList(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. 
if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] + self.validate_NmlId_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [[u"^[a-zA-Z_][a-zA-Z0-9_]*$"]] + def hasContent_(self): - if ( - self.input or - self.input_ws or - super(InputList, self).hasContent_() - ): + if self.input or self.input_ws or super(InputList, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='InputList', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('InputList') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="InputList", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("InputList") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='InputList') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="InputList" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='InputList', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="InputList", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='InputList'): - super(InputList, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='InputList') - if self.populations is not None and 'populations' not in already_processed: - already_processed.add('populations') - outfile.write(' population=%s' % (quote_attrib(self.populations), )) - if self.component is not None and 'component' not in already_processed: - already_processed.add('component') - outfile.write(' component=%s' % (quote_attrib(self.component), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='InputList', fromsubclass_=False, pretty_print=True): - super(InputList, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="InputList" + ): + super(InputList, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="InputList" + ) + if self.populations is 
not None and "populations" not in already_processed: + already_processed.add("populations") + outfile.write(" population=%s" % (quote_attrib(self.populations),)) + if self.component is not None and "component" not in already_processed: + already_processed.add("component") + outfile.write(" component=%s" % (quote_attrib(self.component),)) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="InputList", + fromsubclass_=False, + pretty_print=True, + ): + super(InputList, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" for input_ in self.input: - input_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='input', pretty_print=pretty_print) + input_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="input", + pretty_print=pretty_print, + ) for inputW_ in self.input_ws: - inputW_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='inputW', pretty_print=pretty_print) + inputW_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="inputW", + pretty_print=pretty_print, + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -7294,40 +12602,41 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('population', node) - if value is not None and 'population' not in already_processed: - already_processed.add('population') + value = find_attr_value_("population", node) + if value is not None and "population" not in already_processed: + already_processed.add("population") self.populations = value - self.validate_NmlId(self.populations) # validate type NmlId - value = find_attr_value_('component', node) - if value is not None and 'component' not in already_processed: - already_processed.add('component') + self.validate_NmlId(self.populations) # validate type NmlId + value = find_attr_value_("component", node) + if value is not None and "component" not in already_processed: + already_processed.add("component") self.component = value - self.validate_NmlId(self.component) # validate type NmlId + self.validate_NmlId(self.component) # validate type NmlId super(InputList, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'input': + if nodeName_ == "input": class_obj_ = self.get_class_obj_(child_, Input) obj_ = class_obj_.factory(parent_object_=self) obj_.build(child_) self.input.append(obj_) - obj_.original_tagname_ = 'input' - elif nodeName_ == 'inputW': + obj_.original_tagname_ = "input" + elif nodeName_ == "inputW": obj_ = InputW.factory(parent_object_=self) obj_.build(child_) self.input_ws.append(obj_) - obj_.original_tagname_ = 'inputW' + obj_.original_tagname_ = "inputW" super(InputList, self).buildChildren(child_, node, nodeName_, True) def exportHdf5(self, h5file, h5Group): - """Export to HDF5 file. 
""" - #print("Exporting InputList: "+str(self.id)+" as HDF5") - + """Export to HDF5 file.""" + # print("Exporting InputList: "+str(self.id)+" as HDF5") import numpy - ilGroup = h5file.create_group(h5Group, 'inputList_'+self.id) + ilGroup = h5file.create_group(h5Group, "inputList_" + self.id) ilGroup._f_setattr("id", self.id) ilGroup._f_setattr("component", self.component) ilGroup._f_setattr("population", self.populations) @@ -7336,33 +12645,35 @@ def exportHdf5(self, h5file, h5Group): extra_cols = {} - num_tot = len(self.input)+len(self.input_ws) + num_tot = len(self.input) + len(self.input_ws) - if len(self.input_ws)>0: - extra_cols["column_"+str(cols)] = 'weight' - cols+=1 + if len(self.input_ws) > 0: + extra_cols["column_" + str(cols)] = "weight" + cols += 1 - #print("Exporting "+str(num_tot)+" inputs") + # print("Exporting "+str(num_tot)+" inputs") a = numpy.zeros([num_tot, cols], numpy.float32) - count=0 + count = 0 for input in self.input: - a[count,0] = input.id - a[count,1] = input.get_target_cell_id() - a[count,2] = input.get_segment_id() - a[count,3] = input.get_fraction_along() - count+=1 + a[count, 0] = input.id + a[count, 1] = input.get_target_cell_id() + a[count, 2] = input.get_segment_id() + a[count, 3] = input.get_fraction_along() + count += 1 for input in self.input_ws: - a[count,0] = input.id - a[count,1] = input.get_target_cell_id() - a[count,2] = input.get_segment_id() - a[count,3] = input.get_fraction_along() - a[count,4] = input.get_weight() - count+=1 + a[count, 0] = input.id + a[count, 1] = input.get_target_cell_id() + a[count, 2] = input.get_segment_id() + a[count, 3] = input.get_fraction_along() + a[count, 4] = input.get_weight() + count += 1 - array = h5file.create_carray(ilGroup, self.id, obj=a, title="Locations of inputs in "+ self.id) + array = h5file.create_carray( + ilGroup, self.id, obj=a, title="Locations of inputs in " + self.id + ) array._f_setattr("column_0", "id") array._f_setattr("column_1", "target_cell_id") @@ -7373,8 +12684,14 @@ def exportHdf5(self, h5file, h5Group): def __str__(self): - return "Input list: "+self.id+" to "+self.populations+", component "+self.component - + return ( + "Input list: " + + self.id + + " to " + + self.populations + + ", component " + + self.component + ) # end class InputList @@ -7382,62 +12699,113 @@ def __str__(self): class BaseConnection(BaseNonNegativeIntegerId): """Base of all synaptic connections (chemical/electrical/analog, etc.) 
inside projections""" - member_data_items_ = [ - ] + + member_data_items_ = [] subclass = None superclass = BaseNonNegativeIntegerId + def __init__(self, neuro_lex_id=None, id=None, extensiontype_=None, **kwargs_): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(BaseConnection, self).__init__(neuro_lex_id, id, extensiontype_, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(BaseConnection, self).__init__( + neuro_lex_id, id, extensiontype_, **kwargs_ + ) self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, BaseConnection) + subclass = getSubclassFromModule_(CurrentSubclassModule_, BaseConnection) if subclass is not None: return subclass(*args_, **kwargs_) if BaseConnection.subclass: return BaseConnection.subclass(*args_, **kwargs_) else: return BaseConnection(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): - if ( - super(BaseConnection, self).hasContent_() - ): + if super(BaseConnection, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseConnection', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('BaseConnection') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="BaseConnection", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("BaseConnection") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseConnection') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="BaseConnection" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BaseConnection', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BaseConnection'): - super(BaseConnection, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseConnection') - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="BaseConnection", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="BaseConnection", + ): + super(BaseConnection, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="BaseConnection" + ) + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") 
outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseConnection', fromsubclass_=False, pretty_print=True): - super(BaseConnection, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="BaseConnection", + fromsubclass_=False, + pretty_print=True, + ): + super(BaseConnection, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) pass + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -7445,94 +12813,179 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value super(BaseConnection, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(BaseConnection, self).buildChildren(child_, node, nodeName_, True) pass + + # end class BaseConnection class BaseProjection(Base): """Base for projection (set of synaptic connections) between two populations""" + member_data_items_ = [ - MemberSpec_('presynaptic_population', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('postsynaptic_population', 'NmlId', 0, 0, {'use': u'required'}), + MemberSpec_("presynaptic_population", "NmlId", 0, 0, {"use": u"required"}), + MemberSpec_("postsynaptic_population", "NmlId", 0, 0, {"use": u"required"}), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, presynaptic_population=None, postsynaptic_population=None, extensiontype_=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + presynaptic_population=None, + postsynaptic_population=None, + extensiontype_=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(BaseProjection, self).__init__(neuro_lex_id, id, extensiontype_, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(BaseProjection, self).__init__( + neuro_lex_id, id, extensiontype_, **kwargs_ + ) self.presynaptic_population = _cast(None, presynaptic_population) self.postsynaptic_population = _cast(None, postsynaptic_population) self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, BaseProjection) + subclass = getSubclassFromModule_(CurrentSubclassModule_, BaseProjection) if subclass is not None: return subclass(*args_, **kwargs_) if BaseProjection.subclass: return BaseProjection.subclass(*args_, **kwargs_) else: return BaseProjection(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. 
if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] + self.validate_NmlId_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [[u"^[a-zA-Z_][a-zA-Z0-9_]*$"]] + def hasContent_(self): - if ( - super(BaseProjection, self).hasContent_() - ): + if super(BaseProjection, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseProjection', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('BaseProjection') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="BaseProjection", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("BaseProjection") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseProjection') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="BaseProjection" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BaseProjection', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BaseProjection'): - super(BaseProjection, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseProjection') - if self.presynaptic_population is not None and 'presynaptic_population' not in already_processed: - already_processed.add('presynaptic_population') - outfile.write(' presynapticPopulation=%s' % (quote_attrib(self.presynaptic_population), )) - if self.postsynaptic_population is not None and 'postsynaptic_population' not in already_processed: - already_processed.add('postsynaptic_population') - outfile.write(' postsynapticPopulation=%s' % (quote_attrib(self.postsynaptic_population), )) - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="BaseProjection", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="BaseProjection", + ): + super(BaseProjection, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="BaseProjection" + ) + if ( + self.presynaptic_population is not None + and 
"presynaptic_population" not in already_processed + ): + already_processed.add("presynaptic_population") + outfile.write( + " presynapticPopulation=%s" + % (quote_attrib(self.presynaptic_population),) + ) + if ( + self.postsynaptic_population is not None + and "postsynaptic_population" not in already_processed + ): + already_processed.add("postsynaptic_population") + outfile.write( + " postsynapticPopulation=%s" + % (quote_attrib(self.postsynaptic_population),) + ) + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseProjection', fromsubclass_=False, pretty_print=True): - super(BaseProjection, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="BaseProjection", + fromsubclass_=False, + pretty_print=True, + ): + super(BaseProjection, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) pass + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -7540,97 +12993,166 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('presynapticPopulation', node) - if value is not None and 'presynapticPopulation' not in already_processed: - already_processed.add('presynapticPopulation') + value = find_attr_value_("presynapticPopulation", node) + if value is not None and "presynapticPopulation" not in already_processed: + already_processed.add("presynapticPopulation") self.presynaptic_population = value - self.validate_NmlId(self.presynaptic_population) # validate type NmlId - value = find_attr_value_('postsynapticPopulation', node) - if value is not None and 'postsynapticPopulation' not in already_processed: - already_processed.add('postsynapticPopulation') + self.validate_NmlId(self.presynaptic_population) # validate type NmlId + value = find_attr_value_("postsynapticPopulation", node) + if value is not None and "postsynapticPopulation" not in already_processed: + already_processed.add("postsynapticPopulation") self.postsynaptic_population = value - self.validate_NmlId(self.postsynaptic_population) # validate type NmlId - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + self.validate_NmlId(self.postsynaptic_population) # validate type NmlId + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value super(BaseProjection, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(BaseProjection, self).buildChildren(child_, node, nodeName_, True) pass + + # end class BaseProjection class CellSet(Base): member_data_items_ = [ - MemberSpec_('select', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('__ANY__', '__ANY__', 1, 1, {u'maxOccurs': u'unbounded', u'processContents': u'skip', u'minOccurs': u'0'}, None), + MemberSpec_("select", 
"xs:string", 0, 0, {"use": u"required"}), + MemberSpec_( + "__ANY__", + "__ANY__", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"processContents": u"skip", + u"minOccurs": u"0", + }, + None, + ), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, select=None, anytypeobjs_=None, **kwargs_): + + def __init__( + self, neuro_lex_id=None, id=None, select=None, anytypeobjs_=None, **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(CellSet, self).__init__(neuro_lex_id, id, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(CellSet, self).__init__(neuro_lex_id, id, **kwargs_) self.select = _cast(None, select) if anytypeobjs_ is None: self.anytypeobjs_ = [] else: self.anytypeobjs_ = anytypeobjs_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, CellSet) + subclass = getSubclassFromModule_(CurrentSubclassModule_, CellSet) if subclass is not None: return subclass(*args_, **kwargs_) if CellSet.subclass: return CellSet.subclass(*args_, **kwargs_) else: return CellSet(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): - if ( - self.anytypeobjs_ or - super(CellSet, self).hasContent_() - ): + if self.anytypeobjs_ or super(CellSet, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CellSet', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('CellSet') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="CellSet", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("CellSet") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CellSet') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="CellSet" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CellSet', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="CellSet", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CellSet'): - super(CellSet, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CellSet') - if self.select is not None and 'select' not in already_processed: - already_processed.add('select') - outfile.write(' select=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.select), input_name='select')), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CellSet', fromsubclass_=False, pretty_print=True): - super(CellSet, 
self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="CellSet" + ): + super(CellSet, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="CellSet" + ) + if self.select is not None and "select" not in already_processed: + already_processed.add("select") + outfile.write( + " select=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.select), input_name="select" + ) + ), + ) + ) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="CellSet", + fromsubclass_=False, + pretty_print=True, + ): + super(CellSet, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" for obj_ in self.anytypeobjs_: obj_.export(outfile, level, namespaceprefix_, pretty_print=pretty_print) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -7638,35 +13160,71 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('select', node) - if value is not None and 'select' not in already_processed: - already_processed.add('select') + value = find_attr_value_("select", node) + if value is not None and "select" not in already_processed: + already_processed.add("select") self.select = value super(CellSet, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - obj_ = self.gds_build_any(child_, 'CellSet') + obj_ = self.gds_build_any(child_, "CellSet") if obj_ is not None: self.add_anytypeobjs_(obj_) super(CellSet, self).buildChildren(child_, node, nodeName_, True) + + # end class CellSet class Population(Standalone): member_data_items_ = [ - MemberSpec_('component', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('size', 'NonNegativeInteger', 0, 1, {'use': u'optional'}), - MemberSpec_('type', 'populationTypes', 0, 1, {'use': u'optional'}), - MemberSpec_('extracellular_properties', 'NmlId', 0, 1, {'use': u'optional'}), - MemberSpec_('layout', 'Layout', 0, 1, {u'type': u'Layout', u'name': u'layout', u'minOccurs': u'0'}, 4), - MemberSpec_('instances', 'Instance', 1, 0, {u'maxOccurs': u'unbounded', u'type': u'Instance', u'name': u'instance'}, 4), + MemberSpec_("component", "NmlId", 0, 0, {"use": u"required"}), + MemberSpec_("size", "NonNegativeInteger", 0, 1, {"use": u"optional"}), + MemberSpec_("type", "populationTypes", 0, 1, {"use": u"optional"}), + MemberSpec_("extracellular_properties", "NmlId", 0, 1, {"use": u"optional"}), + MemberSpec_( + "layout", + "Layout", + 0, + 1, + {u"type": u"Layout", u"name": u"layout", u"minOccurs": u"0"}, + 4, + ), + MemberSpec_( + "instances", + "Instance", + 1, + 0, + {u"maxOccurs": u"unbounded", u"type": u"Instance", u"name": u"instance"}, + 4, + ), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, component=None, size=None, type=None, extracellular_properties=None, layout=None, instances=None, **kwargs_): + + def __init__( + self, + 
neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + component=None, + size=None, + type=None, + extracellular_properties=None, + layout=None, + instances=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(Population, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(Population, self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_ + ) self.component = _cast(None, component) self.size = _cast(int, size) self.type = _cast(None, type) @@ -7676,94 +13234,173 @@ def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properti self.instances = [] else: self.instances = instances + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, Population) + subclass = getSubclassFromModule_(CurrentSubclassModule_, Population) if subclass is not None: return subclass(*args_, **kwargs_) if Population.subclass: return Population.subclass(*args_, **kwargs_) else: return Population(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] + self.validate_NmlId_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [[u"^[a-zA-Z_][a-zA-Z0-9_]*$"]] + def validate_NonNegativeInteger(self, value): # Validate type NonNegativeInteger, a restriction on xs:nonNegativeInteger. if value is not None and Validate_simpletypes_: pass + def validate_populationTypes(self, value): # Validate type populationTypes, a restriction on xs:string. 
if value is not None and Validate_simpletypes_: value = str(value) - enumerations = ['population', 'populationList'] + enumerations = ["population", "populationList"] enumeration_respectee = False for enum in enumerations: if value == enum: enumeration_respectee = True break if not enumeration_respectee: - warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on populationTypes' % {"value" : value.encode("utf-8")} ) + warnings_.warn( + 'Value "%(value)s" does not match xsd enumeration restriction on populationTypes' + % {"value": value.encode("utf-8")} + ) + def hasContent_(self): if ( - self.layout is not None or - self.instances or - super(Population, self).hasContent_() + self.layout is not None + or self.instances + or super(Population, self).hasContent_() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Population', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('Population') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Population", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("Population") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Population') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Population" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Population', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="Population", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Population'): - super(Population, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Population') - if self.component is not None and 'component' not in already_processed: - already_processed.add('component') - outfile.write(' component=%s' % (quote_attrib(self.component), )) - if self.size is not None and 'size' not in already_processed: - already_processed.add('size') - outfile.write(' size=%s' % (quote_attrib(self.size), )) - if self.type is not None and 'type' not in already_processed: - already_processed.add('type') - outfile.write(' type=%s' % (quote_attrib(self.type), )) - if self.extracellular_properties is not None and 'extracellular_properties' not in already_processed: - already_processed.add('extracellular_properties') - outfile.write(' extracellularProperties=%s' % (quote_attrib(self.extracellular_properties), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Population', fromsubclass_=False, pretty_print=True): - super(Population, self).exportChildren(outfile, 
level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="Population" + ): + super(Population, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Population" + ) + if self.component is not None and "component" not in already_processed: + already_processed.add("component") + outfile.write(" component=%s" % (quote_attrib(self.component),)) + if self.size is not None and "size" not in already_processed: + already_processed.add("size") + outfile.write(" size=%s" % (quote_attrib(self.size),)) + if self.type is not None and "type" not in already_processed: + already_processed.add("type") + outfile.write(" type=%s" % (quote_attrib(self.type),)) + if ( + self.extracellular_properties is not None + and "extracellular_properties" not in already_processed + ): + already_processed.add("extracellular_properties") + outfile.write( + " extracellularProperties=%s" + % (quote_attrib(self.extracellular_properties),) + ) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Population", + fromsubclass_=False, + pretty_print=True, + ): + super(Population, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.layout is not None: - self.layout.export(outfile, level, namespaceprefix_, namespacedef_='', name_='layout', pretty_print=pretty_print) + self.layout.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="layout", + pretty_print=pretty_print, + ) for instance_ in self.instances: - instance_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='instance', pretty_print=pretty_print) + instance_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="instance", + pretty_print=pretty_print, + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -7771,77 +13408,81 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('component', node) - if value is not None and 'component' not in already_processed: - already_processed.add('component') + value = find_attr_value_("component", node) + if value is not None and "component" not in already_processed: + already_processed.add("component") self.component = value - self.validate_NmlId(self.component) # validate type NmlId - value = find_attr_value_('size', node) - if value is not None and 'size' not in already_processed: - already_processed.add('size') + self.validate_NmlId(self.component) # validate type NmlId + value = find_attr_value_("size", node) + if value is not None and "size" not in already_processed: + already_processed.add("size") try: self.size = int(value) except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + raise_parse_error(node, "Bad integer attribute: %s" % exp) if self.size < 0: - raise_parse_error(node, 'Invalid NonNegativeInteger') - self.validate_NonNegativeInteger(self.size) # validate type NonNegativeInteger - value = find_attr_value_('type', node) - if value is not None and 'type' not in already_processed: - 
already_processed.add('type') + raise_parse_error(node, "Invalid NonNegativeInteger") + self.validate_NonNegativeInteger( + self.size + ) # validate type NonNegativeInteger + value = find_attr_value_("type", node) + if value is not None and "type" not in already_processed: + already_processed.add("type") self.type = value - self.validate_populationTypes(self.type) # validate type populationTypes - value = find_attr_value_('extracellularProperties', node) - if value is not None and 'extracellularProperties' not in already_processed: - already_processed.add('extracellularProperties') + self.validate_populationTypes(self.type) # validate type populationTypes + value = find_attr_value_("extracellularProperties", node) + if value is not None and "extracellularProperties" not in already_processed: + already_processed.add("extracellularProperties") self.extracellular_properties = value - self.validate_NmlId(self.extracellular_properties) # validate type NmlId + self.validate_NmlId(self.extracellular_properties) # validate type NmlId super(Population, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'layout': + if nodeName_ == "layout": obj_ = Layout.factory(parent_object_=self) obj_.build(child_) self.layout = obj_ - obj_.original_tagname_ = 'layout' - elif nodeName_ == 'instance': + obj_.original_tagname_ = "layout" + elif nodeName_ == "instance": obj_ = Instance.factory(parent_object_=self) obj_.build(child_) self.instances.append(obj_) - obj_.original_tagname_ = 'instance' + obj_.original_tagname_ = "instance" super(Population, self).buildChildren(child_, node, nodeName_, True) def exportHdf5(self, h5file, h5Group): - """Export to HDF5 file. """ - #print("Exporting Population: "+str(self.id)+" as HDF5") - + """Export to HDF5 file.""" + # print("Exporting Population: "+str(self.id)+" as HDF5") import numpy - popGroup = h5file.create_group(h5Group, 'population_'+self.id) + popGroup = h5file.create_group(h5Group, "population_" + self.id) popGroup._f_setattr("id", self.id) popGroup._f_setattr("component", self.component) for p in self.properties: - popGroup._f_setattr("property:"+p.tag, p.value) + popGroup._f_setattr("property:" + p.tag, p.value) - - if len(self.instances)>0: + if len(self.instances) > 0: colCount = 3 a = numpy.zeros([len(self.instances), colCount], numpy.float32) - count=0 + count = 0 for instance in self.instances: - a[count,0] = instance.location.x - a[count,1] = instance.location.y - a[count,2] = instance.location.z + a[count, 0] = instance.location.x + a[count, 1] = instance.location.y + a[count, 2] = instance.location.z - count=count+1 + count = count + 1 popGroup._f_setattr("size", count) popGroup._f_setattr("type", "populationList") - array = h5file.create_carray(popGroup, self.id, obj=a, title="Locations of cells in "+ self.id) + array = h5file.create_carray( + popGroup, self.id, obj=a, title="Locations of cells in " + self.id + ) array._f_setattr("column_0", "x") array._f_setattr("column_1", "y") array._f_setattr("column_2", "z") @@ -7850,92 +13491,167 @@ def exportHdf5(self, h5file, h5Group): popGroup._f_setattr("size", self.size) def get_size(self): - return len(self.instances) if len(self.instances)>0 else (self.size if self.size else 0) + return ( + len(self.instances) + if len(self.instances) > 0 + else (self.size if self.size else 0) + ) def __str__(self): - return "Population: "+str(self.id)+" with "+str( self.get_size() )+" components of type "+(self.component if 
self.component else "???") - + return ( + "Population: " + + str(self.id) + + " with " + + str(self.get_size()) + + " components of type " + + (self.component if self.component else "???") + ) # end class Population class Region(Base): member_data_items_ = [ - MemberSpec_('spaces', 'NmlId', 0, 1, {'use': u'optional'}), - MemberSpec_('__ANY__', '__ANY__', 1, 1, {u'maxOccurs': u'unbounded', u'processContents': u'skip', u'minOccurs': u'0'}, None), + MemberSpec_("spaces", "NmlId", 0, 1, {"use": u"optional"}), + MemberSpec_( + "__ANY__", + "__ANY__", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"processContents": u"skip", + u"minOccurs": u"0", + }, + None, + ), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, spaces=None, anytypeobjs_=None, **kwargs_): + + def __init__( + self, neuro_lex_id=None, id=None, spaces=None, anytypeobjs_=None, **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(Region, self).__init__(neuro_lex_id, id, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(Region, self).__init__(neuro_lex_id, id, **kwargs_) self.spaces = _cast(None, spaces) if anytypeobjs_ is None: self.anytypeobjs_ = [] else: self.anytypeobjs_ = anytypeobjs_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, Region) + subclass = getSubclassFromModule_(CurrentSubclassModule_, Region) if subclass is not None: return subclass(*args_, **kwargs_) if Region.subclass: return Region.subclass(*args_, **kwargs_) else: return Region(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] + self.validate_NmlId_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [[u"^[a-zA-Z_][a-zA-Z0-9_]*$"]] + def hasContent_(self): - if ( - self.anytypeobjs_ or - super(Region, self).hasContent_() - ): + if self.anytypeobjs_ or super(Region, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Region', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('Region') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Region", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("Region") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Region') + self.exportAttributes( + outfile, level, already_processed, 
namespaceprefix_, name_="Region" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Region', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="Region", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Region'): - super(Region, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Region') - if self.spaces is not None and 'spaces' not in already_processed: - already_processed.add('spaces') - outfile.write(' space=%s' % (quote_attrib(self.spaces), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Region', fromsubclass_=False, pretty_print=True): - super(Region, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="Region" + ): + super(Region, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Region" + ) + if self.spaces is not None and "spaces" not in already_processed: + already_processed.add("spaces") + outfile.write(" space=%s" % (quote_attrib(self.spaces),)) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Region", + fromsubclass_=False, + pretty_print=True, + ): + super(Region, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" for obj_ in self.anytypeobjs_: obj_.export(outfile, level, namespaceprefix_, pretty_print=pretty_print) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -7943,99 +13659,173 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('space', node) - if value is not None and 'space' not in already_processed: - already_processed.add('space') + value = find_attr_value_("space", node) + if value is not None and "space" not in already_processed: + already_processed.add("space") self.spaces = value - self.validate_NmlId(self.spaces) # validate type NmlId + self.validate_NmlId(self.spaces) # validate type NmlId super(Region, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - obj_ = self.gds_build_any(child_, 'Region') + obj_ = self.gds_build_any(child_, "Region") if obj_ is not None: self.add_anytypeobjs_(obj_) super(Region, self).buildChildren(child_, node, nodeName_, True) + + # end class Region class Space(Base): member_data_items_ = [ - MemberSpec_('based_on', 'allowedSpaces', 0, 1, {'use': u'optional'}), - MemberSpec_('structure', 'SpaceStructure', 0, 1, {u'type': u'SpaceStructure', u'name': u'structure', u'minOccurs': u'0'}, None), + MemberSpec_("based_on", "allowedSpaces", 0, 1, {"use": u"optional"}), + MemberSpec_( + "structure", + 
"SpaceStructure", + 0, + 1, + {u"type": u"SpaceStructure", u"name": u"structure", u"minOccurs": u"0"}, + None, + ), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, based_on=None, structure=None, **kwargs_): + + def __init__( + self, neuro_lex_id=None, id=None, based_on=None, structure=None, **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(Space, self).__init__(neuro_lex_id, id, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(Space, self).__init__(neuro_lex_id, id, **kwargs_) self.based_on = _cast(None, based_on) self.structure = structure + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, Space) + subclass = getSubclassFromModule_(CurrentSubclassModule_, Space) if subclass is not None: return subclass(*args_, **kwargs_) if Space.subclass: return Space.subclass(*args_, **kwargs_) else: return Space(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_allowedSpaces(self, value): # Validate type allowedSpaces, a restriction on xs:string. if value is not None and Validate_simpletypes_: value = str(value) - enumerations = ['Euclidean_1D', 'Euclidean_2D', 'Euclidean_3D', 'Grid_1D', 'Grid_2D', 'Grid_3D'] + enumerations = [ + "Euclidean_1D", + "Euclidean_2D", + "Euclidean_3D", + "Grid_1D", + "Grid_2D", + "Grid_3D", + ] enumeration_respectee = False for enum in enumerations: if value == enum: enumeration_respectee = True break if not enumeration_respectee: - warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on allowedSpaces' % {"value" : value.encode("utf-8")} ) + warnings_.warn( + 'Value "%(value)s" does not match xsd enumeration restriction on allowedSpaces' + % {"value": value.encode("utf-8")} + ) + def hasContent_(self): - if ( - self.structure is not None or - super(Space, self).hasContent_() - ): + if self.structure is not None or super(Space, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Space', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('Space') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Space", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("Space") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Space') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Space" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Space', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="Space", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) 
- else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Space'): - super(Space, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Space') - if self.based_on is not None and 'based_on' not in already_processed: - already_processed.add('based_on') - outfile.write(' basedOn=%s' % (quote_attrib(self.based_on), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Space', fromsubclass_=False, pretty_print=True): - super(Space, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="Space" + ): + super(Space, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Space" + ) + if self.based_on is not None and "based_on" not in already_processed: + already_processed.add("based_on") + outfile.write(" basedOn=%s" % (quote_attrib(self.based_on),)) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Space", + fromsubclass_=False, + pretty_print=True, + ): + super(Space, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.structure is not None: - self.structure.export(outfile, level, namespaceprefix_, namespacedef_='', name_='structure', pretty_print=pretty_print) + self.structure.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="structure", + pretty_print=pretty_print, + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -8043,45 +13833,207 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('basedOn', node) - if value is not None and 'basedOn' not in already_processed: - already_processed.add('basedOn') + value = find_attr_value_("basedOn", node) + if value is not None and "basedOn" not in already_processed: + already_processed.add("basedOn") self.based_on = value - self.validate_allowedSpaces(self.based_on) # validate type allowedSpaces + self.validate_allowedSpaces(self.based_on) # validate type allowedSpaces super(Space, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'structure': + if nodeName_ == "structure": obj_ = SpaceStructure.factory(parent_object_=self) obj_.build(child_) self.structure = obj_ - obj_.original_tagname_ = 'structure' + obj_.original_tagname_ = "structure" super(Space, self).buildChildren(child_, node, nodeName_, True) + + # end class Space class Network(Standalone): member_data_items_ = [ - MemberSpec_('type', 'networkTypes', 0, 1, {'use': u'optional'}), - MemberSpec_('temperature', 'Nml2Quantity_temperature', 0, 1, {'use': u'optional'}), - MemberSpec_('spaces', 'Space', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Space', u'name': u'space', u'minOccurs': u'0'}, None), - MemberSpec_('regions', 'Region', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Region', u'name': u'region', u'minOccurs': u'0'}, None), - 
MemberSpec_('extracellular_properties', 'ExtracellularPropertiesLocal', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ExtracellularPropertiesLocal', u'name': u'extracellularProperties', u'minOccurs': u'0'}, None), - MemberSpec_('populations', 'Population', 1, 0, {u'maxOccurs': u'unbounded', u'type': u'Population', u'name': u'population'}, None), - MemberSpec_('cell_sets', 'CellSet', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'CellSet', u'name': u'cellSet', u'minOccurs': u'0'}, None), - MemberSpec_('synaptic_connections', 'SynapticConnection', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'SynapticConnection', u'name': u'synapticConnection', u'minOccurs': u'0'}, None), - MemberSpec_('projections', 'Projection', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Projection', u'name': u'projection', u'minOccurs': u'0'}, None), - MemberSpec_('electrical_projections', 'ElectricalProjection', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ElectricalProjection', u'name': u'electricalProjection', u'minOccurs': u'0'}, None), - MemberSpec_('continuous_projections', 'ContinuousProjection', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ContinuousProjection', u'name': u'continuousProjection', u'minOccurs': u'0'}, None), - MemberSpec_('explicit_inputs', 'ExplicitInput', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ExplicitInput', u'name': u'explicitInput', u'minOccurs': u'0'}, None), - MemberSpec_('input_lists', 'InputList', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'InputList', u'name': u'inputList', u'minOccurs': u'0'}, None), + MemberSpec_("type", "networkTypes", 0, 1, {"use": u"optional"}), + MemberSpec_( + "temperature", "Nml2Quantity_temperature", 0, 1, {"use": u"optional"} + ), + MemberSpec_( + "spaces", + "Space", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"Space", + u"name": u"space", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "regions", + "Region", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"Region", + u"name": u"region", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "extracellular_properties", + "ExtracellularPropertiesLocal", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"ExtracellularPropertiesLocal", + u"name": u"extracellularProperties", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "populations", + "Population", + 1, + 0, + { + u"maxOccurs": u"unbounded", + u"type": u"Population", + u"name": u"population", + }, + None, + ), + MemberSpec_( + "cell_sets", + "CellSet", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"CellSet", + u"name": u"cellSet", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "synaptic_connections", + "SynapticConnection", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"SynapticConnection", + u"name": u"synapticConnection", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "projections", + "Projection", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"Projection", + u"name": u"projection", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "electrical_projections", + "ElectricalProjection", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"ElectricalProjection", + u"name": u"electricalProjection", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "continuous_projections", + "ContinuousProjection", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"ContinuousProjection", + u"name": u"continuousProjection", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "explicit_inputs", + "ExplicitInput", + 1, + 1, + { + u"maxOccurs": 
u"unbounded", + u"type": u"ExplicitInput", + u"name": u"explicitInput", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "input_lists", + "InputList", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"InputList", + u"name": u"inputList", + u"minOccurs": u"0", + }, + None, + ), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, type=None, temperature=None, spaces=None, regions=None, extracellular_properties=None, populations=None, cell_sets=None, synaptic_connections=None, projections=None, electrical_projections=None, continuous_projections=None, explicit_inputs=None, input_lists=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + type=None, + temperature=None, + spaces=None, + regions=None, + extracellular_properties=None, + populations=None, + cell_sets=None, + synaptic_connections=None, + projections=None, + electrical_projections=None, + continuous_projections=None, + explicit_inputs=None, + input_lists=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(Network, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(Network, self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_ + ) self.type = _cast(None, type) self.temperature = _cast(None, temperature) if spaces is None: @@ -8128,111 +14080,248 @@ def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properti self.input_lists = [] else: self.input_lists = input_lists + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, Network) + subclass = getSubclassFromModule_(CurrentSubclassModule_, Network) if subclass is not None: return subclass(*args_, **kwargs_) if Network.subclass: return Network.subclass(*args_, **kwargs_) else: return Network(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_networkTypes(self, value): # Validate type networkTypes, a restriction on xs:string. if value is not None and Validate_simpletypes_: value = str(value) - enumerations = ['network', 'networkWithTemperature'] + enumerations = ["network", "networkWithTemperature"] enumeration_respectee = False for enum in enumerations: if value == enum: enumeration_respectee = True break if not enumeration_respectee: - warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on networkTypes' % {"value" : value.encode("utf-8")} ) + warnings_.warn( + 'Value "%(value)s" does not match xsd enumeration restriction on networkTypes' + % {"value": value.encode("utf-8")} + ) + def validate_Nml2Quantity_temperature(self, value): # Validate type Nml2Quantity_temperature, a restriction on xs:string. 
if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_temperature_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_temperature_patterns_, )) - validate_Nml2Quantity_temperature_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(degC)$']] + self.validate_Nml2Quantity_temperature_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_temperature_patterns_, + ) + ) + + validate_Nml2Quantity_temperature_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(degC)$"] + ] + def hasContent_(self): if ( - self.spaces or - self.regions or - self.extracellular_properties or - self.populations or - self.cell_sets or - self.synaptic_connections or - self.projections or - self.electrical_projections or - self.continuous_projections or - self.explicit_inputs or - self.input_lists or - super(Network, self).hasContent_() + self.spaces + or self.regions + or self.extracellular_properties + or self.populations + or self.cell_sets + or self.synaptic_connections + or self.projections + or self.electrical_projections + or self.continuous_projections + or self.explicit_inputs + or self.input_lists + or super(Network, self).hasContent_() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Network', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('Network') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Network", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("Network") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Network') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Network" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Network', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="Network", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Network'): - super(Network, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Network') - if self.type is not None and 'type' not in already_processed: - already_processed.add('type') - outfile.write(' type=%s' % (quote_attrib(self.type), )) - if self.temperature is not None and 'temperature' not in already_processed: - already_processed.add('temperature') - outfile.write(' temperature=%s' % (quote_attrib(self.temperature), )) - 
def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Network', fromsubclass_=False, pretty_print=True): - super(Network, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="Network" + ): + super(Network, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Network" + ) + if self.type is not None and "type" not in already_processed: + already_processed.add("type") + outfile.write(" type=%s" % (quote_attrib(self.type),)) + if self.temperature is not None and "temperature" not in already_processed: + already_processed.add("temperature") + outfile.write(" temperature=%s" % (quote_attrib(self.temperature),)) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Network", + fromsubclass_=False, + pretty_print=True, + ): + super(Network, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" for space_ in self.spaces: - space_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='space', pretty_print=pretty_print) + space_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="space", + pretty_print=pretty_print, + ) for region_ in self.regions: - region_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='region', pretty_print=pretty_print) + region_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="region", + pretty_print=pretty_print, + ) for extracellularProperties_ in self.extracellular_properties: - extracellularProperties_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='extracellularProperties', pretty_print=pretty_print) + extracellularProperties_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="extracellularProperties", + pretty_print=pretty_print, + ) for population_ in self.populations: - population_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='population', pretty_print=pretty_print) + population_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="population", + pretty_print=pretty_print, + ) for cellSet_ in self.cell_sets: - cellSet_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='cellSet', pretty_print=pretty_print) + cellSet_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="cellSet", + pretty_print=pretty_print, + ) for synapticConnection_ in self.synaptic_connections: - synapticConnection_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='synapticConnection', pretty_print=pretty_print) + synapticConnection_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="synapticConnection", + pretty_print=pretty_print, + ) for projection_ in self.projections: - projection_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='projection', pretty_print=pretty_print) + projection_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="projection", + pretty_print=pretty_print, + ) for electricalProjection_ in self.electrical_projections: - electricalProjection_.export(outfile, level, namespaceprefix_, namespacedef_='', 
name_='electricalProjection', pretty_print=pretty_print) + electricalProjection_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="electricalProjection", + pretty_print=pretty_print, + ) for continuousProjection_ in self.continuous_projections: - continuousProjection_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='continuousProjection', pretty_print=pretty_print) + continuousProjection_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="continuousProjection", + pretty_print=pretty_print, + ) for explicitInput_ in self.explicit_inputs: - explicitInput_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='explicitInput', pretty_print=pretty_print) + explicitInput_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="explicitInput", + pretty_print=pretty_print, + ) for inputList_ in self.input_lists: - inputList_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='inputList', pretty_print=pretty_print) + inputList_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="inputList", + pretty_print=pretty_print, + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -8240,78 +14329,83 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('type', node) - if value is not None and 'type' not in already_processed: - already_processed.add('type') + value = find_attr_value_("type", node) + if value is not None and "type" not in already_processed: + already_processed.add("type") self.type = value - self.validate_networkTypes(self.type) # validate type networkTypes - value = find_attr_value_('temperature', node) - if value is not None and 'temperature' not in already_processed: - already_processed.add('temperature') + self.validate_networkTypes(self.type) # validate type networkTypes + value = find_attr_value_("temperature", node) + if value is not None and "temperature" not in already_processed: + already_processed.add("temperature") self.temperature = value - self.validate_Nml2Quantity_temperature(self.temperature) # validate type Nml2Quantity_temperature + self.validate_Nml2Quantity_temperature( + self.temperature + ) # validate type Nml2Quantity_temperature super(Network, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'space': + if nodeName_ == "space": obj_ = Space.factory(parent_object_=self) obj_.build(child_) self.spaces.append(obj_) - obj_.original_tagname_ = 'space' - elif nodeName_ == 'region': + obj_.original_tagname_ = "space" + elif nodeName_ == "region": obj_ = Region.factory(parent_object_=self) obj_.build(child_) self.regions.append(obj_) - obj_.original_tagname_ = 'region' - elif nodeName_ == 'extracellularProperties': + obj_.original_tagname_ = "region" + elif nodeName_ == "extracellularProperties": obj_ = ExtracellularPropertiesLocal.factory(parent_object_=self) obj_.build(child_) self.extracellular_properties.append(obj_) - obj_.original_tagname_ = 'extracellularProperties' - elif nodeName_ == 'population': + obj_.original_tagname_ = "extracellularProperties" + elif nodeName_ == "population": obj_ = Population.factory(parent_object_=self) obj_.build(child_) self.populations.append(obj_) - obj_.original_tagname_ = 
'population' - elif nodeName_ == 'cellSet': + obj_.original_tagname_ = "population" + elif nodeName_ == "cellSet": obj_ = CellSet.factory(parent_object_=self) obj_.build(child_) self.cell_sets.append(obj_) - obj_.original_tagname_ = 'cellSet' - elif nodeName_ == 'synapticConnection': + obj_.original_tagname_ = "cellSet" + elif nodeName_ == "synapticConnection": obj_ = SynapticConnection.factory(parent_object_=self) obj_.build(child_) self.synaptic_connections.append(obj_) - obj_.original_tagname_ = 'synapticConnection' - elif nodeName_ == 'projection': + obj_.original_tagname_ = "synapticConnection" + elif nodeName_ == "projection": obj_ = Projection.factory(parent_object_=self) obj_.build(child_) self.projections.append(obj_) - obj_.original_tagname_ = 'projection' - elif nodeName_ == 'electricalProjection': + obj_.original_tagname_ = "projection" + elif nodeName_ == "electricalProjection": obj_ = ElectricalProjection.factory(parent_object_=self) obj_.build(child_) self.electrical_projections.append(obj_) - obj_.original_tagname_ = 'electricalProjection' - elif nodeName_ == 'continuousProjection': + obj_.original_tagname_ = "electricalProjection" + elif nodeName_ == "continuousProjection": obj_ = ContinuousProjection.factory(parent_object_=self) obj_.build(child_) self.continuous_projections.append(obj_) - obj_.original_tagname_ = 'continuousProjection' - elif nodeName_ == 'explicitInput': + obj_.original_tagname_ = "continuousProjection" + elif nodeName_ == "explicitInput": obj_ = ExplicitInput.factory(parent_object_=self) obj_.build(child_) self.explicit_inputs.append(obj_) - obj_.original_tagname_ = 'explicitInput' - elif nodeName_ == 'inputList': + obj_.original_tagname_ = "explicitInput" + elif nodeName_ == "inputList": obj_ = InputList.factory(parent_object_=self) obj_.build(child_) self.input_lists.append(obj_) - obj_.original_tagname_ = 'inputList' + obj_.original_tagname_ = "inputList" super(Network, self).buildChildren(child_, node, nodeName_, True) warn_count = 0 - def get_by_id(self,id): + + def get_by_id(self, id): """Get a component by its ID :param id: ID of component to find @@ -8322,39 +14416,47 @@ def get_by_id(self,id): for ms in self.member_data_items_: mlist = self.__getattribute__(ms.name) for m in mlist: - if hasattr(m,"id"): + if hasattr(m, "id"): if m.id == id: return m else: all_ids.append(m.id) from neuroml.loaders import print_ - if self.warn_count<10: - print_("Id "+id+" not found in element. All ids: "+str(sorted(all_ids))) - self.warn_count+=1 - elif self.warn_count==10: + + if self.warn_count < 10: + print_( + "Id " + + id + + " not found in element. All ids: " + + str(sorted(all_ids)) + ) + self.warn_count += 1 + elif self.warn_count == 10: print_(" - Suppressing further warnings about id not found...") return None - def __str__(self): - return "Network "+str(self.id)+" with "+str(len(self.populations))+" population(s)" + return ( + "Network " + + str(self.id) + + " with " + + str(len(self.populations)) + + " population(s)" + ) - def exportHdf5(self, h5file, h5Group): - """Export to HDF5 file. 
""" - #print("Exporting Network: "+str(self.id)+" as HDF5") - + """Export to HDF5 file.""" + # print("Exporting Network: "+str(self.id)+" as HDF5") import numpy - netGroup = h5file.create_group(h5Group, 'network') + netGroup = h5file.create_group(h5Group, "network") netGroup._f_setattr("id", self.id) netGroup._f_setattr("notes", self.notes) if self.temperature: netGroup._f_setattr("temperature", self.temperature) - for pop in self.populations: pop.exportHdf5(h5file, netGroup) @@ -8375,101 +14477,215 @@ def exportHdf5(self, h5file, h5Group): for il in self.input_lists: il.exportHdf5(h5file, netGroup) - # end class Network class TransientPoissonFiringSynapse(Standalone): member_data_items_ = [ - MemberSpec_('average_rate', 'Nml2Quantity_pertime', 0, 0, {'use': u'required'}), - MemberSpec_('delay', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('duration', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('synapse', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('spike_target', 'xs:string', 0, 0, {'use': u'required'}), + MemberSpec_("average_rate", "Nml2Quantity_pertime", 0, 0, {"use": u"required"}), + MemberSpec_("delay", "Nml2Quantity_time", 0, 0, {"use": u"required"}), + MemberSpec_("duration", "Nml2Quantity_time", 0, 0, {"use": u"required"}), + MemberSpec_("synapse", "xs:string", 0, 0, {"use": u"required"}), + MemberSpec_("spike_target", "xs:string", 0, 0, {"use": u"required"}), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, average_rate=None, delay=None, duration=None, synapse=None, spike_target=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + average_rate=None, + delay=None, + duration=None, + synapse=None, + spike_target=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(TransientPoissonFiringSynapse, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(TransientPoissonFiringSynapse, self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_ + ) self.average_rate = _cast(None, average_rate) self.delay = _cast(None, delay) self.duration = _cast(None, duration) self.synapse = _cast(None, synapse) self.spike_target = _cast(None, spike_target) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, TransientPoissonFiringSynapse) + CurrentSubclassModule_, TransientPoissonFiringSynapse + ) if subclass is not None: return subclass(*args_, **kwargs_) if TransientPoissonFiringSynapse.subclass: return TransientPoissonFiringSynapse.subclass(*args_, **kwargs_) else: return TransientPoissonFiringSynapse(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_pertime(self, value): # Validate type Nml2Quantity_pertime, a restriction on xs:string. 
if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_pertime_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_pertime_patterns_, )) - validate_Nml2Quantity_pertime_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(per_s|per_ms|Hz)$']] + self.validate_Nml2Quantity_pertime_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_pertime_patterns_, + ) + ) + + validate_Nml2Quantity_pertime_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(per_s|per_ms|Hz)$"] + ] + def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] + self.validate_Nml2Quantity_time_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_time_patterns_, + ) + ) + + validate_Nml2Quantity_time_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$"] + ] + def hasContent_(self): - if ( - super(TransientPoissonFiringSynapse, self).hasContent_() - ): + if super(TransientPoissonFiringSynapse, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='TransientPoissonFiringSynapse', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('TransientPoissonFiringSynapse') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="TransientPoissonFiringSynapse", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("TransientPoissonFiringSynapse") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='TransientPoissonFiringSynapse') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="TransientPoissonFiringSynapse", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='TransientPoissonFiringSynapse', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="TransientPoissonFiringSynapse", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, 
outfile, level, already_processed, namespaceprefix_='', name_='TransientPoissonFiringSynapse'): - super(TransientPoissonFiringSynapse, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='TransientPoissonFiringSynapse') - if self.average_rate is not None and 'average_rate' not in already_processed: - already_processed.add('average_rate') - outfile.write(' averageRate=%s' % (quote_attrib(self.average_rate), )) - if self.delay is not None and 'delay' not in already_processed: - already_processed.add('delay') - outfile.write(' delay=%s' % (quote_attrib(self.delay), )) - if self.duration is not None and 'duration' not in already_processed: - already_processed.add('duration') - outfile.write(' duration=%s' % (quote_attrib(self.duration), )) - if self.synapse is not None and 'synapse' not in already_processed: - already_processed.add('synapse') - outfile.write(' synapse=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.synapse), input_name='synapse')), )) - if self.spike_target is not None and 'spike_target' not in already_processed: - already_processed.add('spike_target') - outfile.write(' spikeTarget=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.spike_target), input_name='spikeTarget')), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='TransientPoissonFiringSynapse', fromsubclass_=False, pretty_print=True): - super(TransientPoissonFiringSynapse, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="TransientPoissonFiringSynapse", + ): + super(TransientPoissonFiringSynapse, self).exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="TransientPoissonFiringSynapse", + ) + if self.average_rate is not None and "average_rate" not in already_processed: + already_processed.add("average_rate") + outfile.write(" averageRate=%s" % (quote_attrib(self.average_rate),)) + if self.delay is not None and "delay" not in already_processed: + already_processed.add("delay") + outfile.write(" delay=%s" % (quote_attrib(self.delay),)) + if self.duration is not None and "duration" not in already_processed: + already_processed.add("duration") + outfile.write(" duration=%s" % (quote_attrib(self.duration),)) + if self.synapse is not None and "synapse" not in already_processed: + already_processed.add("synapse") + outfile.write( + " synapse=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.synapse), input_name="synapse" + ) + ), + ) + ) + if self.spike_target is not None and "spike_target" not in already_processed: + already_processed.add("spike_target") + outfile.write( + " spikeTarget=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.spike_target), input_name="spikeTarget" + ) + ), + ) + ) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="TransientPoissonFiringSynapse", + fromsubclass_=False, + pretty_print=True, + ): + super(TransientPoissonFiringSynapse, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -8477,111 +14693,227 @@ def build(self, node): nodeName_ = 
Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('averageRate', node) - if value is not None and 'averageRate' not in already_processed: - already_processed.add('averageRate') + value = find_attr_value_("averageRate", node) + if value is not None and "averageRate" not in already_processed: + already_processed.add("averageRate") self.average_rate = value - self.validate_Nml2Quantity_pertime(self.average_rate) # validate type Nml2Quantity_pertime - value = find_attr_value_('delay', node) - if value is not None and 'delay' not in already_processed: - already_processed.add('delay') + self.validate_Nml2Quantity_pertime( + self.average_rate + ) # validate type Nml2Quantity_pertime + value = find_attr_value_("delay", node) + if value is not None and "delay" not in already_processed: + already_processed.add("delay") self.delay = value - self.validate_Nml2Quantity_time(self.delay) # validate type Nml2Quantity_time - value = find_attr_value_('duration', node) - if value is not None and 'duration' not in already_processed: - already_processed.add('duration') + self.validate_Nml2Quantity_time( + self.delay + ) # validate type Nml2Quantity_time + value = find_attr_value_("duration", node) + if value is not None and "duration" not in already_processed: + already_processed.add("duration") self.duration = value - self.validate_Nml2Quantity_time(self.duration) # validate type Nml2Quantity_time - value = find_attr_value_('synapse', node) - if value is not None and 'synapse' not in already_processed: - already_processed.add('synapse') + self.validate_Nml2Quantity_time( + self.duration + ) # validate type Nml2Quantity_time + value = find_attr_value_("synapse", node) + if value is not None and "synapse" not in already_processed: + already_processed.add("synapse") self.synapse = value - value = find_attr_value_('spikeTarget', node) - if value is not None and 'spikeTarget' not in already_processed: - already_processed.add('spikeTarget') + value = find_attr_value_("spikeTarget", node) + if value is not None and "spikeTarget" not in already_processed: + already_processed.add("spikeTarget") self.spike_target = value - super(TransientPoissonFiringSynapse, self).buildAttributes(node, attrs, already_processed) + super(TransientPoissonFiringSynapse, self).buildAttributes( + node, attrs, already_processed + ) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(TransientPoissonFiringSynapse, self).buildChildren(child_, node, nodeName_, True) + super(TransientPoissonFiringSynapse, self).buildChildren( + child_, node, nodeName_, True + ) pass + + # end class TransientPoissonFiringSynapse class PoissonFiringSynapse(Standalone): member_data_items_ = [ - MemberSpec_('average_rate', 'Nml2Quantity_pertime', 0, 0, {'use': u'required'}), - MemberSpec_('synapse', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('spike_target', 'xs:string', 0, 0, {'use': u'required'}), + MemberSpec_("average_rate", "Nml2Quantity_pertime", 0, 0, {"use": u"required"}), + MemberSpec_("synapse", "xs:string", 0, 0, {"use": u"required"}), + MemberSpec_("spike_target", "xs:string", 0, 0, {"use": u"required"}), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, average_rate=None, synapse=None, spike_target=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + 
metaid=None, + notes=None, + properties=None, + annotation=None, + average_rate=None, + synapse=None, + spike_target=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(PoissonFiringSynapse, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(PoissonFiringSynapse, self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_ + ) self.average_rate = _cast(None, average_rate) self.synapse = _cast(None, synapse) self.spike_target = _cast(None, spike_target) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, PoissonFiringSynapse) + CurrentSubclassModule_, PoissonFiringSynapse + ) if subclass is not None: return subclass(*args_, **kwargs_) if PoissonFiringSynapse.subclass: return PoissonFiringSynapse.subclass(*args_, **kwargs_) else: return PoissonFiringSynapse(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_pertime(self, value): # Validate type Nml2Quantity_pertime, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_pertime_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_pertime_patterns_, )) - validate_Nml2Quantity_pertime_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(per_s|per_ms|Hz)$']] + self.validate_Nml2Quantity_pertime_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_pertime_patterns_, + ) + ) + + validate_Nml2Quantity_pertime_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(per_s|per_ms|Hz)$"] + ] + def hasContent_(self): - if ( - super(PoissonFiringSynapse, self).hasContent_() - ): + if super(PoissonFiringSynapse, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PoissonFiringSynapse', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('PoissonFiringSynapse') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="PoissonFiringSynapse", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("PoissonFiringSynapse") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PoissonFiringSynapse') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="PoissonFiringSynapse", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PoissonFiringSynapse', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, 
+ level + 1, + namespaceprefix_, + namespacedef_, + name_="PoissonFiringSynapse", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PoissonFiringSynapse'): - super(PoissonFiringSynapse, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PoissonFiringSynapse') - if self.average_rate is not None and 'average_rate' not in already_processed: - already_processed.add('average_rate') - outfile.write(' averageRate=%s' % (quote_attrib(self.average_rate), )) - if self.synapse is not None and 'synapse' not in already_processed: - already_processed.add('synapse') - outfile.write(' synapse=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.synapse), input_name='synapse')), )) - if self.spike_target is not None and 'spike_target' not in already_processed: - already_processed.add('spike_target') - outfile.write(' spikeTarget=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.spike_target), input_name='spikeTarget')), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PoissonFiringSynapse', fromsubclass_=False, pretty_print=True): - super(PoissonFiringSynapse, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="PoissonFiringSynapse", + ): + super(PoissonFiringSynapse, self).exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="PoissonFiringSynapse", + ) + if self.average_rate is not None and "average_rate" not in already_processed: + already_processed.add("average_rate") + outfile.write(" averageRate=%s" % (quote_attrib(self.average_rate),)) + if self.synapse is not None and "synapse" not in already_processed: + already_processed.add("synapse") + outfile.write( + " synapse=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.synapse), input_name="synapse" + ) + ), + ) + ) + if self.spike_target is not None and "spike_target" not in already_processed: + already_processed.add("spike_target") + outfile.write( + " spikeTarget=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.spike_target), input_name="spikeTarget" + ) + ), + ) + ) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="PoissonFiringSynapse", + fromsubclass_=False, + pretty_print=True, + ): + super(PoissonFiringSynapse, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -8589,96 +14921,194 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('averageRate', node) - if value is not None and 'averageRate' not in already_processed: - already_processed.add('averageRate') + value = find_attr_value_("averageRate", node) + if value is not None and "averageRate" not in already_processed: + already_processed.add("averageRate") self.average_rate = 
value - self.validate_Nml2Quantity_pertime(self.average_rate) # validate type Nml2Quantity_pertime - value = find_attr_value_('synapse', node) - if value is not None and 'synapse' not in already_processed: - already_processed.add('synapse') + self.validate_Nml2Quantity_pertime( + self.average_rate + ) # validate type Nml2Quantity_pertime + value = find_attr_value_("synapse", node) + if value is not None and "synapse" not in already_processed: + already_processed.add("synapse") self.synapse = value - value = find_attr_value_('spikeTarget', node) - if value is not None and 'spikeTarget' not in already_processed: - already_processed.add('spikeTarget') + value = find_attr_value_("spikeTarget", node) + if value is not None and "spikeTarget" not in already_processed: + already_processed.add("spikeTarget") self.spike_target = value - super(PoissonFiringSynapse, self).buildAttributes(node, attrs, already_processed) + super(PoissonFiringSynapse, self).buildAttributes( + node, attrs, already_processed + ) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(PoissonFiringSynapse, self).buildChildren(child_, node, nodeName_, True) pass + + # end class PoissonFiringSynapse class SpikeGeneratorPoisson(Standalone): member_data_items_ = [ - MemberSpec_('average_rate', 'Nml2Quantity_pertime', 0, 0, {'use': u'required'}), + MemberSpec_("average_rate", "Nml2Quantity_pertime", 0, 0, {"use": u"required"}), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, average_rate=None, extensiontype_=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + average_rate=None, + extensiontype_=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(SpikeGeneratorPoisson, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, extensiontype_, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(SpikeGeneratorPoisson, self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + extensiontype_, + **kwargs_ + ) self.average_rate = _cast(None, average_rate) self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, SpikeGeneratorPoisson) + CurrentSubclassModule_, SpikeGeneratorPoisson + ) if subclass is not None: return subclass(*args_, **kwargs_) if SpikeGeneratorPoisson.subclass: return SpikeGeneratorPoisson.subclass(*args_, **kwargs_) else: return SpikeGeneratorPoisson(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_pertime(self, value): # Validate type Nml2Quantity_pertime, a restriction on xs:string. 
if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_pertime_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_pertime_patterns_, )) - validate_Nml2Quantity_pertime_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(per_s|per_ms|Hz)$']] + self.validate_Nml2Quantity_pertime_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_pertime_patterns_, + ) + ) + + validate_Nml2Quantity_pertime_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(per_s|per_ms|Hz)$"] + ] + def hasContent_(self): - if ( - super(SpikeGeneratorPoisson, self).hasContent_() - ): + if super(SpikeGeneratorPoisson, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SpikeGeneratorPoisson', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('SpikeGeneratorPoisson') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="SpikeGeneratorPoisson", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("SpikeGeneratorPoisson") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpikeGeneratorPoisson') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="SpikeGeneratorPoisson", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SpikeGeneratorPoisson', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="SpikeGeneratorPoisson", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SpikeGeneratorPoisson'): - super(SpikeGeneratorPoisson, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpikeGeneratorPoisson') - if self.average_rate is not None and 'average_rate' not in already_processed: - already_processed.add('average_rate') - outfile.write(' averageRate=%s' % (quote_attrib(self.average_rate), )) - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="SpikeGeneratorPoisson", + ): + super(SpikeGeneratorPoisson, self).exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + 
name_="SpikeGeneratorPoisson", + ) + if self.average_rate is not None and "average_rate" not in already_processed: + already_processed.add("average_rate") + outfile.write(" averageRate=%s" % (quote_attrib(self.average_rate),)) + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SpikeGeneratorPoisson', fromsubclass_=False, pretty_print=True): - super(SpikeGeneratorPoisson, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="SpikeGeneratorPoisson", + fromsubclass_=False, + pretty_print=True, + ): + super(SpikeGeneratorPoisson, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -8686,92 +15116,183 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('averageRate', node) - if value is not None and 'averageRate' not in already_processed: - already_processed.add('averageRate') + value = find_attr_value_("averageRate", node) + if value is not None and "averageRate" not in already_processed: + already_processed.add("averageRate") self.average_rate = value - self.validate_Nml2Quantity_pertime(self.average_rate) # validate type Nml2Quantity_pertime - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + self.validate_Nml2Quantity_pertime( + self.average_rate + ) # validate type Nml2Quantity_pertime + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value - super(SpikeGeneratorPoisson, self).buildAttributes(node, attrs, already_processed) + super(SpikeGeneratorPoisson, self).buildAttributes( + node, attrs, already_processed + ) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(SpikeGeneratorPoisson, self).buildChildren(child_, node, nodeName_, True) pass + + # end class SpikeGeneratorPoisson class SpikeGeneratorRandom(Standalone): member_data_items_ = [ - MemberSpec_('max_isi', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('min_isi', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), + MemberSpec_("max_isi", "Nml2Quantity_time", 0, 0, {"use": u"required"}), + MemberSpec_("min_isi", "Nml2Quantity_time", 0, 0, {"use": u"required"}), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, max_isi=None, min_isi=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + max_isi=None, + min_isi=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(SpikeGeneratorRandom, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.parent_object_ = 
kwargs_.get("parent_object_") + super(SpikeGeneratorRandom, self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_ + ) self.max_isi = _cast(None, max_isi) self.min_isi = _cast(None, min_isi) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, SpikeGeneratorRandom) + CurrentSubclassModule_, SpikeGeneratorRandom + ) if subclass is not None: return subclass(*args_, **kwargs_) if SpikeGeneratorRandom.subclass: return SpikeGeneratorRandom.subclass(*args_, **kwargs_) else: return SpikeGeneratorRandom(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] + self.validate_Nml2Quantity_time_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_time_patterns_, + ) + ) + + validate_Nml2Quantity_time_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$"] + ] + def hasContent_(self): - if ( - super(SpikeGeneratorRandom, self).hasContent_() - ): + if super(SpikeGeneratorRandom, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SpikeGeneratorRandom', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('SpikeGeneratorRandom') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="SpikeGeneratorRandom", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("SpikeGeneratorRandom") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpikeGeneratorRandom') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="SpikeGeneratorRandom", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SpikeGeneratorRandom', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="SpikeGeneratorRandom", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SpikeGeneratorRandom'): - super(SpikeGeneratorRandom, self).exportAttributes(outfile, level, already_processed, 
namespaceprefix_, name_='SpikeGeneratorRandom') - if self.max_isi is not None and 'max_isi' not in already_processed: - already_processed.add('max_isi') - outfile.write(' maxISI=%s' % (quote_attrib(self.max_isi), )) - if self.min_isi is not None and 'min_isi' not in already_processed: - already_processed.add('min_isi') - outfile.write(' minISI=%s' % (quote_attrib(self.min_isi), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SpikeGeneratorRandom', fromsubclass_=False, pretty_print=True): - super(SpikeGeneratorRandom, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="SpikeGeneratorRandom", + ): + super(SpikeGeneratorRandom, self).exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="SpikeGeneratorRandom", + ) + if self.max_isi is not None and "max_isi" not in already_processed: + already_processed.add("max_isi") + outfile.write(" maxISI=%s" % (quote_attrib(self.max_isi),)) + if self.min_isi is not None and "min_isi" not in already_processed: + already_processed.add("min_isi") + outfile.write(" minISI=%s" % (quote_attrib(self.min_isi),)) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="SpikeGeneratorRandom", + fromsubclass_=False, + pretty_print=True, + ): + super(SpikeGeneratorRandom, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -8779,88 +15300,170 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('maxISI', node) - if value is not None and 'maxISI' not in already_processed: - already_processed.add('maxISI') + value = find_attr_value_("maxISI", node) + if value is not None and "maxISI" not in already_processed: + already_processed.add("maxISI") self.max_isi = value - self.validate_Nml2Quantity_time(self.max_isi) # validate type Nml2Quantity_time - value = find_attr_value_('minISI', node) - if value is not None and 'minISI' not in already_processed: - already_processed.add('minISI') + self.validate_Nml2Quantity_time( + self.max_isi + ) # validate type Nml2Quantity_time + value = find_attr_value_("minISI", node) + if value is not None and "minISI" not in already_processed: + already_processed.add("minISI") self.min_isi = value - self.validate_Nml2Quantity_time(self.min_isi) # validate type Nml2Quantity_time - super(SpikeGeneratorRandom, self).buildAttributes(node, attrs, already_processed) + self.validate_Nml2Quantity_time( + self.min_isi + ) # validate type Nml2Quantity_time + super(SpikeGeneratorRandom, self).buildAttributes( + node, attrs, already_processed + ) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(SpikeGeneratorRandom, self).buildChildren(child_, node, nodeName_, True) pass + + # end class SpikeGeneratorRandom class SpikeGenerator(Standalone): member_data_items_ = [ - MemberSpec_('period', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), + MemberSpec_("period", "Nml2Quantity_time", 0, 0, {"use": u"required"}), ] subclass = None 
superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, period=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + period=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(SpikeGenerator, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(SpikeGenerator, self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_ + ) self.period = _cast(None, period) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, SpikeGenerator) + subclass = getSubclassFromModule_(CurrentSubclassModule_, SpikeGenerator) if subclass is not None: return subclass(*args_, **kwargs_) if SpikeGenerator.subclass: return SpikeGenerator.subclass(*args_, **kwargs_) else: return SpikeGenerator(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] + self.validate_Nml2Quantity_time_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_time_patterns_, + ) + ) + + validate_Nml2Quantity_time_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$"] + ] + def hasContent_(self): - if ( - super(SpikeGenerator, self).hasContent_() - ): + if super(SpikeGenerator, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SpikeGenerator', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('SpikeGenerator') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="SpikeGenerator", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("SpikeGenerator") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpikeGenerator') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="SpikeGenerator" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SpikeGenerator', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + 
name_="SpikeGenerator", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SpikeGenerator'): - super(SpikeGenerator, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpikeGenerator') - if self.period is not None and 'period' not in already_processed: - already_processed.add('period') - outfile.write(' period=%s' % (quote_attrib(self.period), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SpikeGenerator', fromsubclass_=False, pretty_print=True): - super(SpikeGenerator, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="SpikeGenerator", + ): + super(SpikeGenerator, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="SpikeGenerator" + ) + if self.period is not None and "period" not in already_processed: + already_processed.add("period") + outfile.write(" period=%s" % (quote_attrib(self.period),)) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="SpikeGenerator", + fromsubclass_=False, + pretty_print=True, + ): + super(SpikeGenerator, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -8868,100 +15471,215 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('period', node) - if value is not None and 'period' not in already_processed: - already_processed.add('period') + value = find_attr_value_("period", node) + if value is not None and "period" not in already_processed: + already_processed.add("period") self.period = value - self.validate_Nml2Quantity_time(self.period) # validate type Nml2Quantity_time + self.validate_Nml2Quantity_time( + self.period + ) # validate type Nml2Quantity_time super(SpikeGenerator, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(SpikeGenerator, self).buildChildren(child_, node, nodeName_, True) pass + + # end class SpikeGenerator class TimedSynapticInput(Standalone): member_data_items_ = [ - MemberSpec_('synapse', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('spike_target', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('spikes', 'Spike', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Spike', u'name': u'spike', u'minOccurs': u'0'}, None), + MemberSpec_("synapse", "NmlId", 0, 0, {"use": u"required"}), + MemberSpec_("spike_target", "xs:string", 0, 0, {"use": u"required"}), + MemberSpec_( + "spikes", + "Spike", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"Spike", + u"name": u"spike", + u"minOccurs": u"0", + }, + None, + ), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, synapse=None, spike_target=None, 
spikes=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + synapse=None, + spike_target=None, + spikes=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(TimedSynapticInput, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(TimedSynapticInput, self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_ + ) self.synapse = _cast(None, synapse) self.spike_target = _cast(None, spike_target) if spikes is None: self.spikes = [] else: self.spikes = spikes + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, TimedSynapticInput) + CurrentSubclassModule_, TimedSynapticInput + ) if subclass is not None: return subclass(*args_, **kwargs_) if TimedSynapticInput.subclass: return TimedSynapticInput.subclass(*args_, **kwargs_) else: return TimedSynapticInput(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] + self.validate_NmlId_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [[u"^[a-zA-Z_][a-zA-Z0-9_]*$"]] + def hasContent_(self): - if ( - self.spikes or - super(TimedSynapticInput, self).hasContent_() - ): + if self.spikes or super(TimedSynapticInput, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='TimedSynapticInput', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('TimedSynapticInput') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="TimedSynapticInput", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("TimedSynapticInput") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='TimedSynapticInput') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="TimedSynapticInput", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='TimedSynapticInput', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="TimedSynapticInput", + 
pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='TimedSynapticInput'): - super(TimedSynapticInput, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='TimedSynapticInput') - if self.synapse is not None and 'synapse' not in already_processed: - already_processed.add('synapse') - outfile.write(' synapse=%s' % (quote_attrib(self.synapse), )) - if self.spike_target is not None and 'spike_target' not in already_processed: - already_processed.add('spike_target') - outfile.write(' spikeTarget=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.spike_target), input_name='spikeTarget')), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='TimedSynapticInput', fromsubclass_=False, pretty_print=True): - super(TimedSynapticInput, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="TimedSynapticInput", + ): + super(TimedSynapticInput, self).exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="TimedSynapticInput", + ) + if self.synapse is not None and "synapse" not in already_processed: + already_processed.add("synapse") + outfile.write(" synapse=%s" % (quote_attrib(self.synapse),)) + if self.spike_target is not None and "spike_target" not in already_processed: + already_processed.add("spike_target") + outfile.write( + " spikeTarget=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.spike_target), input_name="spikeTarget" + ) + ), + ) + ) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="TimedSynapticInput", + fromsubclass_=False, + pretty_print=True, + ): + super(TimedSynapticInput, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" for spike_ in self.spikes: - spike_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='spike', pretty_print=pretty_print) + spike_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="spike", + pretty_print=pretty_print, + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -8969,91 +15687,169 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('synapse', node) - if value is not None and 'synapse' not in already_processed: - already_processed.add('synapse') + value = find_attr_value_("synapse", node) + if value is not None and "synapse" not in already_processed: + already_processed.add("synapse") self.synapse = value - self.validate_NmlId(self.synapse) # validate type NmlId - value = find_attr_value_('spikeTarget', node) - if value is not None and 'spikeTarget' not in already_processed: - already_processed.add('spikeTarget') + self.validate_NmlId(self.synapse) # validate type NmlId + value = find_attr_value_("spikeTarget", node) + if 
value is not None and "spikeTarget" not in already_processed: + already_processed.add("spikeTarget") self.spike_target = value super(TimedSynapticInput, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'spike': + if nodeName_ == "spike": obj_ = Spike.factory(parent_object_=self) obj_.build(child_) self.spikes.append(obj_) - obj_.original_tagname_ = 'spike' + obj_.original_tagname_ = "spike" super(TimedSynapticInput, self).buildChildren(child_, node, nodeName_, True) + + # end class TimedSynapticInput class SpikeArray(Standalone): member_data_items_ = [ - MemberSpec_('spikes', 'Spike', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Spike', u'name': u'spike', u'minOccurs': u'0'}, None), + MemberSpec_( + "spikes", + "Spike", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"Spike", + u"name": u"spike", + u"minOccurs": u"0", + }, + None, + ), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, spikes=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + spikes=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(SpikeArray, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(SpikeArray, self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_ + ) if spikes is None: self.spikes = [] else: self.spikes = spikes + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, SpikeArray) + subclass = getSubclassFromModule_(CurrentSubclassModule_, SpikeArray) if subclass is not None: return subclass(*args_, **kwargs_) if SpikeArray.subclass: return SpikeArray.subclass(*args_, **kwargs_) else: return SpikeArray(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): - if ( - self.spikes or - super(SpikeArray, self).hasContent_() - ): + if self.spikes or super(SpikeArray, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SpikeArray', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('SpikeArray') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="SpikeArray", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("SpikeArray") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpikeArray') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="SpikeArray" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SpikeArray', 
pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="SpikeArray", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SpikeArray'): - super(SpikeArray, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpikeArray') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SpikeArray', fromsubclass_=False, pretty_print=True): - super(SpikeArray, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="SpikeArray" + ): + super(SpikeArray, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="SpikeArray" + ) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="SpikeArray", + fromsubclass_=False, + pretty_print=True, + ): + super(SpikeArray, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" for spike_ in self.spikes: - spike_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='spike', pretty_print=pretty_print) + spike_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="spike", + pretty_print=pretty_print, + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -9061,82 +15857,141 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): super(SpikeArray, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'spike': + if nodeName_ == "spike": obj_ = Spike.factory(parent_object_=self) obj_.build(child_) self.spikes.append(obj_) - obj_.original_tagname_ = 'spike' + obj_.original_tagname_ = "spike" super(SpikeArray, self).buildChildren(child_, node, nodeName_, True) + + # end class SpikeArray class Spike(BaseNonNegativeIntegerId): member_data_items_ = [ - MemberSpec_('time', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), + MemberSpec_("time", "Nml2Quantity_time", 0, 0, {"use": u"required"}), ] subclass = None superclass = BaseNonNegativeIntegerId + def __init__(self, neuro_lex_id=None, id=None, time=None, **kwargs_): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(Spike, self).__init__(neuro_lex_id, id, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(Spike, self).__init__(neuro_lex_id, id, **kwargs_) self.time = _cast(None, time) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, Spike) + subclass = getSubclassFromModule_(CurrentSubclassModule_, Spike) if subclass is not None: return subclass(*args_, **kwargs_) if Spike.subclass: return Spike.subclass(*args_, **kwargs_) else: return Spike(*args_, **kwargs_) + 
factory = staticmethod(factory) + def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] + self.validate_Nml2Quantity_time_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_time_patterns_, + ) + ) + + validate_Nml2Quantity_time_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$"] + ] + def hasContent_(self): - if ( - super(Spike, self).hasContent_() - ): + if super(Spike, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Spike', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('Spike') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Spike", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("Spike") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Spike') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Spike" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Spike', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Spike'): - super(Spike, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Spike') - if self.time is not None and 'time' not in already_processed: - already_processed.add('time') - outfile.write(' time=%s' % (quote_attrib(self.time), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Spike', fromsubclass_=False, pretty_print=True): - super(Spike, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="Spike", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="Spike" + ): + super(Spike, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Spike" + ) + if self.time is not None and "time" not in already_processed: + already_processed.add("time") + outfile.write(" time=%s" % 
(quote_attrib(self.time),)) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Spike", + fromsubclass_=False, + pretty_print=True, + ): + super(Spike, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) pass + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -9144,35 +15999,72 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('time', node) - if value is not None and 'time' not in already_processed: - already_processed.add('time') + value = find_attr_value_("time", node) + if value is not None and "time" not in already_processed: + already_processed.add("time") self.time = value - self.validate_Nml2Quantity_time(self.time) # validate type Nml2Quantity_time + self.validate_Nml2Quantity_time( + self.time + ) # validate type Nml2Quantity_time super(Spike, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(Spike, self).buildChildren(child_, node, nodeName_, True) pass + + # end class Spike class VoltageClampTriple(Standalone): member_data_items_ = [ - MemberSpec_('active', 'ZeroOrOne', 0, 0, {'use': u'required'}), - MemberSpec_('delay', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('duration', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('conditioning_voltage', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('testing_voltage', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('return_voltage', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('simple_series_resistance', 'Nml2Quantity_resistance', 0, 0, {'use': u'required'}), + MemberSpec_("active", "ZeroOrOne", 0, 0, {"use": u"required"}), + MemberSpec_("delay", "Nml2Quantity_time", 0, 0, {"use": u"required"}), + MemberSpec_("duration", "Nml2Quantity_time", 0, 0, {"use": u"required"}), + MemberSpec_( + "conditioning_voltage", "Nml2Quantity_voltage", 0, 0, {"use": u"required"} + ), + MemberSpec_( + "testing_voltage", "Nml2Quantity_voltage", 0, 0, {"use": u"required"} + ), + MemberSpec_( + "return_voltage", "Nml2Quantity_voltage", 0, 0, {"use": u"required"} + ), + MemberSpec_( + "simple_series_resistance", + "Nml2Quantity_resistance", + 0, + 0, + {"use": u"required"}, + ), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, active=None, delay=None, duration=None, conditioning_voltage=None, testing_voltage=None, return_voltage=None, simple_series_resistance=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + active=None, + delay=None, + duration=None, + conditioning_voltage=None, + testing_voltage=None, + return_voltage=None, + simple_series_resistance=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(VoltageClampTriple, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(VoltageClampTriple, self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_ + ) self.active = _cast(float, 
active) self.delay = _cast(None, delay) self.duration = _cast(None, duration) @@ -9180,103 +16072,215 @@ def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properti self.testing_voltage = _cast(None, testing_voltage) self.return_voltage = _cast(None, return_voltage) self.simple_series_resistance = _cast(None, simple_series_resistance) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, VoltageClampTriple) + CurrentSubclassModule_, VoltageClampTriple + ) if subclass is not None: return subclass(*args_, **kwargs_) if VoltageClampTriple.subclass: return VoltageClampTriple.subclass(*args_, **kwargs_) else: return VoltageClampTriple(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_ZeroOrOne(self, value): # Validate type ZeroOrOne, a restriction on xs:double. if value is not None and Validate_simpletypes_: value = str(value) - enumerations = ['0', '1'] + enumerations = ["0", "1"] enumeration_respectee = False for enum in enumerations: if value == enum: enumeration_respectee = True break if not enumeration_respectee: - warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on ZeroOrOne' % {"value" : value.encode("utf-8")} ) + warnings_.warn( + 'Value "%(value)s" does not match xsd enumeration restriction on ZeroOrOne' + % {"value": value.encode("utf-8")} + ) + def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] + self.validate_Nml2Quantity_time_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_time_patterns_, + ) + ) + + validate_Nml2Quantity_time_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$"] + ] + def validate_Nml2Quantity_voltage(self, value): # Validate type Nml2Quantity_voltage, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_voltage_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_voltage_patterns_, )) - validate_Nml2Quantity_voltage_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$']] + self.validate_Nml2Quantity_voltage_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_voltage_patterns_, + ) + ) + + validate_Nml2Quantity_voltage_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$"] + ] + def validate_Nml2Quantity_resistance(self, value): # Validate type Nml2Quantity_resistance, a restriction on xs:string. 
if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_resistance_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_resistance_patterns_, )) - validate_Nml2Quantity_resistance_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(ohm|kohm|Mohm)$']] + self.validate_Nml2Quantity_resistance_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_resistance_patterns_, + ) + ) + + validate_Nml2Quantity_resistance_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(ohm|kohm|Mohm)$"] + ] + def hasContent_(self): - if ( - super(VoltageClampTriple, self).hasContent_() - ): + if super(VoltageClampTriple, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='VoltageClampTriple', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('VoltageClampTriple') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="VoltageClampTriple", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("VoltageClampTriple") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='VoltageClampTriple') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="VoltageClampTriple", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='VoltageClampTriple', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="VoltageClampTriple", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='VoltageClampTriple'): - super(VoltageClampTriple, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='VoltageClampTriple') - if self.active is not None and 'active' not in already_processed: - already_processed.add('active') - outfile.write(' active=%s' % (quote_attrib(self.active), )) - if self.delay is not None and 'delay' not in already_processed: - already_processed.add('delay') - outfile.write(' delay=%s' % (quote_attrib(self.delay), )) - if self.duration is not None and 'duration' not in already_processed: - already_processed.add('duration') - outfile.write(' duration=%s' % (quote_attrib(self.duration), )) - if self.conditioning_voltage is not None and 'conditioning_voltage' not in already_processed: - already_processed.add('conditioning_voltage') - outfile.write(' conditioningVoltage=%s' % 
(quote_attrib(self.conditioning_voltage), )) - if self.testing_voltage is not None and 'testing_voltage' not in already_processed: - already_processed.add('testing_voltage') - outfile.write(' testingVoltage=%s' % (quote_attrib(self.testing_voltage), )) - if self.return_voltage is not None and 'return_voltage' not in already_processed: - already_processed.add('return_voltage') - outfile.write(' returnVoltage=%s' % (quote_attrib(self.return_voltage), )) - if self.simple_series_resistance is not None and 'simple_series_resistance' not in already_processed: - already_processed.add('simple_series_resistance') - outfile.write(' simpleSeriesResistance=%s' % (quote_attrib(self.simple_series_resistance), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='VoltageClampTriple', fromsubclass_=False, pretty_print=True): - super(VoltageClampTriple, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="VoltageClampTriple", + ): + super(VoltageClampTriple, self).exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="VoltageClampTriple", + ) + if self.active is not None and "active" not in already_processed: + already_processed.add("active") + outfile.write(" active=%s" % (quote_attrib(self.active),)) + if self.delay is not None and "delay" not in already_processed: + already_processed.add("delay") + outfile.write(" delay=%s" % (quote_attrib(self.delay),)) + if self.duration is not None and "duration" not in already_processed: + already_processed.add("duration") + outfile.write(" duration=%s" % (quote_attrib(self.duration),)) + if ( + self.conditioning_voltage is not None + and "conditioning_voltage" not in already_processed + ): + already_processed.add("conditioning_voltage") + outfile.write( + " conditioningVoltage=%s" % (quote_attrib(self.conditioning_voltage),) + ) + if ( + self.testing_voltage is not None + and "testing_voltage" not in already_processed + ): + already_processed.add("testing_voltage") + outfile.write(" testingVoltage=%s" % (quote_attrib(self.testing_voltage),)) + if ( + self.return_voltage is not None + and "return_voltage" not in already_processed + ): + already_processed.add("return_voltage") + outfile.write(" returnVoltage=%s" % (quote_attrib(self.return_voltage),)) + if ( + self.simple_series_resistance is not None + and "simple_series_resistance" not in already_processed + ): + already_processed.add("simple_series_resistance") + outfile.write( + " simpleSeriesResistance=%s" + % (quote_attrib(self.simple_series_resistance),) + ) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="VoltageClampTriple", + fromsubclass_=False, + pretty_print=True, + ): + super(VoltageClampTriple, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -9284,145 +16288,275 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('active', node) - if value is not None and 'active' not in already_processed: - 
already_processed.add('active') + value = find_attr_value_("active", node) + if value is not None and "active" not in already_processed: + already_processed.add("active") try: self.active = float(value) except ValueError as exp: - raise ValueError('Bad float/double attribute (active): %s' % exp) - self.validate_ZeroOrOne(self.active) # validate type ZeroOrOne - value = find_attr_value_('delay', node) - if value is not None and 'delay' not in already_processed: - already_processed.add('delay') + raise ValueError("Bad float/double attribute (active): %s" % exp) + self.validate_ZeroOrOne(self.active) # validate type ZeroOrOne + value = find_attr_value_("delay", node) + if value is not None and "delay" not in already_processed: + already_processed.add("delay") self.delay = value - self.validate_Nml2Quantity_time(self.delay) # validate type Nml2Quantity_time - value = find_attr_value_('duration', node) - if value is not None and 'duration' not in already_processed: - already_processed.add('duration') + self.validate_Nml2Quantity_time( + self.delay + ) # validate type Nml2Quantity_time + value = find_attr_value_("duration", node) + if value is not None and "duration" not in already_processed: + already_processed.add("duration") self.duration = value - self.validate_Nml2Quantity_time(self.duration) # validate type Nml2Quantity_time - value = find_attr_value_('conditioningVoltage', node) - if value is not None and 'conditioningVoltage' not in already_processed: - already_processed.add('conditioningVoltage') + self.validate_Nml2Quantity_time( + self.duration + ) # validate type Nml2Quantity_time + value = find_attr_value_("conditioningVoltage", node) + if value is not None and "conditioningVoltage" not in already_processed: + already_processed.add("conditioningVoltage") self.conditioning_voltage = value - self.validate_Nml2Quantity_voltage(self.conditioning_voltage) # validate type Nml2Quantity_voltage - value = find_attr_value_('testingVoltage', node) - if value is not None and 'testingVoltage' not in already_processed: - already_processed.add('testingVoltage') + self.validate_Nml2Quantity_voltage( + self.conditioning_voltage + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("testingVoltage", node) + if value is not None and "testingVoltage" not in already_processed: + already_processed.add("testingVoltage") self.testing_voltage = value - self.validate_Nml2Quantity_voltage(self.testing_voltage) # validate type Nml2Quantity_voltage - value = find_attr_value_('returnVoltage', node) - if value is not None and 'returnVoltage' not in already_processed: - already_processed.add('returnVoltage') + self.validate_Nml2Quantity_voltage( + self.testing_voltage + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("returnVoltage", node) + if value is not None and "returnVoltage" not in already_processed: + already_processed.add("returnVoltage") self.return_voltage = value - self.validate_Nml2Quantity_voltage(self.return_voltage) # validate type Nml2Quantity_voltage - value = find_attr_value_('simpleSeriesResistance', node) - if value is not None and 'simpleSeriesResistance' not in already_processed: - already_processed.add('simpleSeriesResistance') + self.validate_Nml2Quantity_voltage( + self.return_voltage + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("simpleSeriesResistance", node) + if value is not None and "simpleSeriesResistance" not in already_processed: + already_processed.add("simpleSeriesResistance") self.simple_series_resistance = value - 
self.validate_Nml2Quantity_resistance(self.simple_series_resistance) # validate type Nml2Quantity_resistance + self.validate_Nml2Quantity_resistance( + self.simple_series_resistance + ) # validate type Nml2Quantity_resistance super(VoltageClampTriple, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(VoltageClampTriple, self).buildChildren(child_, node, nodeName_, True) pass + + # end class VoltageClampTriple class VoltageClamp(Standalone): member_data_items_ = [ - MemberSpec_('delay', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('duration', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('target_voltage', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('simple_series_resistance', 'Nml2Quantity_resistance', 0, 0, {'use': u'required'}), + MemberSpec_("delay", "Nml2Quantity_time", 0, 0, {"use": u"required"}), + MemberSpec_("duration", "Nml2Quantity_time", 0, 0, {"use": u"required"}), + MemberSpec_( + "target_voltage", "Nml2Quantity_voltage", 0, 0, {"use": u"required"} + ), + MemberSpec_( + "simple_series_resistance", + "Nml2Quantity_resistance", + 0, + 0, + {"use": u"required"}, + ), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, delay=None, duration=None, target_voltage=None, simple_series_resistance=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + delay=None, + duration=None, + target_voltage=None, + simple_series_resistance=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(VoltageClamp, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(VoltageClamp, self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_ + ) self.delay = _cast(None, delay) self.duration = _cast(None, duration) self.target_voltage = _cast(None, target_voltage) self.simple_series_resistance = _cast(None, simple_series_resistance) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, VoltageClamp) + subclass = getSubclassFromModule_(CurrentSubclassModule_, VoltageClamp) if subclass is not None: return subclass(*args_, **kwargs_) if VoltageClamp.subclass: return VoltageClamp.subclass(*args_, **kwargs_) else: return VoltageClamp(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. 
if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] + self.validate_Nml2Quantity_time_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_time_patterns_, + ) + ) + + validate_Nml2Quantity_time_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$"] + ] + def validate_Nml2Quantity_voltage(self, value): # Validate type Nml2Quantity_voltage, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_voltage_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_voltage_patterns_, )) - validate_Nml2Quantity_voltage_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$']] + self.validate_Nml2Quantity_voltage_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_voltage_patterns_, + ) + ) + + validate_Nml2Quantity_voltage_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$"] + ] + def validate_Nml2Quantity_resistance(self, value): # Validate type Nml2Quantity_resistance, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_resistance_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_resistance_patterns_, )) - validate_Nml2Quantity_resistance_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(ohm|kohm|Mohm)$']] + self.validate_Nml2Quantity_resistance_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_resistance_patterns_, + ) + ) + + validate_Nml2Quantity_resistance_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(ohm|kohm|Mohm)$"] + ] + def hasContent_(self): - if ( - super(VoltageClamp, self).hasContent_() - ): + if super(VoltageClamp, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='VoltageClamp', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('VoltageClamp') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="VoltageClamp", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("VoltageClamp") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, 
namespaceprefix_, name_='VoltageClamp') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="VoltageClamp" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='VoltageClamp', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="VoltageClamp", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='VoltageClamp'): - super(VoltageClamp, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='VoltageClamp') - if self.delay is not None and 'delay' not in already_processed: - already_processed.add('delay') - outfile.write(' delay=%s' % (quote_attrib(self.delay), )) - if self.duration is not None and 'duration' not in already_processed: - already_processed.add('duration') - outfile.write(' duration=%s' % (quote_attrib(self.duration), )) - if self.target_voltage is not None and 'target_voltage' not in already_processed: - already_processed.add('target_voltage') - outfile.write(' targetVoltage=%s' % (quote_attrib(self.target_voltage), )) - if self.simple_series_resistance is not None and 'simple_series_resistance' not in already_processed: - already_processed.add('simple_series_resistance') - outfile.write(' simpleSeriesResistance=%s' % (quote_attrib(self.simple_series_resistance), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='VoltageClamp', fromsubclass_=False, pretty_print=True): - super(VoltageClamp, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="VoltageClamp", + ): + super(VoltageClamp, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="VoltageClamp" + ) + if self.delay is not None and "delay" not in already_processed: + already_processed.add("delay") + outfile.write(" delay=%s" % (quote_attrib(self.delay),)) + if self.duration is not None and "duration" not in already_processed: + already_processed.add("duration") + outfile.write(" duration=%s" % (quote_attrib(self.duration),)) + if ( + self.target_voltage is not None + and "target_voltage" not in already_processed + ): + already_processed.add("target_voltage") + outfile.write(" targetVoltage=%s" % (quote_attrib(self.target_voltage),)) + if ( + self.simple_series_resistance is not None + and "simple_series_resistance" not in already_processed + ): + already_processed.add("simple_series_resistance") + outfile.write( + " simpleSeriesResistance=%s" + % (quote_attrib(self.simple_series_resistance),) + ) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="VoltageClamp", + fromsubclass_=False, + pretty_print=True, + ): + super(VoltageClamp, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -9430,46 +16564,109 @@ def build(self, node): nodeName_ = 
Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('delay', node) - if value is not None and 'delay' not in already_processed: - already_processed.add('delay') + value = find_attr_value_("delay", node) + if value is not None and "delay" not in already_processed: + already_processed.add("delay") self.delay = value - self.validate_Nml2Quantity_time(self.delay) # validate type Nml2Quantity_time - value = find_attr_value_('duration', node) - if value is not None and 'duration' not in already_processed: - already_processed.add('duration') + self.validate_Nml2Quantity_time( + self.delay + ) # validate type Nml2Quantity_time + value = find_attr_value_("duration", node) + if value is not None and "duration" not in already_processed: + already_processed.add("duration") self.duration = value - self.validate_Nml2Quantity_time(self.duration) # validate type Nml2Quantity_time - value = find_attr_value_('targetVoltage', node) - if value is not None and 'targetVoltage' not in already_processed: - already_processed.add('targetVoltage') + self.validate_Nml2Quantity_time( + self.duration + ) # validate type Nml2Quantity_time + value = find_attr_value_("targetVoltage", node) + if value is not None and "targetVoltage" not in already_processed: + already_processed.add("targetVoltage") self.target_voltage = value - self.validate_Nml2Quantity_voltage(self.target_voltage) # validate type Nml2Quantity_voltage - value = find_attr_value_('simpleSeriesResistance', node) - if value is not None and 'simpleSeriesResistance' not in already_processed: - already_processed.add('simpleSeriesResistance') + self.validate_Nml2Quantity_voltage( + self.target_voltage + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("simpleSeriesResistance", node) + if value is not None and "simpleSeriesResistance" not in already_processed: + already_processed.add("simpleSeriesResistance") self.simple_series_resistance = value - self.validate_Nml2Quantity_resistance(self.simple_series_resistance) # validate type Nml2Quantity_resistance + self.validate_Nml2Quantity_resistance( + self.simple_series_resistance + ) # validate type Nml2Quantity_resistance super(VoltageClamp, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(VoltageClamp, self).buildChildren(child_, node, nodeName_, True) pass + + # end class VoltageClamp class CompoundInputDL(Standalone): member_data_items_ = [ - MemberSpec_('pulse_generator_dls', 'PulseGeneratorDL', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'PulseGeneratorDL', u'name': u'pulseGeneratorDL', u'minOccurs': u'0'}, None), - MemberSpec_('sine_generator_dls', 'SineGeneratorDL', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'SineGeneratorDL', u'name': u'sineGeneratorDL', u'minOccurs': u'0'}, None), - MemberSpec_('ramp_generator_dls', 'RampGeneratorDL', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'RampGeneratorDL', u'name': u'rampGeneratorDL', u'minOccurs': u'0'}, None), + MemberSpec_( + "pulse_generator_dls", + "PulseGeneratorDL", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"PulseGeneratorDL", + u"name": u"pulseGeneratorDL", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "sine_generator_dls", + "SineGeneratorDL", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"SineGeneratorDL", + u"name": u"sineGeneratorDL", + u"minOccurs": u"0", + }, + None, + ), + 
MemberSpec_( + "ramp_generator_dls", + "RampGeneratorDL", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"RampGeneratorDL", + u"name": u"rampGeneratorDL", + u"minOccurs": u"0", + }, + None, + ), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, pulse_generator_dls=None, sine_generator_dls=None, ramp_generator_dls=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + pulse_generator_dls=None, + sine_generator_dls=None, + ramp_generator_dls=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(CompoundInputDL, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(CompoundInputDL, self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_ + ) if pulse_generator_dls is None: self.pulse_generator_dls = [] else: @@ -9482,62 +16679,133 @@ def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properti self.ramp_generator_dls = [] else: self.ramp_generator_dls = ramp_generator_dls + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, CompoundInputDL) + subclass = getSubclassFromModule_(CurrentSubclassModule_, CompoundInputDL) if subclass is not None: return subclass(*args_, **kwargs_) if CompoundInputDL.subclass: return CompoundInputDL.subclass(*args_, **kwargs_) else: return CompoundInputDL(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): if ( - self.pulse_generator_dls or - self.sine_generator_dls or - self.ramp_generator_dls or - super(CompoundInputDL, self).hasContent_() + self.pulse_generator_dls + or self.sine_generator_dls + or self.ramp_generator_dls + or super(CompoundInputDL, self).hasContent_() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CompoundInputDL', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('CompoundInputDL') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="CompoundInputDL", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("CompoundInputDL") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CompoundInputDL') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="CompoundInputDL" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CompoundInputDL', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="CompoundInputDL", + pretty_print=pretty_print, + ) showIndent(outfile, 
level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CompoundInputDL'): - super(CompoundInputDL, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CompoundInputDL') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CompoundInputDL', fromsubclass_=False, pretty_print=True): - super(CompoundInputDL, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="CompoundInputDL", + ): + super(CompoundInputDL, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="CompoundInputDL" + ) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="CompoundInputDL", + fromsubclass_=False, + pretty_print=True, + ): + super(CompoundInputDL, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" for pulseGeneratorDL_ in self.pulse_generator_dls: - pulseGeneratorDL_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='pulseGeneratorDL', pretty_print=pretty_print) + pulseGeneratorDL_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="pulseGeneratorDL", + pretty_print=pretty_print, + ) for sineGeneratorDL_ in self.sine_generator_dls: - sineGeneratorDL_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='sineGeneratorDL', pretty_print=pretty_print) + sineGeneratorDL_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="sineGeneratorDL", + pretty_print=pretty_print, + ) for rampGeneratorDL_ in self.ramp_generator_dls: - rampGeneratorDL_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='rampGeneratorDL', pretty_print=pretty_print) + rampGeneratorDL_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="rampGeneratorDL", + pretty_print=pretty_print, + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -9545,40 +16813,95 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): super(CompoundInputDL, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'pulseGeneratorDL': + if nodeName_ == "pulseGeneratorDL": obj_ = PulseGeneratorDL.factory(parent_object_=self) obj_.build(child_) self.pulse_generator_dls.append(obj_) - obj_.original_tagname_ = 'pulseGeneratorDL' - elif nodeName_ == 'sineGeneratorDL': + obj_.original_tagname_ = "pulseGeneratorDL" + elif nodeName_ == "sineGeneratorDL": obj_ = SineGeneratorDL.factory(parent_object_=self) obj_.build(child_) self.sine_generator_dls.append(obj_) - obj_.original_tagname_ = 'sineGeneratorDL' - elif nodeName_ == 'rampGeneratorDL': + obj_.original_tagname_ = "sineGeneratorDL" + elif nodeName_ == "rampGeneratorDL": obj_ = RampGeneratorDL.factory(parent_object_=self) obj_.build(child_) 
self.ramp_generator_dls.append(obj_) - obj_.original_tagname_ = 'rampGeneratorDL' + obj_.original_tagname_ = "rampGeneratorDL" super(CompoundInputDL, self).buildChildren(child_, node, nodeName_, True) + + # end class CompoundInputDL class CompoundInput(Standalone): member_data_items_ = [ - MemberSpec_('pulse_generators', 'PulseGenerator', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'PulseGenerator', u'name': u'pulseGenerator', u'minOccurs': u'0'}, None), - MemberSpec_('sine_generators', 'SineGenerator', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'SineGenerator', u'name': u'sineGenerator', u'minOccurs': u'0'}, None), - MemberSpec_('ramp_generators', 'RampGenerator', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'RampGenerator', u'name': u'rampGenerator', u'minOccurs': u'0'}, None), + MemberSpec_( + "pulse_generators", + "PulseGenerator", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"PulseGenerator", + u"name": u"pulseGenerator", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "sine_generators", + "SineGenerator", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"SineGenerator", + u"name": u"sineGenerator", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "ramp_generators", + "RampGenerator", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"RampGenerator", + u"name": u"rampGenerator", + u"minOccurs": u"0", + }, + None, + ), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, pulse_generators=None, sine_generators=None, ramp_generators=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + pulse_generators=None, + sine_generators=None, + ramp_generators=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(CompoundInput, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(CompoundInput, self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_ + ) if pulse_generators is None: self.pulse_generators = [] else: @@ -9591,62 +16914,133 @@ def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properti self.ramp_generators = [] else: self.ramp_generators = ramp_generators + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, CompoundInput) + subclass = getSubclassFromModule_(CurrentSubclassModule_, CompoundInput) if subclass is not None: return subclass(*args_, **kwargs_) if CompoundInput.subclass: return CompoundInput.subclass(*args_, **kwargs_) else: return CompoundInput(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): if ( - self.pulse_generators or - self.sine_generators or - self.ramp_generators or - super(CompoundInput, self).hasContent_() + self.pulse_generators + or self.sine_generators + or self.ramp_generators + or super(CompoundInput, self).hasContent_() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CompoundInput', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('CompoundInput') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="CompoundInput", + pretty_print=True, + ): + imported_ns_def_ = 
GenerateDSNamespaceDefs_.get("CompoundInput") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CompoundInput') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="CompoundInput" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CompoundInput', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="CompoundInput", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CompoundInput'): - super(CompoundInput, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CompoundInput') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CompoundInput', fromsubclass_=False, pretty_print=True): - super(CompoundInput, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="CompoundInput", + ): + super(CompoundInput, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="CompoundInput" + ) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="CompoundInput", + fromsubclass_=False, + pretty_print=True, + ): + super(CompoundInput, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" for pulseGenerator_ in self.pulse_generators: - pulseGenerator_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='pulseGenerator', pretty_print=pretty_print) + pulseGenerator_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="pulseGenerator", + pretty_print=pretty_print, + ) for sineGenerator_ in self.sine_generators: - sineGenerator_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='sineGenerator', pretty_print=pretty_print) + sineGenerator_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="sineGenerator", + pretty_print=pretty_print, + ) for rampGenerator_ in self.ramp_generators: - rampGenerator_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='rampGenerator', pretty_print=pretty_print) + rampGenerator_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="rampGenerator", + pretty_print=pretty_print, + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -9654,119 +17048,227 @@ def 
build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): super(CompoundInput, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'pulseGenerator': + if nodeName_ == "pulseGenerator": obj_ = PulseGenerator.factory(parent_object_=self) obj_.build(child_) self.pulse_generators.append(obj_) - obj_.original_tagname_ = 'pulseGenerator' - elif nodeName_ == 'sineGenerator': + obj_.original_tagname_ = "pulseGenerator" + elif nodeName_ == "sineGenerator": obj_ = SineGenerator.factory(parent_object_=self) obj_.build(child_) self.sine_generators.append(obj_) - obj_.original_tagname_ = 'sineGenerator' - elif nodeName_ == 'rampGenerator': + obj_.original_tagname_ = "sineGenerator" + elif nodeName_ == "rampGenerator": obj_ = RampGenerator.factory(parent_object_=self) obj_.build(child_) self.ramp_generators.append(obj_) - obj_.original_tagname_ = 'rampGenerator' + obj_.original_tagname_ = "rampGenerator" super(CompoundInput, self).buildChildren(child_, node, nodeName_, True) + + # end class CompoundInput class RampGeneratorDL(Standalone): member_data_items_ = [ - MemberSpec_('delay', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('duration', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('start_amplitude', 'Nml2Quantity_none', 0, 0, {'use': u'required'}), - MemberSpec_('finish_amplitude', 'Nml2Quantity_none', 0, 0, {'use': u'required'}), - MemberSpec_('baseline_amplitude', 'Nml2Quantity_none', 0, 0, {'use': u'required'}), + MemberSpec_("delay", "Nml2Quantity_time", 0, 0, {"use": u"required"}), + MemberSpec_("duration", "Nml2Quantity_time", 0, 0, {"use": u"required"}), + MemberSpec_("start_amplitude", "Nml2Quantity_none", 0, 0, {"use": u"required"}), + MemberSpec_( + "finish_amplitude", "Nml2Quantity_none", 0, 0, {"use": u"required"} + ), + MemberSpec_( + "baseline_amplitude", "Nml2Quantity_none", 0, 0, {"use": u"required"} + ), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, delay=None, duration=None, start_amplitude=None, finish_amplitude=None, baseline_amplitude=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + delay=None, + duration=None, + start_amplitude=None, + finish_amplitude=None, + baseline_amplitude=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(RampGeneratorDL, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(RampGeneratorDL, self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_ + ) self.delay = _cast(None, delay) self.duration = _cast(None, duration) self.start_amplitude = _cast(None, start_amplitude) self.finish_amplitude = _cast(None, finish_amplitude) self.baseline_amplitude = _cast(None, baseline_amplitude) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, RampGeneratorDL) + subclass = getSubclassFromModule_(CurrentSubclassModule_, RampGeneratorDL) if subclass is not None: return subclass(*args_, **kwargs_) if RampGeneratorDL.subclass: return 
RampGeneratorDL.subclass(*args_, **kwargs_) else: return RampGeneratorDL(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] + self.validate_Nml2Quantity_time_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_time_patterns_, + ) + ) + + validate_Nml2Quantity_time_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$"] + ] + def validate_Nml2Quantity_none(self, value): # Validate type Nml2Quantity_none, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_none_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_none_patterns_, )) - validate_Nml2Quantity_none_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?$']] + self.validate_Nml2Quantity_none_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_none_patterns_, + ) + ) + + validate_Nml2Quantity_none_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?$"] + ] + def hasContent_(self): - if ( - super(RampGeneratorDL, self).hasContent_() - ): + if super(RampGeneratorDL, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='RampGeneratorDL', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('RampGeneratorDL') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="RampGeneratorDL", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("RampGeneratorDL") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='RampGeneratorDL') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="RampGeneratorDL" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='RampGeneratorDL', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="RampGeneratorDL", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, 
outfile, level, already_processed, namespaceprefix_='', name_='RampGeneratorDL'): - super(RampGeneratorDL, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='RampGeneratorDL') - if self.delay is not None and 'delay' not in already_processed: - already_processed.add('delay') - outfile.write(' delay=%s' % (quote_attrib(self.delay), )) - if self.duration is not None and 'duration' not in already_processed: - already_processed.add('duration') - outfile.write(' duration=%s' % (quote_attrib(self.duration), )) - if self.start_amplitude is not None and 'start_amplitude' not in already_processed: - already_processed.add('start_amplitude') - outfile.write(' startAmplitude=%s' % (quote_attrib(self.start_amplitude), )) - if self.finish_amplitude is not None and 'finish_amplitude' not in already_processed: - already_processed.add('finish_amplitude') - outfile.write(' finishAmplitude=%s' % (quote_attrib(self.finish_amplitude), )) - if self.baseline_amplitude is not None and 'baseline_amplitude' not in already_processed: - already_processed.add('baseline_amplitude') - outfile.write(' baselineAmplitude=%s' % (quote_attrib(self.baseline_amplitude), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='RampGeneratorDL', fromsubclass_=False, pretty_print=True): - super(RampGeneratorDL, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="RampGeneratorDL", + ): + super(RampGeneratorDL, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="RampGeneratorDL" + ) + if self.delay is not None and "delay" not in already_processed: + already_processed.add("delay") + outfile.write(" delay=%s" % (quote_attrib(self.delay),)) + if self.duration is not None and "duration" not in already_processed: + already_processed.add("duration") + outfile.write(" duration=%s" % (quote_attrib(self.duration),)) + if ( + self.start_amplitude is not None + and "start_amplitude" not in already_processed + ): + already_processed.add("start_amplitude") + outfile.write(" startAmplitude=%s" % (quote_attrib(self.start_amplitude),)) + if ( + self.finish_amplitude is not None + and "finish_amplitude" not in already_processed + ): + already_processed.add("finish_amplitude") + outfile.write( + " finishAmplitude=%s" % (quote_attrib(self.finish_amplitude),) + ) + if ( + self.baseline_amplitude is not None + and "baseline_amplitude" not in already_processed + ): + already_processed.add("baseline_amplitude") + outfile.write( + " baselineAmplitude=%s" % (quote_attrib(self.baseline_amplitude),) + ) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="RampGeneratorDL", + fromsubclass_=False, + pretty_print=True, + ): + super(RampGeneratorDL, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -9774,130 +17276,250 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('delay', node) - if value is not None and 'delay' not in 
already_processed: - already_processed.add('delay') + value = find_attr_value_("delay", node) + if value is not None and "delay" not in already_processed: + already_processed.add("delay") self.delay = value - self.validate_Nml2Quantity_time(self.delay) # validate type Nml2Quantity_time - value = find_attr_value_('duration', node) - if value is not None and 'duration' not in already_processed: - already_processed.add('duration') + self.validate_Nml2Quantity_time( + self.delay + ) # validate type Nml2Quantity_time + value = find_attr_value_("duration", node) + if value is not None and "duration" not in already_processed: + already_processed.add("duration") self.duration = value - self.validate_Nml2Quantity_time(self.duration) # validate type Nml2Quantity_time - value = find_attr_value_('startAmplitude', node) - if value is not None and 'startAmplitude' not in already_processed: - already_processed.add('startAmplitude') + self.validate_Nml2Quantity_time( + self.duration + ) # validate type Nml2Quantity_time + value = find_attr_value_("startAmplitude", node) + if value is not None and "startAmplitude" not in already_processed: + already_processed.add("startAmplitude") self.start_amplitude = value - self.validate_Nml2Quantity_none(self.start_amplitude) # validate type Nml2Quantity_none - value = find_attr_value_('finishAmplitude', node) - if value is not None and 'finishAmplitude' not in already_processed: - already_processed.add('finishAmplitude') + self.validate_Nml2Quantity_none( + self.start_amplitude + ) # validate type Nml2Quantity_none + value = find_attr_value_("finishAmplitude", node) + if value is not None and "finishAmplitude" not in already_processed: + already_processed.add("finishAmplitude") self.finish_amplitude = value - self.validate_Nml2Quantity_none(self.finish_amplitude) # validate type Nml2Quantity_none - value = find_attr_value_('baselineAmplitude', node) - if value is not None and 'baselineAmplitude' not in already_processed: - already_processed.add('baselineAmplitude') + self.validate_Nml2Quantity_none( + self.finish_amplitude + ) # validate type Nml2Quantity_none + value = find_attr_value_("baselineAmplitude", node) + if value is not None and "baselineAmplitude" not in already_processed: + already_processed.add("baselineAmplitude") self.baseline_amplitude = value - self.validate_Nml2Quantity_none(self.baseline_amplitude) # validate type Nml2Quantity_none + self.validate_Nml2Quantity_none( + self.baseline_amplitude + ) # validate type Nml2Quantity_none super(RampGeneratorDL, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(RampGeneratorDL, self).buildChildren(child_, node, nodeName_, True) pass + + # end class RampGeneratorDL class RampGenerator(Standalone): member_data_items_ = [ - MemberSpec_('delay', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('duration', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('start_amplitude', 'Nml2Quantity_current', 0, 0, {'use': u'required'}), - MemberSpec_('finish_amplitude', 'Nml2Quantity_current', 0, 0, {'use': u'required'}), - MemberSpec_('baseline_amplitude', 'Nml2Quantity_current', 0, 0, {'use': u'required'}), + MemberSpec_("delay", "Nml2Quantity_time", 0, 0, {"use": u"required"}), + MemberSpec_("duration", "Nml2Quantity_time", 0, 0, {"use": u"required"}), + MemberSpec_( + "start_amplitude", "Nml2Quantity_current", 0, 0, {"use": u"required"} + ), + MemberSpec_( + "finish_amplitude", "Nml2Quantity_current", 0, 0, 
{"use": u"required"} + ), + MemberSpec_( + "baseline_amplitude", "Nml2Quantity_current", 0, 0, {"use": u"required"} + ), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, delay=None, duration=None, start_amplitude=None, finish_amplitude=None, baseline_amplitude=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + delay=None, + duration=None, + start_amplitude=None, + finish_amplitude=None, + baseline_amplitude=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(RampGenerator, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(RampGenerator, self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_ + ) self.delay = _cast(None, delay) self.duration = _cast(None, duration) self.start_amplitude = _cast(None, start_amplitude) self.finish_amplitude = _cast(None, finish_amplitude) self.baseline_amplitude = _cast(None, baseline_amplitude) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, RampGenerator) + subclass = getSubclassFromModule_(CurrentSubclassModule_, RampGenerator) if subclass is not None: return subclass(*args_, **kwargs_) if RampGenerator.subclass: return RampGenerator.subclass(*args_, **kwargs_) else: return RampGenerator(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] + self.validate_Nml2Quantity_time_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_time_patterns_, + ) + ) + + validate_Nml2Quantity_time_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$"] + ] + def validate_Nml2Quantity_current(self, value): # Validate type Nml2Quantity_current, a restriction on xs:string. 
if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_current_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_current_patterns_, )) - validate_Nml2Quantity_current_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(A|uA|nA|pA)$']] + self.validate_Nml2Quantity_current_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_current_patterns_, + ) + ) + + validate_Nml2Quantity_current_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(A|uA|nA|pA)$"] + ] + def hasContent_(self): - if ( - super(RampGenerator, self).hasContent_() - ): + if super(RampGenerator, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='RampGenerator', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('RampGenerator') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="RampGenerator", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("RampGenerator") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='RampGenerator') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="RampGenerator" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='RampGenerator', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="RampGenerator", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='RampGenerator'): - super(RampGenerator, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='RampGenerator') - if self.delay is not None and 'delay' not in already_processed: - already_processed.add('delay') - outfile.write(' delay=%s' % (quote_attrib(self.delay), )) - if self.duration is not None and 'duration' not in already_processed: - already_processed.add('duration') - outfile.write(' duration=%s' % (quote_attrib(self.duration), )) - if self.start_amplitude is not None and 'start_amplitude' not in already_processed: - already_processed.add('start_amplitude') - outfile.write(' startAmplitude=%s' % (quote_attrib(self.start_amplitude), )) - if self.finish_amplitude is not None and 'finish_amplitude' not in already_processed: - already_processed.add('finish_amplitude') - outfile.write(' finishAmplitude=%s' % (quote_attrib(self.finish_amplitude), )) - if self.baseline_amplitude is not None and 'baseline_amplitude' not in 
already_processed: - already_processed.add('baseline_amplitude') - outfile.write(' baselineAmplitude=%s' % (quote_attrib(self.baseline_amplitude), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='RampGenerator', fromsubclass_=False, pretty_print=True): - super(RampGenerator, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="RampGenerator", + ): + super(RampGenerator, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="RampGenerator" + ) + if self.delay is not None and "delay" not in already_processed: + already_processed.add("delay") + outfile.write(" delay=%s" % (quote_attrib(self.delay),)) + if self.duration is not None and "duration" not in already_processed: + already_processed.add("duration") + outfile.write(" duration=%s" % (quote_attrib(self.duration),)) + if ( + self.start_amplitude is not None + and "start_amplitude" not in already_processed + ): + already_processed.add("start_amplitude") + outfile.write(" startAmplitude=%s" % (quote_attrib(self.start_amplitude),)) + if ( + self.finish_amplitude is not None + and "finish_amplitude" not in already_processed + ): + already_processed.add("finish_amplitude") + outfile.write( + " finishAmplitude=%s" % (quote_attrib(self.finish_amplitude),) + ) + if ( + self.baseline_amplitude is not None + and "baseline_amplitude" not in already_processed + ): + already_processed.add("baseline_amplitude") + outfile.write( + " baselineAmplitude=%s" % (quote_attrib(self.baseline_amplitude),) + ) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="RampGenerator", + fromsubclass_=False, + pretty_print=True, + ): + super(RampGenerator, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -9905,130 +17527,231 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('delay', node) - if value is not None and 'delay' not in already_processed: - already_processed.add('delay') + value = find_attr_value_("delay", node) + if value is not None and "delay" not in already_processed: + already_processed.add("delay") self.delay = value - self.validate_Nml2Quantity_time(self.delay) # validate type Nml2Quantity_time - value = find_attr_value_('duration', node) - if value is not None and 'duration' not in already_processed: - already_processed.add('duration') + self.validate_Nml2Quantity_time( + self.delay + ) # validate type Nml2Quantity_time + value = find_attr_value_("duration", node) + if value is not None and "duration" not in already_processed: + already_processed.add("duration") self.duration = value - self.validate_Nml2Quantity_time(self.duration) # validate type Nml2Quantity_time - value = find_attr_value_('startAmplitude', node) - if value is not None and 'startAmplitude' not in already_processed: - already_processed.add('startAmplitude') + self.validate_Nml2Quantity_time( + self.duration + ) # validate type Nml2Quantity_time + value = find_attr_value_("startAmplitude", 
node) + if value is not None and "startAmplitude" not in already_processed: + already_processed.add("startAmplitude") self.start_amplitude = value - self.validate_Nml2Quantity_current(self.start_amplitude) # validate type Nml2Quantity_current - value = find_attr_value_('finishAmplitude', node) - if value is not None and 'finishAmplitude' not in already_processed: - already_processed.add('finishAmplitude') + self.validate_Nml2Quantity_current( + self.start_amplitude + ) # validate type Nml2Quantity_current + value = find_attr_value_("finishAmplitude", node) + if value is not None and "finishAmplitude" not in already_processed: + already_processed.add("finishAmplitude") self.finish_amplitude = value - self.validate_Nml2Quantity_current(self.finish_amplitude) # validate type Nml2Quantity_current - value = find_attr_value_('baselineAmplitude', node) - if value is not None and 'baselineAmplitude' not in already_processed: - already_processed.add('baselineAmplitude') + self.validate_Nml2Quantity_current( + self.finish_amplitude + ) # validate type Nml2Quantity_current + value = find_attr_value_("baselineAmplitude", node) + if value is not None and "baselineAmplitude" not in already_processed: + already_processed.add("baselineAmplitude") self.baseline_amplitude = value - self.validate_Nml2Quantity_current(self.baseline_amplitude) # validate type Nml2Quantity_current + self.validate_Nml2Quantity_current( + self.baseline_amplitude + ) # validate type Nml2Quantity_current super(RampGenerator, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(RampGenerator, self).buildChildren(child_, node, nodeName_, True) pass + + # end class RampGenerator class SineGeneratorDL(Standalone): member_data_items_ = [ - MemberSpec_('delay', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('phase', 'Nml2Quantity_none', 0, 0, {'use': u'required'}), - MemberSpec_('duration', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('amplitude', 'Nml2Quantity_none', 0, 0, {'use': u'required'}), - MemberSpec_('period', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), + MemberSpec_("delay", "Nml2Quantity_time", 0, 0, {"use": u"required"}), + MemberSpec_("phase", "Nml2Quantity_none", 0, 0, {"use": u"required"}), + MemberSpec_("duration", "Nml2Quantity_time", 0, 0, {"use": u"required"}), + MemberSpec_("amplitude", "Nml2Quantity_none", 0, 0, {"use": u"required"}), + MemberSpec_("period", "Nml2Quantity_time", 0, 0, {"use": u"required"}), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, delay=None, phase=None, duration=None, amplitude=None, period=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + delay=None, + phase=None, + duration=None, + amplitude=None, + period=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(SineGeneratorDL, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(SineGeneratorDL, self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_ + ) self.delay = _cast(None, delay) self.phase = _cast(None, phase) self.duration = _cast(None, duration) self.amplitude = _cast(None, amplitude) self.period = _cast(None, period) + def factory(*args_, 
**kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, SineGeneratorDL) + subclass = getSubclassFromModule_(CurrentSubclassModule_, SineGeneratorDL) if subclass is not None: return subclass(*args_, **kwargs_) if SineGeneratorDL.subclass: return SineGeneratorDL.subclass(*args_, **kwargs_) else: return SineGeneratorDL(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] + self.validate_Nml2Quantity_time_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_time_patterns_, + ) + ) + + validate_Nml2Quantity_time_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$"] + ] + def validate_Nml2Quantity_none(self, value): # Validate type Nml2Quantity_none, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_none_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_none_patterns_, )) - validate_Nml2Quantity_none_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?$']] + self.validate_Nml2Quantity_none_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_none_patterns_, + ) + ) + + validate_Nml2Quantity_none_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?$"] + ] + def hasContent_(self): - if ( - super(SineGeneratorDL, self).hasContent_() - ): + if super(SineGeneratorDL, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SineGeneratorDL', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('SineGeneratorDL') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="SineGeneratorDL", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("SineGeneratorDL") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SineGeneratorDL') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="SineGeneratorDL" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SineGeneratorDL', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + 
self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="SineGeneratorDL", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SineGeneratorDL'): - super(SineGeneratorDL, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SineGeneratorDL') - if self.delay is not None and 'delay' not in already_processed: - already_processed.add('delay') - outfile.write(' delay=%s' % (quote_attrib(self.delay), )) - if self.phase is not None and 'phase' not in already_processed: - already_processed.add('phase') - outfile.write(' phase=%s' % (quote_attrib(self.phase), )) - if self.duration is not None and 'duration' not in already_processed: - already_processed.add('duration') - outfile.write(' duration=%s' % (quote_attrib(self.duration), )) - if self.amplitude is not None and 'amplitude' not in already_processed: - already_processed.add('amplitude') - outfile.write(' amplitude=%s' % (quote_attrib(self.amplitude), )) - if self.period is not None and 'period' not in already_processed: - already_processed.add('period') - outfile.write(' period=%s' % (quote_attrib(self.period), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SineGeneratorDL', fromsubclass_=False, pretty_print=True): - super(SineGeneratorDL, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="SineGeneratorDL", + ): + super(SineGeneratorDL, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="SineGeneratorDL" + ) + if self.delay is not None and "delay" not in already_processed: + already_processed.add("delay") + outfile.write(" delay=%s" % (quote_attrib(self.delay),)) + if self.phase is not None and "phase" not in already_processed: + already_processed.add("phase") + outfile.write(" phase=%s" % (quote_attrib(self.phase),)) + if self.duration is not None and "duration" not in already_processed: + already_processed.add("duration") + outfile.write(" duration=%s" % (quote_attrib(self.duration),)) + if self.amplitude is not None and "amplitude" not in already_processed: + already_processed.add("amplitude") + outfile.write(" amplitude=%s" % (quote_attrib(self.amplitude),)) + if self.period is not None and "period" not in already_processed: + already_processed.add("period") + outfile.write(" period=%s" % (quote_attrib(self.period),)) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="SineGeneratorDL", + fromsubclass_=False, + pretty_print=True, + ): + super(SineGeneratorDL, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -10036,137 +17759,249 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('delay', node) - if value is not None and 'delay' not in already_processed: - 
already_processed.add('delay') + value = find_attr_value_("delay", node) + if value is not None and "delay" not in already_processed: + already_processed.add("delay") self.delay = value - self.validate_Nml2Quantity_time(self.delay) # validate type Nml2Quantity_time - value = find_attr_value_('phase', node) - if value is not None and 'phase' not in already_processed: - already_processed.add('phase') + self.validate_Nml2Quantity_time( + self.delay + ) # validate type Nml2Quantity_time + value = find_attr_value_("phase", node) + if value is not None and "phase" not in already_processed: + already_processed.add("phase") self.phase = value - self.validate_Nml2Quantity_none(self.phase) # validate type Nml2Quantity_none - value = find_attr_value_('duration', node) - if value is not None and 'duration' not in already_processed: - already_processed.add('duration') + self.validate_Nml2Quantity_none( + self.phase + ) # validate type Nml2Quantity_none + value = find_attr_value_("duration", node) + if value is not None and "duration" not in already_processed: + already_processed.add("duration") self.duration = value - self.validate_Nml2Quantity_time(self.duration) # validate type Nml2Quantity_time - value = find_attr_value_('amplitude', node) - if value is not None and 'amplitude' not in already_processed: - already_processed.add('amplitude') + self.validate_Nml2Quantity_time( + self.duration + ) # validate type Nml2Quantity_time + value = find_attr_value_("amplitude", node) + if value is not None and "amplitude" not in already_processed: + already_processed.add("amplitude") self.amplitude = value - self.validate_Nml2Quantity_none(self.amplitude) # validate type Nml2Quantity_none - value = find_attr_value_('period', node) - if value is not None and 'period' not in already_processed: - already_processed.add('period') + self.validate_Nml2Quantity_none( + self.amplitude + ) # validate type Nml2Quantity_none + value = find_attr_value_("period", node) + if value is not None and "period" not in already_processed: + already_processed.add("period") self.period = value - self.validate_Nml2Quantity_time(self.period) # validate type Nml2Quantity_time + self.validate_Nml2Quantity_time( + self.period + ) # validate type Nml2Quantity_time super(SineGeneratorDL, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(SineGeneratorDL, self).buildChildren(child_, node, nodeName_, True) pass + + # end class SineGeneratorDL class SineGenerator(Standalone): member_data_items_ = [ - MemberSpec_('delay', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('phase', 'Nml2Quantity_none', 0, 0, {'use': u'required'}), - MemberSpec_('duration', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('amplitude', 'Nml2Quantity_current', 0, 0, {'use': u'required'}), - MemberSpec_('period', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), + MemberSpec_("delay", "Nml2Quantity_time", 0, 0, {"use": u"required"}), + MemberSpec_("phase", "Nml2Quantity_none", 0, 0, {"use": u"required"}), + MemberSpec_("duration", "Nml2Quantity_time", 0, 0, {"use": u"required"}), + MemberSpec_("amplitude", "Nml2Quantity_current", 0, 0, {"use": u"required"}), + MemberSpec_("period", "Nml2Quantity_time", 0, 0, {"use": u"required"}), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, delay=None, phase=None, duration=None, amplitude=None, period=None, **kwargs_): + + 
def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + delay=None, + phase=None, + duration=None, + amplitude=None, + period=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(SineGenerator, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(SineGenerator, self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_ + ) self.delay = _cast(None, delay) self.phase = _cast(None, phase) self.duration = _cast(None, duration) self.amplitude = _cast(None, amplitude) self.period = _cast(None, period) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, SineGenerator) + subclass = getSubclassFromModule_(CurrentSubclassModule_, SineGenerator) if subclass is not None: return subclass(*args_, **kwargs_) if SineGenerator.subclass: return SineGenerator.subclass(*args_, **kwargs_) else: return SineGenerator(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] + self.validate_Nml2Quantity_time_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_time_patterns_, + ) + ) + + validate_Nml2Quantity_time_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$"] + ] + def validate_Nml2Quantity_none(self, value): # Validate type Nml2Quantity_none, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_none_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_none_patterns_, )) - validate_Nml2Quantity_none_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?$']] + self.validate_Nml2Quantity_none_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_none_patterns_, + ) + ) + + validate_Nml2Quantity_none_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?$"] + ] + def validate_Nml2Quantity_current(self, value): # Validate type Nml2Quantity_current, a restriction on xs:string. 
if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_current_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_current_patterns_, )) - validate_Nml2Quantity_current_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(A|uA|nA|pA)$']] + self.validate_Nml2Quantity_current_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_current_patterns_, + ) + ) + + validate_Nml2Quantity_current_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(A|uA|nA|pA)$"] + ] + def hasContent_(self): - if ( - super(SineGenerator, self).hasContent_() - ): + if super(SineGenerator, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SineGenerator', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('SineGenerator') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="SineGenerator", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("SineGenerator") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SineGenerator') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="SineGenerator" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SineGenerator', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="SineGenerator", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SineGenerator'): - super(SineGenerator, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SineGenerator') - if self.delay is not None and 'delay' not in already_processed: - already_processed.add('delay') - outfile.write(' delay=%s' % (quote_attrib(self.delay), )) - if self.phase is not None and 'phase' not in already_processed: - already_processed.add('phase') - outfile.write(' phase=%s' % (quote_attrib(self.phase), )) - if self.duration is not None and 'duration' not in already_processed: - already_processed.add('duration') - outfile.write(' duration=%s' % (quote_attrib(self.duration), )) - if self.amplitude is not None and 'amplitude' not in already_processed: - already_processed.add('amplitude') - outfile.write(' amplitude=%s' % (quote_attrib(self.amplitude), )) - if self.period is not None and 'period' not in already_processed: - already_processed.add('period') - outfile.write(' period=%s' % 
(quote_attrib(self.period), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SineGenerator', fromsubclass_=False, pretty_print=True): - super(SineGenerator, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="SineGenerator", + ): + super(SineGenerator, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="SineGenerator" + ) + if self.delay is not None and "delay" not in already_processed: + already_processed.add("delay") + outfile.write(" delay=%s" % (quote_attrib(self.delay),)) + if self.phase is not None and "phase" not in already_processed: + already_processed.add("phase") + outfile.write(" phase=%s" % (quote_attrib(self.phase),)) + if self.duration is not None and "duration" not in already_processed: + already_processed.add("duration") + outfile.write(" duration=%s" % (quote_attrib(self.duration),)) + if self.amplitude is not None and "amplitude" not in already_processed: + already_processed.add("amplitude") + outfile.write(" amplitude=%s" % (quote_attrib(self.amplitude),)) + if self.period is not None and "period" not in already_processed: + already_processed.add("period") + outfile.write(" period=%s" % (quote_attrib(self.period),)) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="SineGenerator", + fromsubclass_=False, + pretty_print=True, + ): + super(SineGenerator, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -10174,122 +18009,230 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('delay', node) - if value is not None and 'delay' not in already_processed: - already_processed.add('delay') + value = find_attr_value_("delay", node) + if value is not None and "delay" not in already_processed: + already_processed.add("delay") self.delay = value - self.validate_Nml2Quantity_time(self.delay) # validate type Nml2Quantity_time - value = find_attr_value_('phase', node) - if value is not None and 'phase' not in already_processed: - already_processed.add('phase') + self.validate_Nml2Quantity_time( + self.delay + ) # validate type Nml2Quantity_time + value = find_attr_value_("phase", node) + if value is not None and "phase" not in already_processed: + already_processed.add("phase") self.phase = value - self.validate_Nml2Quantity_none(self.phase) # validate type Nml2Quantity_none - value = find_attr_value_('duration', node) - if value is not None and 'duration' not in already_processed: - already_processed.add('duration') + self.validate_Nml2Quantity_none( + self.phase + ) # validate type Nml2Quantity_none + value = find_attr_value_("duration", node) + if value is not None and "duration" not in already_processed: + already_processed.add("duration") self.duration = value - self.validate_Nml2Quantity_time(self.duration) # validate type Nml2Quantity_time - value = find_attr_value_('amplitude', node) - if value is not None and 'amplitude' not in already_processed: - 
already_processed.add('amplitude') + self.validate_Nml2Quantity_time( + self.duration + ) # validate type Nml2Quantity_time + value = find_attr_value_("amplitude", node) + if value is not None and "amplitude" not in already_processed: + already_processed.add("amplitude") self.amplitude = value - self.validate_Nml2Quantity_current(self.amplitude) # validate type Nml2Quantity_current - value = find_attr_value_('period', node) - if value is not None and 'period' not in already_processed: - already_processed.add('period') + self.validate_Nml2Quantity_current( + self.amplitude + ) # validate type Nml2Quantity_current + value = find_attr_value_("period", node) + if value is not None and "period" not in already_processed: + already_processed.add("period") self.period = value - self.validate_Nml2Quantity_time(self.period) # validate type Nml2Quantity_time + self.validate_Nml2Quantity_time( + self.period + ) # validate type Nml2Quantity_time super(SineGenerator, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(SineGenerator, self).buildChildren(child_, node, nodeName_, True) pass + + # end class SineGenerator class PulseGeneratorDL(Standalone): """Generates a constant current pulse of a certain amplitude (non dimensional) for a specified duration after a delay.""" + member_data_items_ = [ - MemberSpec_('delay', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('duration', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('amplitude', 'Nml2Quantity_none', 0, 0, {'use': u'required'}), + MemberSpec_("delay", "Nml2Quantity_time", 0, 0, {"use": u"required"}), + MemberSpec_("duration", "Nml2Quantity_time", 0, 0, {"use": u"required"}), + MemberSpec_("amplitude", "Nml2Quantity_none", 0, 0, {"use": u"required"}), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, delay=None, duration=None, amplitude=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + delay=None, + duration=None, + amplitude=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(PulseGeneratorDL, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(PulseGeneratorDL, self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_ + ) self.delay = _cast(None, delay) self.duration = _cast(None, duration) self.amplitude = _cast(None, amplitude) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, PulseGeneratorDL) + subclass = getSubclassFromModule_(CurrentSubclassModule_, PulseGeneratorDL) if subclass is not None: return subclass(*args_, **kwargs_) if PulseGeneratorDL.subclass: return PulseGeneratorDL.subclass(*args_, **kwargs_) else: return PulseGeneratorDL(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. 
if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] + self.validate_Nml2Quantity_time_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_time_patterns_, + ) + ) + + validate_Nml2Quantity_time_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$"] + ] + def validate_Nml2Quantity_none(self, value): # Validate type Nml2Quantity_none, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_none_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_none_patterns_, )) - validate_Nml2Quantity_none_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?$']] + self.validate_Nml2Quantity_none_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_none_patterns_, + ) + ) + + validate_Nml2Quantity_none_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?$"] + ] + def hasContent_(self): - if ( - super(PulseGeneratorDL, self).hasContent_() - ): + if super(PulseGeneratorDL, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PulseGeneratorDL', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('PulseGeneratorDL') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="PulseGeneratorDL", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("PulseGeneratorDL") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PulseGeneratorDL') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="PulseGeneratorDL", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PulseGeneratorDL', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="PulseGeneratorDL", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PulseGeneratorDL'): - super(PulseGeneratorDL, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, 
name_='PulseGeneratorDL') - if self.delay is not None and 'delay' not in already_processed: - already_processed.add('delay') - outfile.write(' delay=%s' % (quote_attrib(self.delay), )) - if self.duration is not None and 'duration' not in already_processed: - already_processed.add('duration') - outfile.write(' duration=%s' % (quote_attrib(self.duration), )) - if self.amplitude is not None and 'amplitude' not in already_processed: - already_processed.add('amplitude') - outfile.write(' amplitude=%s' % (quote_attrib(self.amplitude), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PulseGeneratorDL', fromsubclass_=False, pretty_print=True): - super(PulseGeneratorDL, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="PulseGeneratorDL", + ): + super(PulseGeneratorDL, self).exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="PulseGeneratorDL", + ) + if self.delay is not None and "delay" not in already_processed: + already_processed.add("delay") + outfile.write(" delay=%s" % (quote_attrib(self.delay),)) + if self.duration is not None and "duration" not in already_processed: + already_processed.add("duration") + outfile.write(" duration=%s" % (quote_attrib(self.duration),)) + if self.amplitude is not None and "amplitude" not in already_processed: + already_processed.add("amplitude") + outfile.write(" amplitude=%s" % (quote_attrib(self.amplitude),)) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="PulseGeneratorDL", + fromsubclass_=False, + pretty_print=True, + ): + super(PulseGeneratorDL, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -10297,112 +18240,208 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('delay', node) - if value is not None and 'delay' not in already_processed: - already_processed.add('delay') + value = find_attr_value_("delay", node) + if value is not None and "delay" not in already_processed: + already_processed.add("delay") self.delay = value - self.validate_Nml2Quantity_time(self.delay) # validate type Nml2Quantity_time - value = find_attr_value_('duration', node) - if value is not None and 'duration' not in already_processed: - already_processed.add('duration') + self.validate_Nml2Quantity_time( + self.delay + ) # validate type Nml2Quantity_time + value = find_attr_value_("duration", node) + if value is not None and "duration" not in already_processed: + already_processed.add("duration") self.duration = value - self.validate_Nml2Quantity_time(self.duration) # validate type Nml2Quantity_time - value = find_attr_value_('amplitude', node) - if value is not None and 'amplitude' not in already_processed: - already_processed.add('amplitude') + self.validate_Nml2Quantity_time( + self.duration + ) # validate type Nml2Quantity_time + value = find_attr_value_("amplitude", node) + if value is not None and "amplitude" not in already_processed: + 
already_processed.add("amplitude") self.amplitude = value - self.validate_Nml2Quantity_none(self.amplitude) # validate type Nml2Quantity_none + self.validate_Nml2Quantity_none( + self.amplitude + ) # validate type Nml2Quantity_none super(PulseGeneratorDL, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(PulseGeneratorDL, self).buildChildren(child_, node, nodeName_, True) pass + + # end class PulseGeneratorDL class PulseGenerator(Standalone): """Generates a constant current pulse of a certain amplitude (with dimensions for current) for a specified duration after a delay.""" + member_data_items_ = [ - MemberSpec_('delay', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('duration', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('amplitude', 'Nml2Quantity_current', 0, 0, {'use': u'required'}), + MemberSpec_("delay", "Nml2Quantity_time", 0, 0, {"use": u"required"}), + MemberSpec_("duration", "Nml2Quantity_time", 0, 0, {"use": u"required"}), + MemberSpec_("amplitude", "Nml2Quantity_current", 0, 0, {"use": u"required"}), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, delay=None, duration=None, amplitude=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + delay=None, + duration=None, + amplitude=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(PulseGenerator, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(PulseGenerator, self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_ + ) self.delay = _cast(None, delay) self.duration = _cast(None, duration) self.amplitude = _cast(None, amplitude) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, PulseGenerator) + subclass = getSubclassFromModule_(CurrentSubclassModule_, PulseGenerator) if subclass is not None: return subclass(*args_, **kwargs_) if PulseGenerator.subclass: return PulseGenerator.subclass(*args_, **kwargs_) else: return PulseGenerator(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] + self.validate_Nml2Quantity_time_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_time_patterns_, + ) + ) + + validate_Nml2Quantity_time_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$"] + ] + def validate_Nml2Quantity_current(self, value): # Validate type Nml2Quantity_current, a restriction on xs:string. 
if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_current_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_current_patterns_, )) - validate_Nml2Quantity_current_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(A|uA|nA|pA)$']] + self.validate_Nml2Quantity_current_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_current_patterns_, + ) + ) + + validate_Nml2Quantity_current_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(A|uA|nA|pA)$"] + ] + def hasContent_(self): - if ( - super(PulseGenerator, self).hasContent_() - ): + if super(PulseGenerator, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PulseGenerator', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('PulseGenerator') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="PulseGenerator", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("PulseGenerator") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PulseGenerator') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="PulseGenerator" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PulseGenerator', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="PulseGenerator", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PulseGenerator'): - super(PulseGenerator, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PulseGenerator') - if self.delay is not None and 'delay' not in already_processed: - already_processed.add('delay') - outfile.write(' delay=%s' % (quote_attrib(self.delay), )) - if self.duration is not None and 'duration' not in already_processed: - already_processed.add('duration') - outfile.write(' duration=%s' % (quote_attrib(self.duration), )) - if self.amplitude is not None and 'amplitude' not in already_processed: - already_processed.add('amplitude') - outfile.write(' amplitude=%s' % (quote_attrib(self.amplitude), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PulseGenerator', fromsubclass_=False, pretty_print=True): - super(PulseGenerator, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % 
(namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="PulseGenerator", + ): + super(PulseGenerator, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="PulseGenerator" + ) + if self.delay is not None and "delay" not in already_processed: + already_processed.add("delay") + outfile.write(" delay=%s" % (quote_attrib(self.delay),)) + if self.duration is not None and "duration" not in already_processed: + already_processed.add("duration") + outfile.write(" duration=%s" % (quote_attrib(self.duration),)) + if self.amplitude is not None and "amplitude" not in already_processed: + already_processed.add("amplitude") + outfile.write(" amplitude=%s" % (quote_attrib(self.amplitude),)) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="PulseGenerator", + fromsubclass_=False, + pretty_print=True, + ): + super(PulseGenerator, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -10410,103 +18449,198 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('delay', node) - if value is not None and 'delay' not in already_processed: - already_processed.add('delay') + value = find_attr_value_("delay", node) + if value is not None and "delay" not in already_processed: + already_processed.add("delay") self.delay = value - self.validate_Nml2Quantity_time(self.delay) # validate type Nml2Quantity_time - value = find_attr_value_('duration', node) - if value is not None and 'duration' not in already_processed: - already_processed.add('duration') + self.validate_Nml2Quantity_time( + self.delay + ) # validate type Nml2Quantity_time + value = find_attr_value_("duration", node) + if value is not None and "duration" not in already_processed: + already_processed.add("duration") self.duration = value - self.validate_Nml2Quantity_time(self.duration) # validate type Nml2Quantity_time - value = find_attr_value_('amplitude', node) - if value is not None and 'amplitude' not in already_processed: - already_processed.add('amplitude') + self.validate_Nml2Quantity_time( + self.duration + ) # validate type Nml2Quantity_time + value = find_attr_value_("amplitude", node) + if value is not None and "amplitude" not in already_processed: + already_processed.add("amplitude") self.amplitude = value - self.validate_Nml2Quantity_current(self.amplitude) # validate type Nml2Quantity_current + self.validate_Nml2Quantity_current( + self.amplitude + ) # validate type Nml2Quantity_current super(PulseGenerator, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(PulseGenerator, self).buildChildren(child_, node, nodeName_, True) pass + + # end class PulseGenerator class ReactionScheme(Base): member_data_items_ = [ - MemberSpec_('source', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('type', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('__ANY__', '__ANY__', 1, 1, {u'maxOccurs': u'unbounded', u'processContents': u'skip', u'minOccurs': u'0'}, None), + MemberSpec_("source", "xs:string", 0, 0, {"use": u"required"}), + 
MemberSpec_("type", "xs:string", 0, 0, {"use": u"required"}), + MemberSpec_( + "__ANY__", + "__ANY__", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"processContents": u"skip", + u"minOccurs": u"0", + }, + None, + ), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, source=None, type=None, anytypeobjs_=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + source=None, + type=None, + anytypeobjs_=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(ReactionScheme, self).__init__(neuro_lex_id, id, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(ReactionScheme, self).__init__(neuro_lex_id, id, **kwargs_) self.source = _cast(None, source) self.type = _cast(None, type) if anytypeobjs_ is None: self.anytypeobjs_ = [] else: self.anytypeobjs_ = anytypeobjs_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, ReactionScheme) + subclass = getSubclassFromModule_(CurrentSubclassModule_, ReactionScheme) if subclass is not None: return subclass(*args_, **kwargs_) if ReactionScheme.subclass: return ReactionScheme.subclass(*args_, **kwargs_) else: return ReactionScheme(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): - if ( - self.anytypeobjs_ or - super(ReactionScheme, self).hasContent_() - ): + if self.anytypeobjs_ or super(ReactionScheme, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ReactionScheme', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('ReactionScheme') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ReactionScheme", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ReactionScheme") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ReactionScheme') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="ReactionScheme" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ReactionScheme', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ReactionScheme", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ReactionScheme'): - super(ReactionScheme, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ReactionScheme') - if self.source is not None and 'source' not in already_processed: - already_processed.add('source') - outfile.write(' source=%s' % 
(self.gds_encode(self.gds_format_string(quote_attrib(self.source), input_name='source')), )) - if self.type is not None and 'type' not in already_processed: - already_processed.add('type') - outfile.write(' type=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.type), input_name='type')), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ReactionScheme', fromsubclass_=False, pretty_print=True): - super(ReactionScheme, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ReactionScheme", + ): + super(ReactionScheme, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="ReactionScheme" + ) + if self.source is not None and "source" not in already_processed: + already_processed.add("source") + outfile.write( + " source=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.source), input_name="source" + ) + ), + ) + ) + if self.type is not None and "type" not in already_processed: + already_processed.add("type") + outfile.write( + " type=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.type), input_name="type" + ) + ), + ) + ) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ReactionScheme", + fromsubclass_=False, + pretty_print=True, + ): + super(ReactionScheme, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" for obj_ in self.anytypeobjs_: obj_.export(outfile, level, namespaceprefix_, pretty_print=pretty_print) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -10514,88 +18648,169 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('source', node) - if value is not None and 'source' not in already_processed: - already_processed.add('source') + value = find_attr_value_("source", node) + if value is not None and "source" not in already_processed: + already_processed.add("source") self.source = value - value = find_attr_value_('type', node) - if value is not None and 'type' not in already_processed: - already_processed.add('type') + value = find_attr_value_("type", node) + if value is not None and "type" not in already_processed: + already_processed.add("type") self.type = value super(ReactionScheme, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - obj_ = self.gds_build_any(child_, 'ReactionScheme') + obj_ = self.gds_build_any(child_, "ReactionScheme") if obj_ is not None: self.add_anytypeobjs_(obj_) super(ReactionScheme, self).buildChildren(child_, node, nodeName_, True) + + # end class ReactionScheme class ExtracellularProperties(Base): member_data_items_ = [ - MemberSpec_('species', 'Species', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Species', u'name': u'species', u'minOccurs': u'0'}, None), + MemberSpec_( + "species", + "Species", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"Species", + u"name": u"species", + 
u"minOccurs": u"0", + }, + None, + ), ] subclass = None superclass = Base + def __init__(self, neuro_lex_id=None, id=None, species=None, **kwargs_): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(ExtracellularProperties, self).__init__(neuro_lex_id, id, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(ExtracellularProperties, self).__init__(neuro_lex_id, id, **kwargs_) if species is None: self.species = [] else: self.species = species + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, ExtracellularProperties) + CurrentSubclassModule_, ExtracellularProperties + ) if subclass is not None: return subclass(*args_, **kwargs_) if ExtracellularProperties.subclass: return ExtracellularProperties.subclass(*args_, **kwargs_) else: return ExtracellularProperties(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): - if ( - self.species or - super(ExtracellularProperties, self).hasContent_() - ): + if self.species or super(ExtracellularProperties, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExtracellularProperties', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('ExtracellularProperties') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ExtracellularProperties", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ExtracellularProperties") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExtracellularProperties') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ExtracellularProperties", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ExtracellularProperties', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ExtracellularProperties", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ExtracellularProperties'): - super(ExtracellularProperties, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExtracellularProperties') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExtracellularProperties', fromsubclass_=False, pretty_print=True): - super(ExtracellularProperties, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def 
exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ExtracellularProperties", + ): + super(ExtracellularProperties, self).exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ExtracellularProperties", + ) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ExtracellularProperties", + fromsubclass_=False, + pretty_print=True, + ): + super(ExtracellularProperties, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" for species_ in self.species: - species_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='species', pretty_print=pretty_print) + species_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="species", + pretty_print=pretty_print, + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -10603,15 +18818,23 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - super(ExtracellularProperties, self).buildAttributes(node, attrs, already_processed) + super(ExtracellularProperties, self).buildAttributes( + node, attrs, already_processed + ) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'species': + if nodeName_ == "species": obj_ = Species.factory(parent_object_=self) obj_.build(child_) self.species.append(obj_) - obj_.original_tagname_ = 'species' - super(ExtracellularProperties, self).buildChildren(child_, node, nodeName_, True) + obj_.original_tagname_ = "species" + super(ExtracellularProperties, self).buildChildren( + child_, node, nodeName_, True + ) + + # end class ExtracellularProperties @@ -10625,96 +18848,194 @@ class ChannelDensityGHK2(Base): should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel element. 
TODO: remove.""" + member_data_items_ = [ - MemberSpec_('ion_channel', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('cond_density', 'Nml2Quantity_conductanceDensity', 0, 1, {'use': u'optional'}), - MemberSpec_('segment_groups', 'NmlId', 0, 1, {'use': u'optional'}), - MemberSpec_('segments', 'NmlId', 0, 1, {'use': u'optional'}), - MemberSpec_('ion', 'NmlId', 0, 0, {'use': u'required'}), + MemberSpec_("ion_channel", "NmlId", 0, 0, {"use": u"required"}), + MemberSpec_( + "cond_density", + "Nml2Quantity_conductanceDensity", + 0, + 1, + {"use": u"optional"}, + ), + MemberSpec_("segment_groups", "NmlId", 0, 1, {"use": u"optional"}), + MemberSpec_("segments", "NmlId", 0, 1, {"use": u"optional"}), + MemberSpec_("ion", "NmlId", 0, 0, {"use": u"required"}), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, ion_channel=None, cond_density=None, segment_groups='all', segments=None, ion=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + ion_channel=None, + cond_density=None, + segment_groups="all", + segments=None, + ion=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(ChannelDensityGHK2, self).__init__(neuro_lex_id, id, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(ChannelDensityGHK2, self).__init__(neuro_lex_id, id, **kwargs_) self.ion_channel = _cast(None, ion_channel) self.cond_density = _cast(None, cond_density) self.segment_groups = _cast(None, segment_groups) self.segments = _cast(None, segments) self.ion = _cast(None, ion) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, ChannelDensityGHK2) + CurrentSubclassModule_, ChannelDensityGHK2 + ) if subclass is not None: return subclass(*args_, **kwargs_) if ChannelDensityGHK2.subclass: return ChannelDensityGHK2.subclass(*args_, **kwargs_) else: return ChannelDensityGHK2(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] + self.validate_NmlId_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [[u"^[a-zA-Z_][a-zA-Z0-9_]*$"]] + def validate_Nml2Quantity_conductanceDensity(self, value): # Validate type Nml2Quantity_conductanceDensity, a restriction on xs:string. 
if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_conductanceDensity_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_conductanceDensity_patterns_, )) - validate_Nml2Quantity_conductanceDensity_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S_per_m2|mS_per_cm2|S_per_cm2)$']] + self.validate_Nml2Quantity_conductanceDensity_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_conductanceDensity_patterns_, + ) + ) + + validate_Nml2Quantity_conductanceDensity_patterns_ = [ + [ + u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S_per_m2|mS_per_cm2|S_per_cm2)$" + ] + ] + def hasContent_(self): - if ( - super(ChannelDensityGHK2, self).hasContent_() - ): + if super(ChannelDensityGHK2, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ChannelDensityGHK2', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('ChannelDensityGHK2') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ChannelDensityGHK2", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ChannelDensityGHK2") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelDensityGHK2') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ChannelDensityGHK2", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ChannelDensityGHK2', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ChannelDensityGHK2'): - super(ChannelDensityGHK2, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelDensityGHK2') - if self.ion_channel is not None and 'ion_channel' not in already_processed: - already_processed.add('ion_channel') - outfile.write(' ionChannel=%s' % (quote_attrib(self.ion_channel), )) - if self.cond_density is not None and 'cond_density' not in already_processed: - already_processed.add('cond_density') - outfile.write(' condDensity=%s' % (quote_attrib(self.cond_density), )) - if self.segment_groups != "all" and 'segment_groups' not in already_processed: - already_processed.add('segment_groups') - outfile.write(' segmentGroup=%s' % (quote_attrib(self.segment_groups), )) - if self.segments is not None and 'segments' not in already_processed: - already_processed.add('segments') - outfile.write(' segment=%s' % (quote_attrib(self.segments), )) - if self.ion is not None and 'ion' not in already_processed: - 
already_processed.add('ion') - outfile.write(' ion=%s' % (quote_attrib(self.ion), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ChannelDensityGHK2', fromsubclass_=False, pretty_print=True): - super(ChannelDensityGHK2, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ChannelDensityGHK2", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ChannelDensityGHK2", + ): + super(ChannelDensityGHK2, self).exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ChannelDensityGHK2", + ) + if self.ion_channel is not None and "ion_channel" not in already_processed: + already_processed.add("ion_channel") + outfile.write(" ionChannel=%s" % (quote_attrib(self.ion_channel),)) + if self.cond_density is not None and "cond_density" not in already_processed: + already_processed.add("cond_density") + outfile.write(" condDensity=%s" % (quote_attrib(self.cond_density),)) + if self.segment_groups != "all" and "segment_groups" not in already_processed: + already_processed.add("segment_groups") + outfile.write(" segmentGroup=%s" % (quote_attrib(self.segment_groups),)) + if self.segments is not None and "segments" not in already_processed: + already_processed.add("segments") + outfile.write(" segment=%s" % (quote_attrib(self.segments),)) + if self.ion is not None and "ion" not in already_processed: + already_processed.add("ion") + outfile.write(" ion=%s" % (quote_attrib(self.ion),)) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ChannelDensityGHK2", + fromsubclass_=False, + pretty_print=True, + ): + super(ChannelDensityGHK2, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) pass + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -10722,36 +19043,42 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('ionChannel', node) - if value is not None and 'ionChannel' not in already_processed: - already_processed.add('ionChannel') + value = find_attr_value_("ionChannel", node) + if value is not None and "ionChannel" not in already_processed: + already_processed.add("ionChannel") self.ion_channel = value - self.validate_NmlId(self.ion_channel) # validate type NmlId - value = find_attr_value_('condDensity', node) - if value is not None and 'condDensity' not in already_processed: - already_processed.add('condDensity') + self.validate_NmlId(self.ion_channel) # validate type NmlId + value = find_attr_value_("condDensity", node) + if value is not None and "condDensity" not in already_processed: + already_processed.add("condDensity") self.cond_density = value - self.validate_Nml2Quantity_conductanceDensity(self.cond_density) # validate type Nml2Quantity_conductanceDensity - value = find_attr_value_('segmentGroup', node) - if value is not None and 'segmentGroup' not in already_processed: - already_processed.add('segmentGroup') + 
self.validate_Nml2Quantity_conductanceDensity( + self.cond_density + ) # validate type Nml2Quantity_conductanceDensity + value = find_attr_value_("segmentGroup", node) + if value is not None and "segmentGroup" not in already_processed: + already_processed.add("segmentGroup") self.segment_groups = value - self.validate_NmlId(self.segment_groups) # validate type NmlId - value = find_attr_value_('segment', node) - if value is not None and 'segment' not in already_processed: - already_processed.add('segment') + self.validate_NmlId(self.segment_groups) # validate type NmlId + value = find_attr_value_("segment", node) + if value is not None and "segment" not in already_processed: + already_processed.add("segment") self.segments = value - self.validate_NmlId(self.segments) # validate type NmlId - value = find_attr_value_('ion', node) - if value is not None and 'ion' not in already_processed: - already_processed.add('ion') + self.validate_NmlId(self.segments) # validate type NmlId + value = find_attr_value_("ion", node) + if value is not None and "ion" not in already_processed: + already_processed.add("ion") self.ion = value - self.validate_NmlId(self.ion) # validate type NmlId + self.validate_NmlId(self.ion) # validate type NmlId super(ChannelDensityGHK2, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(ChannelDensityGHK2, self).buildChildren(child_, node, nodeName_, True) pass + + # end class ChannelDensityGHK2 @@ -10765,96 +19092,188 @@ class ChannelDensityGHK(Base): should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel element. TODO: remove.""" + member_data_items_ = [ - MemberSpec_('ion_channel', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('permeability', 'Nml2Quantity_permeability', 0, 0, {'use': u'required'}), - MemberSpec_('segment_groups', 'NmlId', 0, 1, {'use': u'optional'}), - MemberSpec_('segments', 'NmlId', 0, 1, {'use': u'optional'}), - MemberSpec_('ion', 'NmlId', 0, 0, {'use': u'required'}), + MemberSpec_("ion_channel", "NmlId", 0, 0, {"use": u"required"}), + MemberSpec_( + "permeability", "Nml2Quantity_permeability", 0, 0, {"use": u"required"} + ), + MemberSpec_("segment_groups", "NmlId", 0, 1, {"use": u"optional"}), + MemberSpec_("segments", "NmlId", 0, 1, {"use": u"optional"}), + MemberSpec_("ion", "NmlId", 0, 0, {"use": u"required"}), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, ion_channel=None, permeability=None, segment_groups='all', segments=None, ion=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + ion_channel=None, + permeability=None, + segment_groups="all", + segments=None, + ion=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(ChannelDensityGHK, self).__init__(neuro_lex_id, id, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(ChannelDensityGHK, self).__init__(neuro_lex_id, id, **kwargs_) self.ion_channel = _cast(None, ion_channel) self.permeability = _cast(None, permeability) self.segment_groups = _cast(None, segment_groups) self.segments = _cast(None, segments) self.ion = _cast(None, ion) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, ChannelDensityGHK) + subclass = getSubclassFromModule_(CurrentSubclassModule_, ChannelDensityGHK) if subclass is not None: return subclass(*args_, 
**kwargs_) if ChannelDensityGHK.subclass: return ChannelDensityGHK.subclass(*args_, **kwargs_) else: return ChannelDensityGHK(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] + self.validate_NmlId_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [[u"^[a-zA-Z_][a-zA-Z0-9_]*$"]] + def validate_Nml2Quantity_permeability(self, value): # Validate type Nml2Quantity_permeability, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_permeability_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_permeability_patterns_, )) - validate_Nml2Quantity_permeability_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(m_per_s| um_per_ms|cm_per_s|cm_per_ms)$']] + self.validate_Nml2Quantity_permeability_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_permeability_patterns_, + ) + ) + + validate_Nml2Quantity_permeability_patterns_ = [ + [ + u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(m_per_s| um_per_ms|cm_per_s|cm_per_ms)$" + ] + ] + def hasContent_(self): - if ( - super(ChannelDensityGHK, self).hasContent_() - ): + if super(ChannelDensityGHK, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ChannelDensityGHK', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('ChannelDensityGHK') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ChannelDensityGHK", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ChannelDensityGHK") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelDensityGHK') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ChannelDensityGHK", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ChannelDensityGHK', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ChannelDensityGHK'): - super(ChannelDensityGHK, 
self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelDensityGHK') - if self.ion_channel is not None and 'ion_channel' not in already_processed: - already_processed.add('ion_channel') - outfile.write(' ionChannel=%s' % (quote_attrib(self.ion_channel), )) - if self.permeability is not None and 'permeability' not in already_processed: - already_processed.add('permeability') - outfile.write(' permeability=%s' % (quote_attrib(self.permeability), )) - if self.segment_groups != "all" and 'segment_groups' not in already_processed: - already_processed.add('segment_groups') - outfile.write(' segmentGroup=%s' % (quote_attrib(self.segment_groups), )) - if self.segments is not None and 'segments' not in already_processed: - already_processed.add('segments') - outfile.write(' segment=%s' % (quote_attrib(self.segments), )) - if self.ion is not None and 'ion' not in already_processed: - already_processed.add('ion') - outfile.write(' ion=%s' % (quote_attrib(self.ion), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ChannelDensityGHK', fromsubclass_=False, pretty_print=True): - super(ChannelDensityGHK, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ChannelDensityGHK", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ChannelDensityGHK", + ): + super(ChannelDensityGHK, self).exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ChannelDensityGHK", + ) + if self.ion_channel is not None and "ion_channel" not in already_processed: + already_processed.add("ion_channel") + outfile.write(" ionChannel=%s" % (quote_attrib(self.ion_channel),)) + if self.permeability is not None and "permeability" not in already_processed: + already_processed.add("permeability") + outfile.write(" permeability=%s" % (quote_attrib(self.permeability),)) + if self.segment_groups != "all" and "segment_groups" not in already_processed: + already_processed.add("segment_groups") + outfile.write(" segmentGroup=%s" % (quote_attrib(self.segment_groups),)) + if self.segments is not None and "segments" not in already_processed: + already_processed.add("segments") + outfile.write(" segment=%s" % (quote_attrib(self.segments),)) + if self.ion is not None and "ion" not in already_processed: + already_processed.add("ion") + outfile.write(" ion=%s" % (quote_attrib(self.ion),)) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ChannelDensityGHK", + fromsubclass_=False, + pretty_print=True, + ): + super(ChannelDensityGHK, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) pass + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -10862,36 +19281,42 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('ionChannel', node) - if value is not None and 'ionChannel' not in already_processed: - already_processed.add('ionChannel') + value = 
find_attr_value_("ionChannel", node) + if value is not None and "ionChannel" not in already_processed: + already_processed.add("ionChannel") self.ion_channel = value - self.validate_NmlId(self.ion_channel) # validate type NmlId - value = find_attr_value_('permeability', node) - if value is not None and 'permeability' not in already_processed: - already_processed.add('permeability') + self.validate_NmlId(self.ion_channel) # validate type NmlId + value = find_attr_value_("permeability", node) + if value is not None and "permeability" not in already_processed: + already_processed.add("permeability") self.permeability = value - self.validate_Nml2Quantity_permeability(self.permeability) # validate type Nml2Quantity_permeability - value = find_attr_value_('segmentGroup', node) - if value is not None and 'segmentGroup' not in already_processed: - already_processed.add('segmentGroup') + self.validate_Nml2Quantity_permeability( + self.permeability + ) # validate type Nml2Quantity_permeability + value = find_attr_value_("segmentGroup", node) + if value is not None and "segmentGroup" not in already_processed: + already_processed.add("segmentGroup") self.segment_groups = value - self.validate_NmlId(self.segment_groups) # validate type NmlId - value = find_attr_value_('segment', node) - if value is not None and 'segment' not in already_processed: - already_processed.add('segment') + self.validate_NmlId(self.segment_groups) # validate type NmlId + value = find_attr_value_("segment", node) + if value is not None and "segment" not in already_processed: + already_processed.add("segment") self.segments = value - self.validate_NmlId(self.segments) # validate type NmlId - value = find_attr_value_('ion', node) - if value is not None and 'ion' not in already_processed: - already_processed.add('ion') + self.validate_NmlId(self.segments) # validate type NmlId + value = find_attr_value_("ion", node) + if value is not None and "ion" not in already_processed: + already_processed.add("ion") self.ion = value - self.validate_NmlId(self.ion) # validate type NmlId + self.validate_NmlId(self.ion) # validate type NmlId super(ChannelDensityGHK, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(ChannelDensityGHK, self).buildChildren(child_, node, nodeName_, True) pass + + # end class ChannelDensityGHK @@ -10905,20 +19330,54 @@ class ChannelDensityNernst(Base): should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel element. 
TODO: remove.""" + member_data_items_ = [ - MemberSpec_('ion_channel', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('cond_density', 'Nml2Quantity_conductanceDensity', 0, 1, {'use': u'optional'}), - MemberSpec_('segment_groups', 'NmlId', 0, 1, {'use': u'optional'}), - MemberSpec_('segments', 'NmlId', 0, 1, {'use': u'optional'}), - MemberSpec_('ion', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('variable_parameters', 'VariableParameter', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'VariableParameter', u'name': u'variableParameter', u'minOccurs': u'0'}, None), + MemberSpec_("ion_channel", "NmlId", 0, 0, {"use": u"required"}), + MemberSpec_( + "cond_density", + "Nml2Quantity_conductanceDensity", + 0, + 1, + {"use": u"optional"}, + ), + MemberSpec_("segment_groups", "NmlId", 0, 1, {"use": u"optional"}), + MemberSpec_("segments", "NmlId", 0, 1, {"use": u"optional"}), + MemberSpec_("ion", "NmlId", 0, 0, {"use": u"required"}), + MemberSpec_( + "variable_parameters", + "VariableParameter", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"VariableParameter", + u"name": u"variableParameter", + u"minOccurs": u"0", + }, + None, + ), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, ion_channel=None, cond_density=None, segment_groups='all', segments=None, ion=None, variable_parameters=None, extensiontype_=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + ion_channel=None, + cond_density=None, + segment_groups="all", + segments=None, + ion=None, + variable_parameters=None, + extensiontype_=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(ChannelDensityNernst, self).__init__(neuro_lex_id, id, extensiontype_, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(ChannelDensityNernst, self).__init__( + neuro_lex_id, id, extensiontype_, **kwargs_ + ) self.ion_channel = _cast(None, ion_channel) self.cond_density = _cast(None, cond_density) self.segment_groups = _cast(None, segment_groups) @@ -10929,89 +19388,175 @@ def __init__(self, neuro_lex_id=None, id=None, ion_channel=None, cond_density=No else: self.variable_parameters = variable_parameters self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, ChannelDensityNernst) + CurrentSubclassModule_, ChannelDensityNernst + ) if subclass is not None: return subclass(*args_, **kwargs_) if ChannelDensityNernst.subclass: return ChannelDensityNernst.subclass(*args_, **kwargs_) else: return ChannelDensityNernst(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] + self.validate_NmlId_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [[u"^[a-zA-Z_][a-zA-Z0-9_]*$"]] + def validate_Nml2Quantity_conductanceDensity(self, value): # Validate type Nml2Quantity_conductanceDensity, a restriction on xs:string. 
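For reference, the Nml2Quantity_conductanceDensity restriction checked here accepts a signed decimal with an optional exponent, optional whitespace, and one of the units S_per_m2, mS_per_cm2 or S_per_cm2. A minimal, self-contained check against that same pattern (the sample values are hypothetical):

    import re

    COND_DENSITY = r"^-?([0-9]*(\.[0-9]+)?)([eE]-?[0-9]+)?[\s]*(S_per_m2|mS_per_cm2|S_per_cm2)$"
    print(bool(re.match(COND_DENSITY, "120.0 mS_per_cm2")))  # True: value, space, recognised unit
    print(bool(re.match(COND_DENSITY, "1e-3S_per_cm2")))     # True: exponent form, no space needed
    print(bool(re.match(COND_DENSITY, "120 S_per_km2")))     # False: unit is not in the allowed list
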
if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_conductanceDensity_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_conductanceDensity_patterns_, )) - validate_Nml2Quantity_conductanceDensity_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S_per_m2|mS_per_cm2|S_per_cm2)$']] + self.validate_Nml2Quantity_conductanceDensity_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_conductanceDensity_patterns_, + ) + ) + + validate_Nml2Quantity_conductanceDensity_patterns_ = [ + [ + u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S_per_m2|mS_per_cm2|S_per_cm2)$" + ] + ] + def hasContent_(self): - if ( - self.variable_parameters or - super(ChannelDensityNernst, self).hasContent_() - ): + if self.variable_parameters or super(ChannelDensityNernst, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ChannelDensityNernst', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('ChannelDensityNernst') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ChannelDensityNernst", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ChannelDensityNernst") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelDensityNernst') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ChannelDensityNernst", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ChannelDensityNernst', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ChannelDensityNernst", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ChannelDensityNernst'): - super(ChannelDensityNernst, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelDensityNernst') - if self.ion_channel is not None and 'ion_channel' not in already_processed: - already_processed.add('ion_channel') - outfile.write(' ionChannel=%s' % (quote_attrib(self.ion_channel), )) - if self.cond_density is not None and 'cond_density' not in already_processed: - already_processed.add('cond_density') - outfile.write(' condDensity=%s' % (quote_attrib(self.cond_density), )) - if self.segment_groups != "all" and 'segment_groups' not in already_processed: - already_processed.add('segment_groups') - outfile.write(' 
segmentGroup=%s' % (quote_attrib(self.segment_groups), )) - if self.segments is not None and 'segments' not in already_processed: - already_processed.add('segments') - outfile.write(' segment=%s' % (quote_attrib(self.segments), )) - if self.ion is not None and 'ion' not in already_processed: - already_processed.add('ion') - outfile.write(' ion=%s' % (quote_attrib(self.ion), )) - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ChannelDensityNernst", + ): + super(ChannelDensityNernst, self).exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ChannelDensityNernst", + ) + if self.ion_channel is not None and "ion_channel" not in already_processed: + already_processed.add("ion_channel") + outfile.write(" ionChannel=%s" % (quote_attrib(self.ion_channel),)) + if self.cond_density is not None and "cond_density" not in already_processed: + already_processed.add("cond_density") + outfile.write(" condDensity=%s" % (quote_attrib(self.cond_density),)) + if self.segment_groups != "all" and "segment_groups" not in already_processed: + already_processed.add("segment_groups") + outfile.write(" segmentGroup=%s" % (quote_attrib(self.segment_groups),)) + if self.segments is not None and "segments" not in already_processed: + already_processed.add("segments") + outfile.write(" segment=%s" % (quote_attrib(self.segments),)) + if self.ion is not None and "ion" not in already_processed: + already_processed.add("ion") + outfile.write(" ion=%s" % (quote_attrib(self.ion),)) + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ChannelDensityNernst', fromsubclass_=False, pretty_print=True): - super(ChannelDensityNernst, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ChannelDensityNernst", + fromsubclass_=False, + pretty_print=True, + ): + super(ChannelDensityNernst, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" for variableParameter_ in self.variable_parameters: - variableParameter_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='variableParameter', pretty_print=pretty_print) + variableParameter_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="variableParameter", + pretty_print=pretty_print, + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -11019,44 +19564,52 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('ionChannel', node) - if value is not None and 'ionChannel' not in already_processed: - already_processed.add('ionChannel') + value = find_attr_value_("ionChannel", node) + if value is 
not None and "ionChannel" not in already_processed: + already_processed.add("ionChannel") self.ion_channel = value - self.validate_NmlId(self.ion_channel) # validate type NmlId - value = find_attr_value_('condDensity', node) - if value is not None and 'condDensity' not in already_processed: - already_processed.add('condDensity') + self.validate_NmlId(self.ion_channel) # validate type NmlId + value = find_attr_value_("condDensity", node) + if value is not None and "condDensity" not in already_processed: + already_processed.add("condDensity") self.cond_density = value - self.validate_Nml2Quantity_conductanceDensity(self.cond_density) # validate type Nml2Quantity_conductanceDensity - value = find_attr_value_('segmentGroup', node) - if value is not None and 'segmentGroup' not in already_processed: - already_processed.add('segmentGroup') + self.validate_Nml2Quantity_conductanceDensity( + self.cond_density + ) # validate type Nml2Quantity_conductanceDensity + value = find_attr_value_("segmentGroup", node) + if value is not None and "segmentGroup" not in already_processed: + already_processed.add("segmentGroup") self.segment_groups = value - self.validate_NmlId(self.segment_groups) # validate type NmlId - value = find_attr_value_('segment', node) - if value is not None and 'segment' not in already_processed: - already_processed.add('segment') + self.validate_NmlId(self.segment_groups) # validate type NmlId + value = find_attr_value_("segment", node) + if value is not None and "segment" not in already_processed: + already_processed.add("segment") self.segments = value - self.validate_NmlId(self.segments) # validate type NmlId - value = find_attr_value_('ion', node) - if value is not None and 'ion' not in already_processed: - already_processed.add('ion') + self.validate_NmlId(self.segments) # validate type NmlId + value = find_attr_value_("ion", node) + if value is not None and "ion" not in already_processed: + already_processed.add("ion") self.ion = value - self.validate_NmlId(self.ion) # validate type NmlId - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + self.validate_NmlId(self.ion) # validate type NmlId + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value - super(ChannelDensityNernst, self).buildAttributes(node, attrs, already_processed) + super(ChannelDensityNernst, self).buildAttributes( + node, attrs, already_processed + ) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'variableParameter': + if nodeName_ == "variableParameter": obj_ = VariableParameter.factory(parent_object_=self) obj_.build(child_) self.variable_parameters.append(obj_) - obj_.original_tagname_ = 'variableParameter' + obj_.original_tagname_ = "variableParameter" super(ChannelDensityNernst, self).buildChildren(child_, node, nodeName_, True) + + # end class ChannelDensityNernst @@ -11070,21 +19623,56 @@ class ChannelDensity(Base): should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel element. 
TODO: remove.""" + member_data_items_ = [ - MemberSpec_('ion_channel', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('cond_density', 'Nml2Quantity_conductanceDensity', 0, 1, {'use': u'optional'}), - MemberSpec_('erev', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('segment_groups', 'NmlId', 0, 1, {'use': u'optional'}), - MemberSpec_('segments', 'NonNegativeInteger', 0, 1, {'use': u'optional'}), - MemberSpec_('ion', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('variable_parameters', 'VariableParameter', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'VariableParameter', u'name': u'variableParameter', u'minOccurs': u'0'}, None), + MemberSpec_("ion_channel", "NmlId", 0, 0, {"use": u"required"}), + MemberSpec_( + "cond_density", + "Nml2Quantity_conductanceDensity", + 0, + 1, + {"use": u"optional"}, + ), + MemberSpec_("erev", "Nml2Quantity_voltage", 0, 0, {"use": u"required"}), + MemberSpec_("segment_groups", "NmlId", 0, 1, {"use": u"optional"}), + MemberSpec_("segments", "NonNegativeInteger", 0, 1, {"use": u"optional"}), + MemberSpec_("ion", "NmlId", 0, 0, {"use": u"required"}), + MemberSpec_( + "variable_parameters", + "VariableParameter", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"VariableParameter", + u"name": u"variableParameter", + u"minOccurs": u"0", + }, + None, + ), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, ion_channel=None, cond_density=None, erev=None, segment_groups='all', segments=None, ion=None, variable_parameters=None, extensiontype_=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + ion_channel=None, + cond_density=None, + erev=None, + segment_groups="all", + segments=None, + ion=None, + variable_parameters=None, + extensiontype_=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(ChannelDensity, self).__init__(neuro_lex_id, id, extensiontype_, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(ChannelDensity, self).__init__( + neuro_lex_id, id, extensiontype_, **kwargs_ + ) self.ion_channel = _cast(None, ion_channel) self.cond_density = _cast(None, cond_density) self.erev = _cast(None, erev) @@ -11096,103 +19684,191 @@ def __init__(self, neuro_lex_id=None, id=None, ion_channel=None, cond_density=No else: self.variable_parameters = variable_parameters self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, ChannelDensity) + subclass = getSubclassFromModule_(CurrentSubclassModule_, ChannelDensity) if subclass is not None: return subclass(*args_, **kwargs_) if ChannelDensity.subclass: return ChannelDensity.subclass(*args_, **kwargs_) else: return ChannelDensity(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. 
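The constructor arguments above map directly onto the XML attributes written out by exportAttributes() further down (ionChannel, condDensity, erev, segmentGroup, segment, ion). A minimal usage sketch, assuming the generated classes are importable from the neuroml package; the ids and values below are hypothetical:

    import io
    from neuroml import ChannelDensity  # assumption: generated classes are exposed at package level

    cd = ChannelDensity(
        id="naChans",
        ion_channel="NaConductance",
        cond_density="120.0 mS_per_cm2",
        erev="50.0 mV",
        segment_groups="soma_group",
        ion="na",
    )
    out = io.StringIO()
    cd.export(out, 0, name_="channelDensity")
    # roughly: <channelDensity id="naChans" ionChannel="NaConductance" ... ion="na"/>
    print(out.getvalue())
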
if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] + self.validate_NmlId_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [[u"^[a-zA-Z_][a-zA-Z0-9_]*$"]] + def validate_Nml2Quantity_conductanceDensity(self, value): # Validate type Nml2Quantity_conductanceDensity, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_conductanceDensity_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_conductanceDensity_patterns_, )) - validate_Nml2Quantity_conductanceDensity_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S_per_m2|mS_per_cm2|S_per_cm2)$']] + self.validate_Nml2Quantity_conductanceDensity_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_conductanceDensity_patterns_, + ) + ) + + validate_Nml2Quantity_conductanceDensity_patterns_ = [ + [ + u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S_per_m2|mS_per_cm2|S_per_cm2)$" + ] + ] + def validate_Nml2Quantity_voltage(self, value): # Validate type Nml2Quantity_voltage, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_voltage_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_voltage_patterns_, )) - validate_Nml2Quantity_voltage_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$']] + self.validate_Nml2Quantity_voltage_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_voltage_patterns_, + ) + ) + + validate_Nml2Quantity_voltage_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$"] + ] + def validate_NonNegativeInteger(self, value): # Validate type NonNegativeInteger, a restriction on xs:nonNegativeInteger. 
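The segment attribute is the one member here typed as NonNegativeInteger rather than as a string pattern; buildAttributes() below converts it with int() and turns a ValueError or a negative value into a parse error. The same constraint written out as a standalone sketch, with a hypothetical attribute value:

    raw_segment = "42"           # value as it appears in the XML attribute
    segment = int(raw_segment)   # a ValueError here is reported via raise_parse_error()
    if segment < 0:
        raise ValueError("Invalid NonNegativeInteger")
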
if value is not None and Validate_simpletypes_: pass + def hasContent_(self): - if ( - self.variable_parameters or - super(ChannelDensity, self).hasContent_() - ): + if self.variable_parameters or super(ChannelDensity, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ChannelDensity', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('ChannelDensity') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ChannelDensity", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ChannelDensity") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelDensity') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="ChannelDensity" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ChannelDensity', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ChannelDensity", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ChannelDensity'): - super(ChannelDensity, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelDensity') - if self.ion_channel is not None and 'ion_channel' not in already_processed: - already_processed.add('ion_channel') - outfile.write(' ionChannel=%s' % (quote_attrib(self.ion_channel), )) - if self.cond_density is not None and 'cond_density' not in already_processed: - already_processed.add('cond_density') - outfile.write(' condDensity=%s' % (quote_attrib(self.cond_density), )) - if self.erev is not None and 'erev' not in already_processed: - already_processed.add('erev') - outfile.write(' erev=%s' % (quote_attrib(self.erev), )) - if self.segment_groups != "all" and 'segment_groups' not in already_processed: - already_processed.add('segment_groups') - outfile.write(' segmentGroup=%s' % (quote_attrib(self.segment_groups), )) - if self.segments is not None and 'segments' not in already_processed: - already_processed.add('segments') - outfile.write(' segment=%s' % (quote_attrib(self.segments), )) - if self.ion is not None and 'ion' not in already_processed: - already_processed.add('ion') - outfile.write(' ion=%s' % (quote_attrib(self.ion), )) - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ChannelDensity", + ): + super(ChannelDensity, self).exportAttributes( + 
outfile, level, already_processed, namespaceprefix_, name_="ChannelDensity" + ) + if self.ion_channel is not None and "ion_channel" not in already_processed: + already_processed.add("ion_channel") + outfile.write(" ionChannel=%s" % (quote_attrib(self.ion_channel),)) + if self.cond_density is not None and "cond_density" not in already_processed: + already_processed.add("cond_density") + outfile.write(" condDensity=%s" % (quote_attrib(self.cond_density),)) + if self.erev is not None and "erev" not in already_processed: + already_processed.add("erev") + outfile.write(" erev=%s" % (quote_attrib(self.erev),)) + if self.segment_groups != "all" and "segment_groups" not in already_processed: + already_processed.add("segment_groups") + outfile.write(" segmentGroup=%s" % (quote_attrib(self.segment_groups),)) + if self.segments is not None and "segments" not in already_processed: + already_processed.add("segments") + outfile.write(" segment=%s" % (quote_attrib(self.segments),)) + if self.ion is not None and "ion" not in already_processed: + already_processed.add("ion") + outfile.write(" ion=%s" % (quote_attrib(self.ion),)) + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ChannelDensity', fromsubclass_=False, pretty_print=True): - super(ChannelDensity, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ChannelDensity", + fromsubclass_=False, + pretty_print=True, + ): + super(ChannelDensity, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" for variableParameter_ in self.variable_parameters: - variableParameter_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='variableParameter', pretty_print=pretty_print) + variableParameter_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="variableParameter", + pretty_print=pretty_print, + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -11200,54 +19876,64 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('ionChannel', node) - if value is not None and 'ionChannel' not in already_processed: - already_processed.add('ionChannel') + value = find_attr_value_("ionChannel", node) + if value is not None and "ionChannel" not in already_processed: + already_processed.add("ionChannel") self.ion_channel = value - self.validate_NmlId(self.ion_channel) # validate type NmlId - value = find_attr_value_('condDensity', node) - if value is not None and 'condDensity' not in already_processed: - already_processed.add('condDensity') + self.validate_NmlId(self.ion_channel) # validate type NmlId + value = find_attr_value_("condDensity", node) + if value is not None and "condDensity" not in already_processed: + already_processed.add("condDensity") self.cond_density = value - self.validate_Nml2Quantity_conductanceDensity(self.cond_density) # validate type 
Nml2Quantity_conductanceDensity - value = find_attr_value_('erev', node) - if value is not None and 'erev' not in already_processed: - already_processed.add('erev') + self.validate_Nml2Quantity_conductanceDensity( + self.cond_density + ) # validate type Nml2Quantity_conductanceDensity + value = find_attr_value_("erev", node) + if value is not None and "erev" not in already_processed: + already_processed.add("erev") self.erev = value - self.validate_Nml2Quantity_voltage(self.erev) # validate type Nml2Quantity_voltage - value = find_attr_value_('segmentGroup', node) - if value is not None and 'segmentGroup' not in already_processed: - already_processed.add('segmentGroup') + self.validate_Nml2Quantity_voltage( + self.erev + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("segmentGroup", node) + if value is not None and "segmentGroup" not in already_processed: + already_processed.add("segmentGroup") self.segment_groups = value - self.validate_NmlId(self.segment_groups) # validate type NmlId - value = find_attr_value_('segment', node) - if value is not None and 'segment' not in already_processed: - already_processed.add('segment') + self.validate_NmlId(self.segment_groups) # validate type NmlId + value = find_attr_value_("segment", node) + if value is not None and "segment" not in already_processed: + already_processed.add("segment") try: self.segments = int(value) except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + raise_parse_error(node, "Bad integer attribute: %s" % exp) if self.segments < 0: - raise_parse_error(node, 'Invalid NonNegativeInteger') - self.validate_NonNegativeInteger(self.segments) # validate type NonNegativeInteger - value = find_attr_value_('ion', node) - if value is not None and 'ion' not in already_processed: - already_processed.add('ion') + raise_parse_error(node, "Invalid NonNegativeInteger") + self.validate_NonNegativeInteger( + self.segments + ) # validate type NonNegativeInteger + value = find_attr_value_("ion", node) + if value is not None and "ion" not in already_processed: + already_processed.add("ion") self.ion = value - self.validate_NmlId(self.ion) # validate type NmlId - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + self.validate_NmlId(self.ion) # validate type NmlId + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value super(ChannelDensity, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'variableParameter': + if nodeName_ == "variableParameter": obj_ = VariableParameter.factory(parent_object_=self) obj_.build(child_) self.variable_parameters.append(obj_) - obj_.original_tagname_ = 'variableParameter' + obj_.original_tagname_ = "variableParameter" super(ChannelDensity, self).buildChildren(child_, node, nodeName_, True) + + # end class ChannelDensity @@ -11261,86 +19947,184 @@ class ChannelDensityNonUniformGHK(Base): should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel element. 
TODO: remove.""" + member_data_items_ = [ - MemberSpec_('ion_channel', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('ion', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('variable_parameters', 'VariableParameter', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'VariableParameter', u'name': u'variableParameter', u'minOccurs': u'0'}, None), + MemberSpec_("ion_channel", "NmlId", 0, 0, {"use": u"required"}), + MemberSpec_("ion", "NmlId", 0, 0, {"use": u"required"}), + MemberSpec_( + "variable_parameters", + "VariableParameter", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"VariableParameter", + u"name": u"variableParameter", + u"minOccurs": u"0", + }, + None, + ), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, ion_channel=None, ion=None, variable_parameters=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + ion_channel=None, + ion=None, + variable_parameters=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(ChannelDensityNonUniformGHK, self).__init__(neuro_lex_id, id, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(ChannelDensityNonUniformGHK, self).__init__(neuro_lex_id, id, **kwargs_) self.ion_channel = _cast(None, ion_channel) self.ion = _cast(None, ion) if variable_parameters is None: self.variable_parameters = [] else: self.variable_parameters = variable_parameters + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, ChannelDensityNonUniformGHK) + CurrentSubclassModule_, ChannelDensityNonUniformGHK + ) if subclass is not None: return subclass(*args_, **kwargs_) if ChannelDensityNonUniformGHK.subclass: return ChannelDensityNonUniformGHK.subclass(*args_, **kwargs_) else: return ChannelDensityNonUniformGHK(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. 
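NmlId is the identifier type these classes use for attributes such as ionChannel, segmentGroup and ion: one leading letter or underscore followed by letters, digits or underscores. Checking a few hypothetical ids against the same pattern:

    import re

    NML_ID = r"^[a-zA-Z_][a-zA-Z0-9_]*$"
    print(bool(re.match(NML_ID, "dendrite_group")))  # True
    print(bool(re.match(NML_ID, "_k1")))             # True: leading underscore is allowed
    print(bool(re.match(NML_ID, "2ndChannel")))      # False: must not start with a digit
    print(bool(re.match(NML_ID, "my-channel")))      # False: hyphens are outside the pattern
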
if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] + self.validate_NmlId_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [[u"^[a-zA-Z_][a-zA-Z0-9_]*$"]] + def hasContent_(self): if ( - self.variable_parameters or - super(ChannelDensityNonUniformGHK, self).hasContent_() + self.variable_parameters + or super(ChannelDensityNonUniformGHK, self).hasContent_() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ChannelDensityNonUniformGHK', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('ChannelDensityNonUniformGHK') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ChannelDensityNonUniformGHK", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ChannelDensityNonUniformGHK") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelDensityNonUniformGHK') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ChannelDensityNonUniformGHK", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ChannelDensityNonUniformGHK', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ChannelDensityNonUniformGHK", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ChannelDensityNonUniformGHK'): - super(ChannelDensityNonUniformGHK, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelDensityNonUniformGHK') - if self.ion_channel is not None and 'ion_channel' not in already_processed: - already_processed.add('ion_channel') - outfile.write(' ionChannel=%s' % (quote_attrib(self.ion_channel), )) - if self.ion is not None and 'ion' not in already_processed: - already_processed.add('ion') - outfile.write(' ion=%s' % (quote_attrib(self.ion), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ChannelDensityNonUniformGHK', fromsubclass_=False, pretty_print=True): - super(ChannelDensityNonUniformGHK, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % 
(eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ChannelDensityNonUniformGHK", + ): + super(ChannelDensityNonUniformGHK, self).exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ChannelDensityNonUniformGHK", + ) + if self.ion_channel is not None and "ion_channel" not in already_processed: + already_processed.add("ion_channel") + outfile.write(" ionChannel=%s" % (quote_attrib(self.ion_channel),)) + if self.ion is not None and "ion" not in already_processed: + already_processed.add("ion") + outfile.write(" ion=%s" % (quote_attrib(self.ion),)) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ChannelDensityNonUniformGHK", + fromsubclass_=False, + pretty_print=True, + ): + super(ChannelDensityNonUniformGHK, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" for variableParameter_ in self.variable_parameters: - variableParameter_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='variableParameter', pretty_print=pretty_print) + variableParameter_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="variableParameter", + pretty_print=pretty_print, + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -11348,25 +20132,33 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('ionChannel', node) - if value is not None and 'ionChannel' not in already_processed: - already_processed.add('ionChannel') + value = find_attr_value_("ionChannel", node) + if value is not None and "ionChannel" not in already_processed: + already_processed.add("ionChannel") self.ion_channel = value - self.validate_NmlId(self.ion_channel) # validate type NmlId - value = find_attr_value_('ion', node) - if value is not None and 'ion' not in already_processed: - already_processed.add('ion') + self.validate_NmlId(self.ion_channel) # validate type NmlId + value = find_attr_value_("ion", node) + if value is not None and "ion" not in already_processed: + already_processed.add("ion") self.ion = value - self.validate_NmlId(self.ion) # validate type NmlId - super(ChannelDensityNonUniformGHK, self).buildAttributes(node, attrs, already_processed) + self.validate_NmlId(self.ion) # validate type NmlId + super(ChannelDensityNonUniformGHK, self).buildAttributes( + node, attrs, already_processed + ) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'variableParameter': + if nodeName_ == "variableParameter": obj_ = VariableParameter.factory(parent_object_=self) obj_.build(child_) self.variable_parameters.append(obj_) - obj_.original_tagname_ = 'variableParameter' - super(ChannelDensityNonUniformGHK, self).buildChildren(child_, node, nodeName_, True) + obj_.original_tagname_ = "variableParameter" + super(ChannelDensityNonUniformGHK, self).buildChildren( + child_, node, nodeName_, True + ) + + # end class ChannelDensityNonUniformGHK @@ -11380,86 +20172,188 @@ class ChannelDensityNonUniformNernst(Base): should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel element. 
TODO: remove.""" + member_data_items_ = [ - MemberSpec_('ion_channel', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('ion', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('variable_parameters', 'VariableParameter', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'VariableParameter', u'name': u'variableParameter', u'minOccurs': u'0'}, None), + MemberSpec_("ion_channel", "NmlId", 0, 0, {"use": u"required"}), + MemberSpec_("ion", "NmlId", 0, 0, {"use": u"required"}), + MemberSpec_( + "variable_parameters", + "VariableParameter", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"VariableParameter", + u"name": u"variableParameter", + u"minOccurs": u"0", + }, + None, + ), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, ion_channel=None, ion=None, variable_parameters=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + ion_channel=None, + ion=None, + variable_parameters=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(ChannelDensityNonUniformNernst, self).__init__(neuro_lex_id, id, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(ChannelDensityNonUniformNernst, self).__init__( + neuro_lex_id, id, **kwargs_ + ) self.ion_channel = _cast(None, ion_channel) self.ion = _cast(None, ion) if variable_parameters is None: self.variable_parameters = [] else: self.variable_parameters = variable_parameters + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, ChannelDensityNonUniformNernst) + CurrentSubclassModule_, ChannelDensityNonUniformNernst + ) if subclass is not None: return subclass(*args_, **kwargs_) if ChannelDensityNonUniformNernst.subclass: return ChannelDensityNonUniformNernst.subclass(*args_, **kwargs_) else: return ChannelDensityNonUniformNernst(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. 
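Unlike the uniform density types above, the non-uniform variants carry their density information in nested variableParameter elements, collected in the variable_parameters list and written back out by exportChildren(). A sketch of attaching one child within the same module; no constructor arguments are passed to VariableParameter because its signature is not shown in this hunk, and the ids are hypothetical:

    cdn = ChannelDensityNonUniformNernst(id="kChans_nonuniform", ion_channel="Kv", ion="k")
    vp = VariableParameter.factory(parent_object_=cdn)  # built empty; parsed files populate it from XML
    cdn.variable_parameters.append(vp)
    # exportChildren() will now serialise the entry as a <variableParameter> element.
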
if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] + self.validate_NmlId_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [[u"^[a-zA-Z_][a-zA-Z0-9_]*$"]] + def hasContent_(self): if ( - self.variable_parameters or - super(ChannelDensityNonUniformNernst, self).hasContent_() + self.variable_parameters + or super(ChannelDensityNonUniformNernst, self).hasContent_() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ChannelDensityNonUniformNernst', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('ChannelDensityNonUniformNernst') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ChannelDensityNonUniformNernst", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get( + "ChannelDensityNonUniformNernst" + ) if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelDensityNonUniformNernst') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ChannelDensityNonUniformNernst", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ChannelDensityNonUniformNernst', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ChannelDensityNonUniformNernst", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ChannelDensityNonUniformNernst'): - super(ChannelDensityNonUniformNernst, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelDensityNonUniformNernst') - if self.ion_channel is not None and 'ion_channel' not in already_processed: - already_processed.add('ion_channel') - outfile.write(' ionChannel=%s' % (quote_attrib(self.ion_channel), )) - if self.ion is not None and 'ion' not in already_processed: - already_processed.add('ion') - outfile.write(' ion=%s' % (quote_attrib(self.ion), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ChannelDensityNonUniformNernst', fromsubclass_=False, pretty_print=True): - super(ChannelDensityNonUniformNernst, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % 
(namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ChannelDensityNonUniformNernst", + ): + super(ChannelDensityNonUniformNernst, self).exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ChannelDensityNonUniformNernst", + ) + if self.ion_channel is not None and "ion_channel" not in already_processed: + already_processed.add("ion_channel") + outfile.write(" ionChannel=%s" % (quote_attrib(self.ion_channel),)) + if self.ion is not None and "ion" not in already_processed: + already_processed.add("ion") + outfile.write(" ion=%s" % (quote_attrib(self.ion),)) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ChannelDensityNonUniformNernst", + fromsubclass_=False, + pretty_print=True, + ): + super(ChannelDensityNonUniformNernst, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" for variableParameter_ in self.variable_parameters: - variableParameter_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='variableParameter', pretty_print=pretty_print) + variableParameter_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="variableParameter", + pretty_print=pretty_print, + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -11467,25 +20361,33 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('ionChannel', node) - if value is not None and 'ionChannel' not in already_processed: - already_processed.add('ionChannel') + value = find_attr_value_("ionChannel", node) + if value is not None and "ionChannel" not in already_processed: + already_processed.add("ionChannel") self.ion_channel = value - self.validate_NmlId(self.ion_channel) # validate type NmlId - value = find_attr_value_('ion', node) - if value is not None and 'ion' not in already_processed: - already_processed.add('ion') + self.validate_NmlId(self.ion_channel) # validate type NmlId + value = find_attr_value_("ion", node) + if value is not None and "ion" not in already_processed: + already_processed.add("ion") self.ion = value - self.validate_NmlId(self.ion) # validate type NmlId - super(ChannelDensityNonUniformNernst, self).buildAttributes(node, attrs, already_processed) + self.validate_NmlId(self.ion) # validate type NmlId + super(ChannelDensityNonUniformNernst, self).buildAttributes( + node, attrs, already_processed + ) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'variableParameter': + if nodeName_ == "variableParameter": obj_ = VariableParameter.factory(parent_object_=self) obj_.build(child_) self.variable_parameters.append(obj_) - obj_.original_tagname_ = 'variableParameter' - super(ChannelDensityNonUniformNernst, self).buildChildren(child_, node, nodeName_, True) + obj_.original_tagname_ = "variableParameter" + super(ChannelDensityNonUniformNernst, self).buildChildren( + child_, node, nodeName_, True + ) + + # end class ChannelDensityNonUniformNernst @@ -11499,18 +20401,41 @@ class ChannelDensityNonUniform(Base): should be removed in the longer term, due to possible inconsistencies in 
this value and that in the ionChannel element. TODO: remove.""" + member_data_items_ = [ - MemberSpec_('ion_channel', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('erev', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('ion', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('variable_parameters', 'VariableParameter', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'VariableParameter', u'name': u'variableParameter', u'minOccurs': u'0'}, None), + MemberSpec_("ion_channel", "NmlId", 0, 0, {"use": u"required"}), + MemberSpec_("erev", "Nml2Quantity_voltage", 0, 0, {"use": u"required"}), + MemberSpec_("ion", "NmlId", 0, 0, {"use": u"required"}), + MemberSpec_( + "variable_parameters", + "VariableParameter", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"VariableParameter", + u"name": u"variableParameter", + u"minOccurs": u"0", + }, + None, + ), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, ion_channel=None, erev=None, ion=None, variable_parameters=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + ion_channel=None, + erev=None, + ion=None, + variable_parameters=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(ChannelDensityNonUniform, self).__init__(neuro_lex_id, id, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(ChannelDensityNonUniform, self).__init__(neuro_lex_id, id, **kwargs_) self.ion_channel = _cast(None, ion_channel) self.erev = _cast(None, erev) self.ion = _cast(None, ion) @@ -11518,79 +20443,166 @@ def __init__(self, neuro_lex_id=None, id=None, ion_channel=None, erev=None, ion= self.variable_parameters = [] else: self.variable_parameters = variable_parameters + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, ChannelDensityNonUniform) + CurrentSubclassModule_, ChannelDensityNonUniform + ) if subclass is not None: return subclass(*args_, **kwargs_) if ChannelDensityNonUniform.subclass: return ChannelDensityNonUniform.subclass(*args_, **kwargs_) else: return ChannelDensityNonUniform(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] + self.validate_NmlId_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [[u"^[a-zA-Z_][a-zA-Z0-9_]*$"]] + def validate_Nml2Quantity_voltage(self, value): # Validate type Nml2Quantity_voltage, a restriction on xs:string. 
if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_voltage_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_voltage_patterns_, )) - validate_Nml2Quantity_voltage_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$']] + self.validate_Nml2Quantity_voltage_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_voltage_patterns_, + ) + ) + + validate_Nml2Quantity_voltage_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$"] + ] + def hasContent_(self): if ( - self.variable_parameters or - super(ChannelDensityNonUniform, self).hasContent_() + self.variable_parameters + or super(ChannelDensityNonUniform, self).hasContent_() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ChannelDensityNonUniform', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('ChannelDensityNonUniform') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ChannelDensityNonUniform", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ChannelDensityNonUniform") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelDensityNonUniform') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ChannelDensityNonUniform", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ChannelDensityNonUniform', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ChannelDensityNonUniform", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ChannelDensityNonUniform'): - super(ChannelDensityNonUniform, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelDensityNonUniform') - if self.ion_channel is not None and 'ion_channel' not in already_processed: - already_processed.add('ion_channel') - outfile.write(' ionChannel=%s' % (quote_attrib(self.ion_channel), )) - if self.erev is not None and 'erev' not in already_processed: - already_processed.add('erev') - outfile.write(' erev=%s' % (quote_attrib(self.erev), )) - if self.ion is not None and 'ion' not in already_processed: - already_processed.add('ion') - outfile.write(' ion=%s' % (quote_attrib(self.ion), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ChannelDensityNonUniform', 
fromsubclass_=False, pretty_print=True): - super(ChannelDensityNonUniform, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ChannelDensityNonUniform", + ): + super(ChannelDensityNonUniform, self).exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ChannelDensityNonUniform", + ) + if self.ion_channel is not None and "ion_channel" not in already_processed: + already_processed.add("ion_channel") + outfile.write(" ionChannel=%s" % (quote_attrib(self.ion_channel),)) + if self.erev is not None and "erev" not in already_processed: + already_processed.add("erev") + outfile.write(" erev=%s" % (quote_attrib(self.erev),)) + if self.ion is not None and "ion" not in already_processed: + already_processed.add("ion") + outfile.write(" ion=%s" % (quote_attrib(self.ion),)) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ChannelDensityNonUniform", + fromsubclass_=False, + pretty_print=True, + ): + super(ChannelDensityNonUniform, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" for variableParameter_ in self.variable_parameters: - variableParameter_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='variableParameter', pretty_print=pretty_print) + variableParameter_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="variableParameter", + pretty_print=pretty_print, + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -11598,30 +20610,40 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('ionChannel', node) - if value is not None and 'ionChannel' not in already_processed: - already_processed.add('ionChannel') + value = find_attr_value_("ionChannel", node) + if value is not None and "ionChannel" not in already_processed: + already_processed.add("ionChannel") self.ion_channel = value - self.validate_NmlId(self.ion_channel) # validate type NmlId - value = find_attr_value_('erev', node) - if value is not None and 'erev' not in already_processed: - already_processed.add('erev') + self.validate_NmlId(self.ion_channel) # validate type NmlId + value = find_attr_value_("erev", node) + if value is not None and "erev" not in already_processed: + already_processed.add("erev") self.erev = value - self.validate_Nml2Quantity_voltage(self.erev) # validate type Nml2Quantity_voltage - value = find_attr_value_('ion', node) - if value is not None and 'ion' not in already_processed: - already_processed.add('ion') + self.validate_Nml2Quantity_voltage( + self.erev + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("ion", node) + if value is not None and "ion" not in already_processed: + already_processed.add("ion") self.ion = value - self.validate_NmlId(self.ion) # validate type NmlId - super(ChannelDensityNonUniform, self).buildAttributes(node, attrs, already_processed) + self.validate_NmlId(self.ion) # validate type NmlId + super(ChannelDensityNonUniform, self).buildAttributes( + 
node, attrs, already_processed + ) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'variableParameter': + if nodeName_ == "variableParameter": obj_ = VariableParameter.factory(parent_object_=self) obj_.build(child_) self.variable_parameters.append(obj_) - obj_.original_tagname_ = 'variableParameter' - super(ChannelDensityNonUniform, self).buildChildren(child_, node, nodeName_, True) + obj_.original_tagname_ = "variableParameter" + super(ChannelDensityNonUniform, self).buildChildren( + child_, node, nodeName_, True + ) + + # end class ChannelDensityNonUniform @@ -11635,21 +20657,47 @@ class ChannelPopulation(Base): should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel element. TODO: remove.""" + member_data_items_ = [ - MemberSpec_('ion_channel', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('number', 'NonNegativeInteger', 0, 0, {'use': u'required'}), - MemberSpec_('erev', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('segment_groups', 'NmlId', 0, 1, {'use': u'optional'}), - MemberSpec_('segments', 'NonNegativeInteger', 0, 1, {'use': u'optional'}), - MemberSpec_('ion', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('variable_parameters', 'VariableParameter', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'VariableParameter', u'name': u'variableParameter', u'minOccurs': u'0'}, None), + MemberSpec_("ion_channel", "NmlId", 0, 0, {"use": u"required"}), + MemberSpec_("number", "NonNegativeInteger", 0, 0, {"use": u"required"}), + MemberSpec_("erev", "Nml2Quantity_voltage", 0, 0, {"use": u"required"}), + MemberSpec_("segment_groups", "NmlId", 0, 1, {"use": u"optional"}), + MemberSpec_("segments", "NonNegativeInteger", 0, 1, {"use": u"optional"}), + MemberSpec_("ion", "NmlId", 0, 0, {"use": u"required"}), + MemberSpec_( + "variable_parameters", + "VariableParameter", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"VariableParameter", + u"name": u"variableParameter", + u"minOccurs": u"0", + }, + None, + ), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, ion_channel=None, number=None, erev=None, segment_groups='all', segments=None, ion=None, variable_parameters=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + ion_channel=None, + number=None, + erev=None, + segment_groups="all", + segments=None, + ion=None, + variable_parameters=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(ChannelPopulation, self).__init__(neuro_lex_id, id, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(ChannelPopulation, self).__init__(neuro_lex_id, id, **kwargs_) self.ion_channel = _cast(None, ion_channel) self.number = _cast(int, number) self.erev = _cast(None, erev) @@ -11660,92 +20708,175 @@ def __init__(self, neuro_lex_id=None, id=None, ion_channel=None, number=None, er self.variable_parameters = [] else: self.variable_parameters = variable_parameters + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, ChannelPopulation) + subclass = getSubclassFromModule_(CurrentSubclassModule_, ChannelPopulation) if subclass is not None: return subclass(*args_, **kwargs_) if ChannelPopulation.subclass: return ChannelPopulation.subclass(*args_, **kwargs_) else: return ChannelPopulation(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NmlId(self, 
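For orientation, a minimal usage sketch of the ChannelDensityNonUniform class reformatted above, assuming the standard "from neuroml import ..." entry point; the ion channel id and values are placeholders, and the VariableParameter attribute name "parameter" is taken from the NeuroML schema rather than this hunk:

    import sys
    from neuroml import ChannelDensityNonUniform, VariableParameter

    # Attribute names follow the MemberSpec_ entries above; ion_channel, erev and
    # ion are written out as the ionChannel/erev/ion XML attributes by exportAttributes.
    cd = ChannelDensityNonUniform(
        id="non_uniform_na",
        ion_channel="NaConductance",   # placeholder channel id
        erev="50 mV",                  # must match the Nml2Quantity_voltage pattern
        ion="na",
    )
    cd.variable_parameters.append(VariableParameter(parameter="condDensity"))
    # Serialize using the export() method defined in this hunk.
    cd.export(sys.stdout, 0, name_="channelDensityNonUniform")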
value): # Validate type NmlId, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] + self.validate_NmlId_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [[u"^[a-zA-Z_][a-zA-Z0-9_]*$"]] + def validate_NonNegativeInteger(self, value): # Validate type NonNegativeInteger, a restriction on xs:nonNegativeInteger. if value is not None and Validate_simpletypes_: pass + def validate_Nml2Quantity_voltage(self, value): # Validate type Nml2Quantity_voltage, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_voltage_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_voltage_patterns_, )) - validate_Nml2Quantity_voltage_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$']] + self.validate_Nml2Quantity_voltage_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_voltage_patterns_, + ) + ) + + validate_Nml2Quantity_voltage_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$"] + ] + def hasContent_(self): - if ( - self.variable_parameters or - super(ChannelPopulation, self).hasContent_() - ): + if self.variable_parameters or super(ChannelPopulation, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ChannelPopulation', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('ChannelPopulation') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ChannelPopulation", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ChannelPopulation") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelPopulation') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ChannelPopulation", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ChannelPopulation', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ChannelPopulation", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - 
def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ChannelPopulation'): - super(ChannelPopulation, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelPopulation') - if self.ion_channel is not None and 'ion_channel' not in already_processed: - already_processed.add('ion_channel') - outfile.write(' ionChannel=%s' % (quote_attrib(self.ion_channel), )) - if self.number is not None and 'number' not in already_processed: - already_processed.add('number') - outfile.write(' number=%s' % (quote_attrib(self.number), )) - if self.erev is not None and 'erev' not in already_processed: - already_processed.add('erev') - outfile.write(' erev=%s' % (quote_attrib(self.erev), )) - if self.segment_groups != "all" and 'segment_groups' not in already_processed: - already_processed.add('segment_groups') - outfile.write(' segmentGroup=%s' % (quote_attrib(self.segment_groups), )) - if self.segments is not None and 'segments' not in already_processed: - already_processed.add('segments') - outfile.write(' segment=%s' % (quote_attrib(self.segments), )) - if self.ion is not None and 'ion' not in already_processed: - already_processed.add('ion') - outfile.write(' ion=%s' % (quote_attrib(self.ion), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ChannelPopulation', fromsubclass_=False, pretty_print=True): - super(ChannelPopulation, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ChannelPopulation", + ): + super(ChannelPopulation, self).exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ChannelPopulation", + ) + if self.ion_channel is not None and "ion_channel" not in already_processed: + already_processed.add("ion_channel") + outfile.write(" ionChannel=%s" % (quote_attrib(self.ion_channel),)) + if self.number is not None and "number" not in already_processed: + already_processed.add("number") + outfile.write(" number=%s" % (quote_attrib(self.number),)) + if self.erev is not None and "erev" not in already_processed: + already_processed.add("erev") + outfile.write(" erev=%s" % (quote_attrib(self.erev),)) + if self.segment_groups != "all" and "segment_groups" not in already_processed: + already_processed.add("segment_groups") + outfile.write(" segmentGroup=%s" % (quote_attrib(self.segment_groups),)) + if self.segments is not None and "segments" not in already_processed: + already_processed.add("segments") + outfile.write(" segment=%s" % (quote_attrib(self.segments),)) + if self.ion is not None and "ion" not in already_processed: + already_processed.add("ion") + outfile.write(" ion=%s" % (quote_attrib(self.ion),)) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ChannelPopulation", + fromsubclass_=False, + pretty_print=True, + ): + super(ChannelPopulation, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" for variableParameter_ in self.variable_parameters: - variableParameter_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='variableParameter', pretty_print=pretty_print) + variableParameter_.export( + outfile, + level, + 
namespaceprefix_, + namespacedef_="", + name_="variableParameter", + pretty_print=pretty_print, + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -11753,131 +20884,270 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('ionChannel', node) - if value is not None and 'ionChannel' not in already_processed: - already_processed.add('ionChannel') + value = find_attr_value_("ionChannel", node) + if value is not None and "ionChannel" not in already_processed: + already_processed.add("ionChannel") self.ion_channel = value - self.validate_NmlId(self.ion_channel) # validate type NmlId - value = find_attr_value_('number', node) - if value is not None and 'number' not in already_processed: - already_processed.add('number') + self.validate_NmlId(self.ion_channel) # validate type NmlId + value = find_attr_value_("number", node) + if value is not None and "number" not in already_processed: + already_processed.add("number") try: self.number = int(value) except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + raise_parse_error(node, "Bad integer attribute: %s" % exp) if self.number < 0: - raise_parse_error(node, 'Invalid NonNegativeInteger') - self.validate_NonNegativeInteger(self.number) # validate type NonNegativeInteger - value = find_attr_value_('erev', node) - if value is not None and 'erev' not in already_processed: - already_processed.add('erev') + raise_parse_error(node, "Invalid NonNegativeInteger") + self.validate_NonNegativeInteger( + self.number + ) # validate type NonNegativeInteger + value = find_attr_value_("erev", node) + if value is not None and "erev" not in already_processed: + already_processed.add("erev") self.erev = value - self.validate_Nml2Quantity_voltage(self.erev) # validate type Nml2Quantity_voltage - value = find_attr_value_('segmentGroup', node) - if value is not None and 'segmentGroup' not in already_processed: - already_processed.add('segmentGroup') + self.validate_Nml2Quantity_voltage( + self.erev + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("segmentGroup", node) + if value is not None and "segmentGroup" not in already_processed: + already_processed.add("segmentGroup") self.segment_groups = value - self.validate_NmlId(self.segment_groups) # validate type NmlId - value = find_attr_value_('segment', node) - if value is not None and 'segment' not in already_processed: - already_processed.add('segment') + self.validate_NmlId(self.segment_groups) # validate type NmlId + value = find_attr_value_("segment", node) + if value is not None and "segment" not in already_processed: + already_processed.add("segment") try: self.segments = int(value) except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + raise_parse_error(node, "Bad integer attribute: %s" % exp) if self.segments < 0: - raise_parse_error(node, 'Invalid NonNegativeInteger') - self.validate_NonNegativeInteger(self.segments) # validate type NonNegativeInteger - value = find_attr_value_('ion', node) - if value is not None and 'ion' not in already_processed: - already_processed.add('ion') + raise_parse_error(node, "Invalid NonNegativeInteger") + self.validate_NonNegativeInteger( + self.segments + ) # validate type NonNegativeInteger + value = find_attr_value_("ion", node) + if value is not None and "ion" 
not in already_processed: + already_processed.add("ion") self.ion = value - self.validate_NmlId(self.ion) # validate type NmlId + self.validate_NmlId(self.ion) # validate type NmlId super(ChannelPopulation, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'variableParameter': + if nodeName_ == "variableParameter": obj_ = VariableParameter.factory(parent_object_=self) obj_.build(child_) self.variable_parameters.append(obj_) - obj_.original_tagname_ = 'variableParameter' + obj_.original_tagname_ = "variableParameter" super(ChannelPopulation, self).buildChildren(child_, node, nodeName_, True) + + # end class ChannelPopulation class BiophysicalProperties2CaPools(Standalone): """Standalone element which is usually inside a single cell, but could be outside and referenced by id.""" + member_data_items_ = [ - MemberSpec_('membrane_properties2_ca_pools', 'MembraneProperties2CaPools', 0, 0, {u'type': u'MembraneProperties2CaPools', u'name': u'membraneProperties2CaPools'}, None), - MemberSpec_('intracellular_properties2_ca_pools', 'IntracellularProperties2CaPools', 0, 1, {u'type': u'IntracellularProperties2CaPools', u'name': u'intracellularProperties2CaPools', u'minOccurs': u'0'}, None), - MemberSpec_('extracellular_properties', 'ExtracellularProperties', 0, 1, {u'type': u'ExtracellularProperties', u'name': u'extracellularProperties', u'minOccurs': u'0'}, None), + MemberSpec_( + "membrane_properties2_ca_pools", + "MembraneProperties2CaPools", + 0, + 0, + { + u"type": u"MembraneProperties2CaPools", + u"name": u"membraneProperties2CaPools", + }, + None, + ), + MemberSpec_( + "intracellular_properties2_ca_pools", + "IntracellularProperties2CaPools", + 0, + 1, + { + u"type": u"IntracellularProperties2CaPools", + u"name": u"intracellularProperties2CaPools", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "extracellular_properties", + "ExtracellularProperties", + 0, + 1, + { + u"type": u"ExtracellularProperties", + u"name": u"extracellularProperties", + u"minOccurs": u"0", + }, + None, + ), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, membrane_properties2_ca_pools=None, intracellular_properties2_ca_pools=None, extracellular_properties=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + membrane_properties2_ca_pools=None, + intracellular_properties2_ca_pools=None, + extracellular_properties=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(BiophysicalProperties2CaPools, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(BiophysicalProperties2CaPools, self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_ + ) self.membrane_properties2_ca_pools = membrane_properties2_ca_pools self.intracellular_properties2_ca_pools = intracellular_properties2_ca_pools self.extracellular_properties = extracellular_properties + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, BiophysicalProperties2CaPools) + CurrentSubclassModule_, BiophysicalProperties2CaPools + ) if subclass is not None: return subclass(*args_, **kwargs_) if 
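Similarly, a hedged sketch of round-tripping the ChannelPopulation class above through build() and export(); it assumes build() accepts a plain ElementTree element, as the generateDS-style code suggests, and the ids are placeholders:

    import sys
    from xml.etree import ElementTree as ET
    from neuroml import ChannelPopulation

    xml = '<channelPopulation id="naChanPop" ionChannel="NaConductance" number="1200" erev="50mV" ion="na"/>'
    pop = ChannelPopulation.factory()
    pop.build(ET.fromstring(xml))          # buildAttributes casts number to int
    assert pop.number == 1200
    assert pop.segment_groups == "all"     # default from __init__, serialized as segmentGroup
    pop.export(sys.stdout, 0, name_="channelPopulation")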
BiophysicalProperties2CaPools.subclass: return BiophysicalProperties2CaPools.subclass(*args_, **kwargs_) else: return BiophysicalProperties2CaPools(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): if ( - self.membrane_properties2_ca_pools is not None or - self.intracellular_properties2_ca_pools is not None or - self.extracellular_properties is not None or - super(BiophysicalProperties2CaPools, self).hasContent_() + self.membrane_properties2_ca_pools is not None + or self.intracellular_properties2_ca_pools is not None + or self.extracellular_properties is not None + or super(BiophysicalProperties2CaPools, self).hasContent_() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BiophysicalProperties2CaPools', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('BiophysicalProperties2CaPools') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="BiophysicalProperties2CaPools", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("BiophysicalProperties2CaPools") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BiophysicalProperties2CaPools') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="BiophysicalProperties2CaPools", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BiophysicalProperties2CaPools', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="BiophysicalProperties2CaPools", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BiophysicalProperties2CaPools'): - super(BiophysicalProperties2CaPools, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BiophysicalProperties2CaPools') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BiophysicalProperties2CaPools', fromsubclass_=False, pretty_print=True): - super(BiophysicalProperties2CaPools, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="BiophysicalProperties2CaPools", + ): + super(BiophysicalProperties2CaPools, self).exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="BiophysicalProperties2CaPools", + ) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + 
name_="BiophysicalProperties2CaPools", + fromsubclass_=False, + pretty_print=True, + ): + super(BiophysicalProperties2CaPools, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.membrane_properties2_ca_pools is not None: - self.membrane_properties2_ca_pools.export(outfile, level, namespaceprefix_, namespacedef_='', name_='membraneProperties2CaPools', pretty_print=pretty_print) + self.membrane_properties2_ca_pools.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="membraneProperties2CaPools", + pretty_print=pretty_print, + ) if self.intracellular_properties2_ca_pools is not None: - self.intracellular_properties2_ca_pools.export(outfile, level, namespaceprefix_, namespacedef_='', name_='intracellularProperties2CaPools', pretty_print=pretty_print) + self.intracellular_properties2_ca_pools.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="intracellularProperties2CaPools", + pretty_print=pretty_print, + ) if self.extracellular_properties is not None: - self.extracellular_properties.export(outfile, level, namespaceprefix_, namespacedef_='', name_='extracellularProperties', pretty_print=pretty_print) + self.extracellular_properties.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="extracellularProperties", + pretty_print=pretty_print, + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -11885,101 +21155,235 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - super(BiophysicalProperties2CaPools, self).buildAttributes(node, attrs, already_processed) + super(BiophysicalProperties2CaPools, self).buildAttributes( + node, attrs, already_processed + ) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'membraneProperties2CaPools': + if nodeName_ == "membraneProperties2CaPools": obj_ = MembraneProperties2CaPools.factory(parent_object_=self) obj_.build(child_) self.membrane_properties2_ca_pools = obj_ - obj_.original_tagname_ = 'membraneProperties2CaPools' - elif nodeName_ == 'intracellularProperties2CaPools': + obj_.original_tagname_ = "membraneProperties2CaPools" + elif nodeName_ == "intracellularProperties2CaPools": obj_ = IntracellularProperties2CaPools.factory(parent_object_=self) obj_.build(child_) self.intracellular_properties2_ca_pools = obj_ - obj_.original_tagname_ = 'intracellularProperties2CaPools' - elif nodeName_ == 'extracellularProperties': + obj_.original_tagname_ = "intracellularProperties2CaPools" + elif nodeName_ == "extracellularProperties": obj_ = ExtracellularProperties.factory(parent_object_=self) obj_.build(child_) self.extracellular_properties = obj_ - obj_.original_tagname_ = 'extracellularProperties' - super(BiophysicalProperties2CaPools, self).buildChildren(child_, node, nodeName_, True) + obj_.original_tagname_ = "extracellularProperties" + super(BiophysicalProperties2CaPools, self).buildChildren( + child_, node, nodeName_, True + ) + + # end class BiophysicalProperties2CaPools class BiophysicalProperties(Standalone): """Standalone element which is usually inside a single cell, but could be outside and referenced by id.""" + member_data_items_ = [ - MemberSpec_('membrane_properties', 'MembraneProperties', 0, 0, 
{u'type': u'MembraneProperties', u'name': u'membraneProperties'}, None), - MemberSpec_('intracellular_properties', 'IntracellularProperties', 0, 1, {u'type': u'IntracellularProperties', u'name': u'intracellularProperties', u'minOccurs': u'0'}, None), - MemberSpec_('extracellular_properties', 'ExtracellularProperties', 0, 1, {u'type': u'ExtracellularProperties', u'name': u'extracellularProperties', u'minOccurs': u'0'}, None), + MemberSpec_( + "membrane_properties", + "MembraneProperties", + 0, + 0, + {u"type": u"MembraneProperties", u"name": u"membraneProperties"}, + None, + ), + MemberSpec_( + "intracellular_properties", + "IntracellularProperties", + 0, + 1, + { + u"type": u"IntracellularProperties", + u"name": u"intracellularProperties", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "extracellular_properties", + "ExtracellularProperties", + 0, + 1, + { + u"type": u"ExtracellularProperties", + u"name": u"extracellularProperties", + u"minOccurs": u"0", + }, + None, + ), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, membrane_properties=None, intracellular_properties=None, extracellular_properties=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + membrane_properties=None, + intracellular_properties=None, + extracellular_properties=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(BiophysicalProperties, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(BiophysicalProperties, self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_ + ) self.membrane_properties = membrane_properties self.intracellular_properties = intracellular_properties self.extracellular_properties = extracellular_properties + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, BiophysicalProperties) + CurrentSubclassModule_, BiophysicalProperties + ) if subclass is not None: return subclass(*args_, **kwargs_) if BiophysicalProperties.subclass: return BiophysicalProperties.subclass(*args_, **kwargs_) else: return BiophysicalProperties(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): if ( - self.membrane_properties is not None or - self.intracellular_properties is not None or - self.extracellular_properties is not None or - super(BiophysicalProperties, self).hasContent_() + self.membrane_properties is not None + or self.intracellular_properties is not None + or self.extracellular_properties is not None + or super(BiophysicalProperties, self).hasContent_() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BiophysicalProperties', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('BiophysicalProperties') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="BiophysicalProperties", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("BiophysicalProperties") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, 
level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BiophysicalProperties') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="BiophysicalProperties", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BiophysicalProperties', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="BiophysicalProperties", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BiophysicalProperties'): - super(BiophysicalProperties, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BiophysicalProperties') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BiophysicalProperties', fromsubclass_=False, pretty_print=True): - super(BiophysicalProperties, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="BiophysicalProperties", + ): + super(BiophysicalProperties, self).exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="BiophysicalProperties", + ) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="BiophysicalProperties", + fromsubclass_=False, + pretty_print=True, + ): + super(BiophysicalProperties, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.membrane_properties is not None: - self.membrane_properties.export(outfile, level, namespaceprefix_, namespacedef_='', name_='membraneProperties', pretty_print=pretty_print) + self.membrane_properties.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="membraneProperties", + pretty_print=pretty_print, + ) if self.intracellular_properties is not None: - self.intracellular_properties.export(outfile, level, namespaceprefix_, namespacedef_='', name_='intracellularProperties', pretty_print=pretty_print) + self.intracellular_properties.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="intracellularProperties", + pretty_print=pretty_print, + ) if self.extracellular_properties is not None: - self.extracellular_properties.export(outfile, level, namespaceprefix_, namespacedef_='', name_='extracellularProperties', pretty_print=pretty_print) + self.extracellular_properties.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="extracellularProperties", + pretty_print=pretty_print, + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -11987,118 +21391,235 
@@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - super(BiophysicalProperties, self).buildAttributes(node, attrs, already_processed) + super(BiophysicalProperties, self).buildAttributes( + node, attrs, already_processed + ) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'membraneProperties': + if nodeName_ == "membraneProperties": class_obj_ = self.get_class_obj_(child_, MembraneProperties) obj_ = class_obj_.factory(parent_object_=self) obj_.build(child_) self.membrane_properties = obj_ - obj_.original_tagname_ = 'membraneProperties' - elif nodeName_ == 'intracellularProperties': + obj_.original_tagname_ = "membraneProperties" + elif nodeName_ == "intracellularProperties": class_obj_ = self.get_class_obj_(child_, IntracellularProperties) obj_ = class_obj_.factory(parent_object_=self) obj_.build(child_) self.intracellular_properties = obj_ - obj_.original_tagname_ = 'intracellularProperties' - elif nodeName_ == 'extracellularProperties': + obj_.original_tagname_ = "intracellularProperties" + elif nodeName_ == "extracellularProperties": obj_ = ExtracellularProperties.factory(parent_object_=self) obj_.build(child_) self.extracellular_properties = obj_ - obj_.original_tagname_ = 'extracellularProperties' + obj_.original_tagname_ = "extracellularProperties" super(BiophysicalProperties, self).buildChildren(child_, node, nodeName_, True) + + # end class BiophysicalProperties class InhomogeneousParameter(Base): member_data_items_ = [ - MemberSpec_('variable', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('metric', 'Metric', 0, 0, {'use': u'required'}), - MemberSpec_('proximal', 'ProximalDetails', 0, 1, {u'type': u'ProximalDetails', u'name': u'proximal', u'minOccurs': u'0'}, None), - MemberSpec_('distal', 'DistalDetails', 0, 1, {u'type': u'DistalDetails', u'name': u'distal', u'minOccurs': u'0'}, None), + MemberSpec_("variable", "xs:string", 0, 0, {"use": u"required"}), + MemberSpec_("metric", "Metric", 0, 0, {"use": u"required"}), + MemberSpec_( + "proximal", + "ProximalDetails", + 0, + 1, + {u"type": u"ProximalDetails", u"name": u"proximal", u"minOccurs": u"0"}, + None, + ), + MemberSpec_( + "distal", + "DistalDetails", + 0, + 1, + {u"type": u"DistalDetails", u"name": u"distal", u"minOccurs": u"0"}, + None, + ), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, variable=None, metric=None, proximal=None, distal=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + variable=None, + metric=None, + proximal=None, + distal=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(InhomogeneousParameter, self).__init__(neuro_lex_id, id, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(InhomogeneousParameter, self).__init__(neuro_lex_id, id, **kwargs_) self.variable = _cast(None, variable) self.metric = _cast(None, metric) self.proximal = proximal self.distal = distal + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, InhomogeneousParameter) + CurrentSubclassModule_, InhomogeneousParameter + ) if subclass is not None: return subclass(*args_, **kwargs_) if InhomogeneousParameter.subclass: return InhomogeneousParameter.subclass(*args_, **kwargs_) else: return 
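A short constructive sketch for BiophysicalProperties, assuming MembraneProperties and IntracellularProperties can be instantiated without arguments (their definitions fall outside this hunk); the id is a placeholder:

    import sys
    from neuroml import (BiophysicalProperties, MembraneProperties,
                         IntracellularProperties)

    bp = BiophysicalProperties(
        id="biophys",
        membrane_properties=MembraneProperties(),            # required child element
        intracellular_properties=IntracellularProperties(),  # optional (minOccurs=0)
    )
    bp.export(sys.stdout, 0, name_="biophysicalProperties")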
InhomogeneousParameter(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Metric(self, value): # Validate type Metric, a restriction on xs:string. if value is not None and Validate_simpletypes_: value = str(value) - enumerations = ['Path Length from root'] + enumerations = ["Path Length from root"] enumeration_respectee = False for enum in enumerations: if value == enum: enumeration_respectee = True break if not enumeration_respectee: - warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on Metric' % {"value" : value.encode("utf-8")} ) + warnings_.warn( + 'Value "%(value)s" does not match xsd enumeration restriction on Metric' + % {"value": value.encode("utf-8")} + ) + def hasContent_(self): if ( - self.proximal is not None or - self.distal is not None or - super(InhomogeneousParameter, self).hasContent_() + self.proximal is not None + or self.distal is not None + or super(InhomogeneousParameter, self).hasContent_() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='InhomogeneousParameter', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('InhomogeneousParameter') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="InhomogeneousParameter", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("InhomogeneousParameter") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='InhomogeneousParameter') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="InhomogeneousParameter", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='InhomogeneousParameter', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="InhomogeneousParameter", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='InhomogeneousParameter'): - super(InhomogeneousParameter, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='InhomogeneousParameter') - if self.variable is not None and 'variable' not in already_processed: - already_processed.add('variable') - outfile.write(' variable=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.variable), input_name='variable')), )) - if self.metric is not None and 'metric' not in already_processed: - already_processed.add('metric') - outfile.write(' metric=%s' % (quote_attrib(self.metric), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='InhomogeneousParameter', fromsubclass_=False, pretty_print=True): - super(InhomogeneousParameter, 
self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="InhomogeneousParameter", + ): + super(InhomogeneousParameter, self).exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="InhomogeneousParameter", + ) + if self.variable is not None and "variable" not in already_processed: + already_processed.add("variable") + outfile.write( + " variable=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.variable), input_name="variable" + ) + ), + ) + ) + if self.metric is not None and "metric" not in already_processed: + already_processed.add("metric") + outfile.write(" metric=%s" % (quote_attrib(self.metric),)) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="InhomogeneousParameter", + fromsubclass_=False, + pretty_print=True, + ): + super(InhomogeneousParameter, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.proximal is not None: - self.proximal.export(outfile, level, namespaceprefix_, namespacedef_='', name_='proximal', pretty_print=pretty_print) + self.proximal.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="proximal", + pretty_print=pretty_print, + ) if self.distal is not None: - self.distal.export(outfile, level, namespaceprefix_, namespacedef_='', name_='distal', pretty_print=pretty_print) + self.distal.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="distal", + pretty_print=pretty_print, + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -12106,49 +21627,155 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('variable', node) - if value is not None and 'variable' not in already_processed: - already_processed.add('variable') + value = find_attr_value_("variable", node) + if value is not None and "variable" not in already_processed: + already_processed.add("variable") self.variable = value - value = find_attr_value_('metric', node) - if value is not None and 'metric' not in already_processed: - already_processed.add('metric') + value = find_attr_value_("metric", node) + if value is not None and "metric" not in already_processed: + already_processed.add("metric") self.metric = value - self.validate_Metric(self.metric) # validate type Metric - super(InhomogeneousParameter, self).buildAttributes(node, attrs, already_processed) + self.validate_Metric(self.metric) # validate type Metric + super(InhomogeneousParameter, self).buildAttributes( + node, attrs, already_processed + ) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'proximal': + if nodeName_ == "proximal": obj_ = ProximalDetails.factory(parent_object_=self) obj_.build(child_) self.proximal = obj_ - obj_.original_tagname_ = 'proximal' - elif nodeName_ == 'distal': + obj_.original_tagname_ = "proximal" + elif nodeName_ == "distal": obj_ = DistalDetails.factory(parent_object_=self) obj_.build(child_) self.distal = obj_ - 
obj_.original_tagname_ = 'distal' + obj_.original_tagname_ = "distal" super(InhomogeneousParameter, self).buildChildren(child_, node, nodeName_, True) + + # end class InhomogeneousParameter class SegmentGroup(Base): member_data_items_ = [ - MemberSpec_('notes', ['Notes', 'xs:string'], 0, 1, {u'type': u'xs:string', u'name': u'notes', u'minOccurs': u'0'}, None), - MemberSpec_('properties', 'Property', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Property', u'name': u'property', u'minOccurs': u'0'}, None), - MemberSpec_('annotation', 'Annotation', 0, 1, {u'type': u'Annotation', u'name': u'annotation', u'minOccurs': u'0'}, None), - MemberSpec_('members', 'Member', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Member', u'name': u'member', u'minOccurs': u'0'}, None), - MemberSpec_('includes', 'Include', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Include', u'name': u'include', u'minOccurs': u'0'}, None), - MemberSpec_('paths', 'Path', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Path', u'name': u'path', u'minOccurs': u'0'}, None), - MemberSpec_('sub_trees', 'SubTree', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'SubTree', u'name': u'subTree', u'minOccurs': u'0'}, None), - MemberSpec_('inhomogeneous_parameters', 'InhomogeneousParameter', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'InhomogeneousParameter', u'name': u'inhomogeneousParameter', u'minOccurs': u'0'}, None), + MemberSpec_( + "notes", + ["Notes", "xs:string"], + 0, + 1, + {u"type": u"xs:string", u"name": u"notes", u"minOccurs": u"0"}, + None, + ), + MemberSpec_( + "properties", + "Property", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"Property", + u"name": u"property", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "annotation", + "Annotation", + 0, + 1, + {u"type": u"Annotation", u"name": u"annotation", u"minOccurs": u"0"}, + None, + ), + MemberSpec_( + "members", + "Member", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"Member", + u"name": u"member", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "includes", + "Include", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"Include", + u"name": u"include", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "paths", + "Path", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"Path", + u"name": u"path", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "sub_trees", + "SubTree", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"SubTree", + u"name": u"subTree", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "inhomogeneous_parameters", + "InhomogeneousParameter", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"InhomogeneousParameter", + u"name": u"inhomogeneousParameter", + u"minOccurs": u"0", + }, + None, + ), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, notes=None, properties=None, annotation=None, members=None, includes=None, paths=None, sub_trees=None, inhomogeneous_parameters=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + notes=None, + properties=None, + annotation=None, + members=None, + includes=None, + paths=None, + sub_trees=None, + inhomogeneous_parameters=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(SegmentGroup, self).__init__(neuro_lex_id, id, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(SegmentGroup, self).__init__(neuro_lex_id, id, **kwargs_) self.notes = notes self.validate_Notes(self.notes) if properties is 
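A corresponding sketch for InhomogeneousParameter; variable and metric are the attributes validated above ("Path Length from root" is the only accepted metric value), while ProximalDetails and its translation_start attribute are assumed from the NeuroML schema and are not shown in this hunk:

    import sys
    from neuroml import InhomogeneousParameter, ProximalDetails

    ip = InhomogeneousParameter(
        id="PathOverDendrites",
        variable="p",                    # symbol usable later in variableParameter expressions
        metric="Path Length from root",  # any other value triggers the validate_Metric warning
        proximal=ProximalDetails(translation_start=0),
    )
    ip.export(sys.stdout, 0, name_="inhomogeneousParameter")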
None: @@ -12176,82 +21803,194 @@ def __init__(self, neuro_lex_id=None, id=None, notes=None, properties=None, anno self.inhomogeneous_parameters = [] else: self.inhomogeneous_parameters = inhomogeneous_parameters + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, SegmentGroup) + subclass = getSubclassFromModule_(CurrentSubclassModule_, SegmentGroup) if subclass is not None: return subclass(*args_, **kwargs_) if SegmentGroup.subclass: return SegmentGroup.subclass(*args_, **kwargs_) else: return SegmentGroup(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Notes(self, value): # Validate type Notes, a restriction on xs:string. if value is not None and Validate_simpletypes_: pass + def hasContent_(self): if ( - self.notes is not None or - self.properties or - self.annotation is not None or - self.members or - self.includes or - self.paths or - self.sub_trees or - self.inhomogeneous_parameters or - super(SegmentGroup, self).hasContent_() + self.notes is not None + or self.properties + or self.annotation is not None + or self.members + or self.includes + or self.paths + or self.sub_trees + or self.inhomogeneous_parameters + or super(SegmentGroup, self).hasContent_() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SegmentGroup', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('SegmentGroup') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="SegmentGroup", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("SegmentGroup") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SegmentGroup') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="SegmentGroup" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SegmentGroup', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="SegmentGroup", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SegmentGroup'): - super(SegmentGroup, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SegmentGroup') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SegmentGroup', fromsubclass_=False, pretty_print=True): - super(SegmentGroup, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + 
already_processed, + namespaceprefix_="", + name_="SegmentGroup", + ): + super(SegmentGroup, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="SegmentGroup" + ) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="SegmentGroup", + fromsubclass_=False, + pretty_print=True, + ): + super(SegmentGroup, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.notes is not None: showIndent(outfile, level, pretty_print) - outfile.write('<%snotes>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.notes), input_name='notes')), namespaceprefix_ , eol_)) + outfile.write( + "<%snotes>%s%s" + % ( + namespaceprefix_, + self.gds_encode( + self.gds_format_string( + quote_xml(self.notes), input_name="notes" + ) + ), + namespaceprefix_, + eol_, + ) + ) for property_ in self.properties: - property_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='property', pretty_print=pretty_print) + property_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="property", + pretty_print=pretty_print, + ) if self.annotation is not None: - self.annotation.export(outfile, level, namespaceprefix_, namespacedef_='', name_='annotation', pretty_print=pretty_print) + self.annotation.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="annotation", + pretty_print=pretty_print, + ) for member_ in self.members: - member_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='member', pretty_print=pretty_print) + member_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="member", + pretty_print=pretty_print, + ) for include_ in self.includes: - include_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='include', pretty_print=pretty_print) + include_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="include", + pretty_print=pretty_print, + ) for path_ in self.paths: - path_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='path', pretty_print=pretty_print) + path_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="path", + pretty_print=pretty_print, + ) for subTree_ in self.sub_trees: - subTree_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='subTree', pretty_print=pretty_print) + subTree_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="subTree", + pretty_print=pretty_print, + ) for inhomogeneousParameter_ in self.inhomogeneous_parameters: - inhomogeneousParameter_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='inhomogeneousParameter', pretty_print=pretty_print) + inhomogeneousParameter_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="inhomogeneousParameter", + pretty_print=pretty_print, + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -12259,140 +21998,256 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): super(SegmentGroup, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'notes': + if nodeName_ == "notes": 
notes_ = child_.text - notes_ = self.gds_validate_string(notes_, node, 'notes') + notes_ = self.gds_validate_string(notes_, node, "notes") self.notes = notes_ # validate type Notes self.validate_Notes(self.notes) - elif nodeName_ == 'property': + elif nodeName_ == "property": obj_ = Property.factory(parent_object_=self) obj_.build(child_) self.properties.append(obj_) - obj_.original_tagname_ = 'property' - elif nodeName_ == 'annotation': + obj_.original_tagname_ = "property" + elif nodeName_ == "annotation": obj_ = Annotation.factory(parent_object_=self) obj_.build(child_) self.annotation = obj_ - obj_.original_tagname_ = 'annotation' - elif nodeName_ == 'member': + obj_.original_tagname_ = "annotation" + elif nodeName_ == "member": obj_ = Member.factory(parent_object_=self) obj_.build(child_) self.members.append(obj_) - obj_.original_tagname_ = 'member' - elif nodeName_ == 'include': + obj_.original_tagname_ = "member" + elif nodeName_ == "include": obj_ = Include.factory(parent_object_=self) obj_.build(child_) self.includes.append(obj_) - obj_.original_tagname_ = 'include' - elif nodeName_ == 'path': + obj_.original_tagname_ = "include" + elif nodeName_ == "path": obj_ = Path.factory(parent_object_=self) obj_.build(child_) self.paths.append(obj_) - obj_.original_tagname_ = 'path' - elif nodeName_ == 'subTree': + obj_.original_tagname_ = "path" + elif nodeName_ == "subTree": obj_ = SubTree.factory(parent_object_=self) obj_.build(child_) self.sub_trees.append(obj_) - obj_.original_tagname_ = 'subTree' - elif nodeName_ == 'inhomogeneousParameter': + obj_.original_tagname_ = "subTree" + elif nodeName_ == "inhomogeneousParameter": obj_ = InhomogeneousParameter.factory(parent_object_=self) obj_.build(child_) self.inhomogeneous_parameters.append(obj_) - obj_.original_tagname_ = 'inhomogeneousParameter' + obj_.original_tagname_ = "inhomogeneousParameter" super(SegmentGroup, self).buildChildren(child_, node, nodeName_, True) - def __str__(self): - return "SegmentGroup: "+str(self.id)+", "+str(len(self.members))+" member(s), "+str(len(self.includes))+" included group(s)" + return ( + "SegmentGroup: " + + str(self.id) + + ", " + + str(len(self.members)) + + " member(s), " + + str(len(self.includes)) + + " included group(s)" + ) def __repr__(self): return str(self) + # end class SegmentGroup class Segment(BaseNonNegativeIntegerId): member_data_items_ = [ - MemberSpec_('name', 'xs:string', 0, 1, {'use': u'optional'}), - MemberSpec_('parent', 'SegmentParent', 0, 1, {u'type': u'SegmentParent', u'name': u'parent', u'minOccurs': u'0'}, None), - MemberSpec_('proximal', 'Point3DWithDiam', 0, 1, {u'type': u'Point3DWithDiam', u'name': u'proximal', u'minOccurs': u'0'}, None), - MemberSpec_('distal', 'Point3DWithDiam', 0, 0, {u'type': u'Point3DWithDiam', u'name': u'distal', u'minOccurs': u'1'}, None), + MemberSpec_("name", "xs:string", 0, 1, {"use": u"optional"}), + MemberSpec_( + "parent", + "SegmentParent", + 0, + 1, + {u"type": u"SegmentParent", u"name": u"parent", u"minOccurs": u"0"}, + None, + ), + MemberSpec_( + "proximal", + "Point3DWithDiam", + 0, + 1, + {u"type": u"Point3DWithDiam", u"name": u"proximal", u"minOccurs": u"0"}, + None, + ), + MemberSpec_( + "distal", + "Point3DWithDiam", + 0, + 0, + {u"type": u"Point3DWithDiam", u"name": u"distal", u"minOccurs": u"1"}, + None, + ), ] subclass = None superclass = BaseNonNegativeIntegerId - def __init__(self, neuro_lex_id=None, id=None, name=None, parent=None, proximal=None, distal=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, 
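# --- Editor's illustrative sketch (not part of the patch above) ---
# The SegmentGroup class reformatted above is normally assembled in Python rather
# than parsed from XML. A minimal, hypothetical usage example, assuming the public
# neuroml package re-exports SegmentGroup, Member and Include as generated here
# (all ids and attribute values below are invented):

from neuroml import SegmentGroup, Member, Include

dendrite_group = SegmentGroup(id="dendrites")
dendrite_group.members.append(Member(segments=1))    # refer to segment with id 1
dendrite_group.members.append(Member(segments=2))    # refer to segment with id 2
dendrite_group.includes.append(Include(segment_groups="basal_dendrites"))

# Uses the __str__ defined above: id, number of members, number of included groups
print(dendrite_group)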
+ name=None, + parent=None, + proximal=None, + distal=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(Segment, self).__init__(neuro_lex_id, id, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(Segment, self).__init__(neuro_lex_id, id, **kwargs_) self.name = _cast(None, name) self.parent = parent self.proximal = proximal self.distal = distal + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, Segment) + subclass = getSubclassFromModule_(CurrentSubclassModule_, Segment) if subclass is not None: return subclass(*args_, **kwargs_) if Segment.subclass: return Segment.subclass(*args_, **kwargs_) else: return Segment(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): if ( - self.parent is not None or - self.proximal is not None or - self.distal is not None or - super(Segment, self).hasContent_() + self.parent is not None + or self.proximal is not None + or self.distal is not None + or super(Segment, self).hasContent_() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Segment', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('Segment') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Segment", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("Segment") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Segment') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Segment" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Segment', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="Segment", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Segment'): - super(Segment, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Segment') - if self.name is not None and 'name' not in already_processed: - already_processed.add('name') - outfile.write(' name=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.name), input_name='name')), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Segment', fromsubclass_=False, pretty_print=True): - super(Segment, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, outfile, level, already_processed, 
namespaceprefix_="", name_="Segment" + ): + super(Segment, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Segment" + ) + if self.name is not None and "name" not in already_processed: + already_processed.add("name") + outfile.write( + " name=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.name), input_name="name" + ) + ), + ) + ) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Segment", + fromsubclass_=False, + pretty_print=True, + ): + super(Segment, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.parent is not None: - self.parent.export(outfile, level, namespaceprefix_, namespacedef_='', name_='parent', pretty_print=pretty_print) + self.parent.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="parent", + pretty_print=pretty_print, + ) if self.proximal is not None: - self.proximal.export(outfile, level, namespaceprefix_, namespacedef_='', name_='proximal', pretty_print=pretty_print) + self.proximal.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="proximal", + pretty_print=pretty_print, + ) if self.distal is not None: - self.distal.export(outfile, level, namespaceprefix_, namespacedef_='', name_='distal', pretty_print=pretty_print) + self.distal.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="distal", + pretty_print=pretty_print, + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -12400,29 +22255,32 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('name', node) - if value is not None and 'name' not in already_processed: - already_processed.add('name') + value = find_attr_value_("name", node) + if value is not None and "name" not in already_processed: + already_processed.add("name") self.name = value super(Segment, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'parent': + if nodeName_ == "parent": obj_ = SegmentParent.factory(parent_object_=self) obj_.build(child_) self.parent = obj_ - obj_.original_tagname_ = 'parent' - elif nodeName_ == 'proximal': + obj_.original_tagname_ = "parent" + elif nodeName_ == "proximal": obj_ = Point3DWithDiam.factory(parent_object_=self) obj_.build(child_) self.proximal = obj_ - obj_.original_tagname_ = 'proximal' - elif nodeName_ == 'distal': + obj_.original_tagname_ = "proximal" + elif nodeName_ == "distal": obj_ = Point3DWithDiam.factory(parent_object_=self) obj_.build(child_) self.distal = obj_ - obj_.original_tagname_ = 'distal' + obj_.original_tagname_ = "distal" super(Segment, self).buildChildren(child_, node, nodeName_, True) + @property def length(self): """Get the length of the segment. @@ -12431,8 +22289,12 @@ def length(self): :rtype: float """ - if self.proximal==None: - raise Exception('Cannot get length of segment '+str(self.id)+' using the length property, since no proximal point is set on it (the proximal point comes from the parent segment). 
Use the method get_segment_length(segment_id) on the cell instead.') + if self.proximal == None: + raise Exception( + "Cannot get length of segment " + + str(self.id) + + " using the length property, since no proximal point is set on it (the proximal point comes from the parent segment). Use the method get_segment_length(segment_id) on the cell instead." + ) prox_x = self.proximal.x prox_y = self.proximal.y @@ -12442,13 +22304,20 @@ def length(self): dist_y = self.distal.y dist_z = self.distal.z - length = ((prox_x-dist_x)**2 + (prox_y-dist_y)**2 + (prox_z-dist_z)**2)**(0.5) + length = ( + (prox_x - dist_x) ** 2 + (prox_y - dist_y) ** 2 + (prox_z - dist_z) ** 2 + ) ** (0.5) return length def __str__(self): - return "" + return ( + "" + ) def __repr__(self): @@ -12463,25 +22332,40 @@ def volume(self): """ from math import pi - if self.proximal==None: - raise Exception('Cannot get volume of segment '+str(self.id)+' using the volume property, since no proximal point is set on it (the proximal point comes from the parent segment). Use the method get_segment_volume(segment_id) on the cell instead.') - prox_rad = self.proximal.diameter/2.0 - dist_rad = self.distal.diameter/2.0 + if self.proximal == None: + raise Exception( + "Cannot get volume of segment " + + str(self.id) + + " using the volume property, since no proximal point is set on it (the proximal point comes from the parent segment). Use the method get_segment_volume(segment_id) on the cell instead." + ) + + prox_rad = self.proximal.diameter / 2.0 + dist_rad = self.distal.diameter / 2.0 - if self.proximal.x == self.distal.x and self.proximal.y == self.distal.y and self.proximal.z == self.distal.z: + if ( + self.proximal.x == self.distal.x + and self.proximal.y == self.distal.y + and self.proximal.z == self.distal.z + ): - if prox_rad!=dist_rad: - raise Exception('Cannot get volume of segment '+str(self.id)+'. The (x,y,z) coordinates of the proximal and distal points match (i.e. it is a sphere), but the diameters of these points are different, making the volume calculation ambiguous.') + if prox_rad != dist_rad: + raise Exception( + "Cannot get volume of segment " + + str(self.id) + + ". The (x,y,z) coordinates of the proximal and distal points match (i.e. it is a sphere), but the diameters of these points are different, making the volume calculation ambiguous." + ) - return 4.0/3 * pi * prox_rad**3 + return 4.0 / 3 * pi * prox_rad ** 3 length = self.length - volume = (pi/3)*length*(prox_rad**2+dist_rad**2+prox_rad*dist_rad) + volume = ( + (pi / 3) * length * (prox_rad ** 2 + dist_rad ** 2 + prox_rad * dist_rad) + ) return volume - + @property def surface_area(self): """Get the surface area of the segment. @@ -12492,40 +22376,89 @@ def surface_area(self): from math import pi from math import sqrt - if self.proximal==None: - raise Exception('Cannot get surface area of segment '+str(self.id)+' using the surface_area property, since no proximal point is set on it (the proximal point comes from the parent segment). Use the method get_segment_surface_area(segment_id) on the cell instead.') + if self.proximal == None: + raise Exception( + "Cannot get surface area of segment " + + str(self.id) + + " using the surface_area property, since no proximal point is set on it (the proximal point comes from the parent segment). Use the method get_segment_surface_area(segment_id) on the cell instead." 
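# --- Editor's illustrative sketch (not part of the patch above) ---
# The length, volume and surface_area properties above treat each segment as a
# conical frustum between its proximal and distal points (or as a sphere when the
# two points coincide). The same formulas, stand-alone, on made-up numbers:

from math import pi, sqrt

# hypothetical proximal/distal points: (x, y, z, diameter), e.g. in um
prox = (0.0, 0.0, 0.0, 4.0)
dist = (10.0, 0.0, 0.0, 2.0)

r1, r2 = prox[3] / 2.0, dist[3] / 2.0
length = sqrt(sum((p - d) ** 2 for p, d in zip(prox[:3], dist[:3])))

# frustum volume, as in Segment.volume
volume = (pi / 3.0) * length * (r1 ** 2 + r2 ** 2 + r1 * r2)

# lateral (slant) surface area, as in Segment.surface_area; note that the end
# caps of the frustum are not included, matching the property above
surface_area = pi * (r1 + r2) * sqrt((r1 - r2) ** 2 + length ** 2)

print(length, volume, surface_area)   # 10.0, ~73.3, ~94.7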
+ ) - prox_rad = self.proximal.diameter/2.0 - dist_rad = self.distal.diameter/2.0 + prox_rad = self.proximal.diameter / 2.0 + dist_rad = self.distal.diameter / 2.0 - if self.proximal.x == self.distal.x and self.proximal.y == self.distal.y and self.proximal.z == self.distal.z: + if ( + self.proximal.x == self.distal.x + and self.proximal.y == self.distal.y + and self.proximal.z == self.distal.z + ): - if prox_rad!=dist_rad: - raise Exception('Cannot get surface area of segment '+str(self.id)+'. The (x,y,z) coordinates of the proximal and distal points match (i.e. it is a sphere), but the diameters of these points are different, making the surface area calculation ambiguous.') + if prox_rad != dist_rad: + raise Exception( + "Cannot get surface area of segment " + + str(self.id) + + ". The (x,y,z) coordinates of the proximal and distal points match (i.e. it is a sphere), but the diameters of these points are different, making the surface area calculation ambiguous." + ) - return 4.0 * pi * prox_rad**2 + return 4.0 * pi * prox_rad ** 2 length = self.length - surface_area = pi*(prox_rad+dist_rad)*sqrt((prox_rad-dist_rad)**2+length**2) + surface_area = ( + pi * (prox_rad + dist_rad) * sqrt((prox_rad - dist_rad) ** 2 + length ** 2) + ) return surface_area + # end class Segment class Morphology(Standalone): """Standalone element which is usually inside a single cell, but could be outside and referenced by id.""" + member_data_items_ = [ - MemberSpec_('segments', 'Segment', 1, 0, {u'maxOccurs': u'unbounded', u'type': u'Segment', u'name': u'segment'}, None), - MemberSpec_('segment_groups', 'SegmentGroup', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'SegmentGroup', u'name': u'segmentGroup', u'minOccurs': u'0'}, None), + MemberSpec_( + "segments", + "Segment", + 1, + 0, + {u"maxOccurs": u"unbounded", u"type": u"Segment", u"name": u"segment"}, + None, + ), + MemberSpec_( + "segment_groups", + "SegmentGroup", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"SegmentGroup", + u"name": u"segmentGroup", + u"minOccurs": u"0", + }, + None, + ), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, segments=None, segment_groups=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + segments=None, + segment_groups=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(Morphology, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(Morphology, self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_ + ) if segments is None: self.segments = [] else: @@ -12534,59 +22467,118 @@ def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properti self.segment_groups = [] else: self.segment_groups = segment_groups + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, Morphology) + subclass = getSubclassFromModule_(CurrentSubclassModule_, Morphology) if subclass is not None: return subclass(*args_, **kwargs_) if Morphology.subclass: return Morphology.subclass(*args_, **kwargs_) else: return Morphology(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): if ( - self.segments or - self.segment_groups or - super(Morphology, 
self).hasContent_() + self.segments + or self.segment_groups + or super(Morphology, self).hasContent_() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Morphology', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('Morphology') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Morphology", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("Morphology") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Morphology') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Morphology" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Morphology', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="Morphology", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Morphology'): - super(Morphology, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Morphology') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Morphology', fromsubclass_=False, pretty_print=True): - super(Morphology, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="Morphology" + ): + super(Morphology, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Morphology" + ) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Morphology", + fromsubclass_=False, + pretty_print=True, + ): + super(Morphology, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" for segment_ in self.segments: - segment_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='segment', pretty_print=pretty_print) + segment_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="segment", + pretty_print=pretty_print, + ) for segmentGroup_ in self.segment_groups: - segmentGroup_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='segmentGroup', pretty_print=pretty_print) + segmentGroup_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="segmentGroup", + pretty_print=pretty_print, + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, 
already_processed) @@ -12594,20 +22586,23 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): super(Morphology, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'segment': + if nodeName_ == "segment": obj_ = Segment.factory(parent_object_=self) obj_.build(child_) self.segments.append(obj_) - obj_.original_tagname_ = 'segment' - elif nodeName_ == 'segmentGroup': + obj_.original_tagname_ = "segment" + elif nodeName_ == "segmentGroup": obj_ = SegmentGroup.factory(parent_object_=self) obj_.build(child_) self.segment_groups.append(obj_) - obj_.original_tagname_ = 'segmentGroup' + obj_.original_tagname_ = "segmentGroup" super(Morphology, self).buildChildren(child_, node, nodeName_, True) + @property def num_segments(self): """Get the number of segments included in this cell morphology. @@ -12616,66 +22611,130 @@ def num_segments(self): :rtype: int """ return len(self.segments) + + # end class Morphology class BaseCell(Standalone): - member_data_items_ = [ - ] + member_data_items_ = [] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, extensiontype_=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + extensiontype_=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(BaseCell, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, extensiontype_, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(BaseCell, self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + extensiontype_, + **kwargs_ + ) self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, BaseCell) + subclass = getSubclassFromModule_(CurrentSubclassModule_, BaseCell) if subclass is not None: return subclass(*args_, **kwargs_) if BaseCell.subclass: return BaseCell.subclass(*args_, **kwargs_) else: return BaseCell(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): - if ( - super(BaseCell, self).hasContent_() - ): + if super(BaseCell, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseCell', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('BaseCell') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="BaseCell", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("BaseCell") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseCell') + 
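# --- Editor's illustrative sketch (not part of the patch above) ---
# A minimal, hypothetical way to assemble the Morphology generated above from
# Segment and Point3DWithDiam objects (ids, names and coordinates are invented,
# and the imports assume the neuroml package exposes these generated classes):

from neuroml import Morphology, Segment, SegmentParent, Point3DWithDiam

soma = Segment(
    id=0,
    name="soma",
    proximal=Point3DWithDiam(x=0, y=0, z=0, diameter=10),
    distal=Point3DWithDiam(x=10, y=0, z=0, diameter=10),
)
dend = Segment(
    id=1,
    name="dend0",
    parent=SegmentParent(segments=0),   # attach to the soma segment
    distal=Point3DWithDiam(x=30, y=0, z=0, diameter=2),
)

morphology = Morphology(id="morph0", segments=[soma, dend])
print(morphology.num_segments)   # 2, via the property defined above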
self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="BaseCell" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BaseCell', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="BaseCell", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BaseCell'): - super(BaseCell, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseCell') - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="BaseCell" + ): + super(BaseCell, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="BaseCell" + ) + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseCell', fromsubclass_=False, pretty_print=True): - super(BaseCell, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="BaseCell", + fromsubclass_=False, + pretty_print=True, + ): + super(BaseCell, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -12683,75 +22742,146 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value super(BaseCell, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(BaseCell, self).buildChildren(child_, node, nodeName_, True) pass + + # end class BaseCell class BaseSynapse(Standalone): - member_data_items_ = [ - ] + member_data_items_ = [] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, extensiontype_=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + extensiontype_=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(BaseSynapse, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, extensiontype_, **kwargs_) + 
self.parent_object_ = kwargs_.get("parent_object_") + super(BaseSynapse, self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + extensiontype_, + **kwargs_ + ) self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, BaseSynapse) + subclass = getSubclassFromModule_(CurrentSubclassModule_, BaseSynapse) if subclass is not None: return subclass(*args_, **kwargs_) if BaseSynapse.subclass: return BaseSynapse.subclass(*args_, **kwargs_) else: return BaseSynapse(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): - if ( - super(BaseSynapse, self).hasContent_() - ): + if super(BaseSynapse, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseSynapse', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('BaseSynapse') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="BaseSynapse", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("BaseSynapse") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseSynapse') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="BaseSynapse" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BaseSynapse', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="BaseSynapse", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BaseSynapse'): - super(BaseSynapse, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseSynapse') - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="BaseSynapse", + ): + super(BaseSynapse, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="BaseSynapse" + ) + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseSynapse', fromsubclass_=False, pretty_print=True): - super(BaseSynapse, self).exportChildren(outfile, level, namespaceprefix_, name_, True, 
pretty_print=pretty_print) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="BaseSynapse", + fromsubclass_=False, + pretty_print=True, + ): + super(BaseSynapse, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -12759,119 +22889,247 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value super(BaseSynapse, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(BaseSynapse, self).buildChildren(child_, node, nodeName_, True) pass + + # end class BaseSynapse class FixedFactorConcentrationModel(Standalone): """Should not be required, as it's present on the species element!""" + member_data_items_ = [ - MemberSpec_('ion', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('resting_conc', 'Nml2Quantity_concentration', 0, 0, {'use': u'required'}), - MemberSpec_('decay_constant', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('rho', 'Nml2Quantity_rhoFactor', 0, 0, {'use': u'required'}), + MemberSpec_("ion", "NmlId", 0, 0, {"use": u"required"}), + MemberSpec_( + "resting_conc", "Nml2Quantity_concentration", 0, 0, {"use": u"required"} + ), + MemberSpec_("decay_constant", "Nml2Quantity_time", 0, 0, {"use": u"required"}), + MemberSpec_("rho", "Nml2Quantity_rhoFactor", 0, 0, {"use": u"required"}), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, ion=None, resting_conc=None, decay_constant=None, rho=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + ion=None, + resting_conc=None, + decay_constant=None, + rho=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(FixedFactorConcentrationModel, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(FixedFactorConcentrationModel, self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_ + ) self.ion = _cast(None, ion) self.resting_conc = _cast(None, resting_conc) self.decay_constant = _cast(None, decay_constant) self.rho = _cast(None, rho) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, FixedFactorConcentrationModel) + CurrentSubclassModule_, FixedFactorConcentrationModel + ) if subclass is not None: return subclass(*args_, **kwargs_) if FixedFactorConcentrationModel.subclass: return FixedFactorConcentrationModel.subclass(*args_, **kwargs_) else: return FixedFactorConcentrationModel(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. 
if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] + self.validate_NmlId_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [[u"^[a-zA-Z_][a-zA-Z0-9_]*$"]] + def validate_Nml2Quantity_concentration(self, value): # Validate type Nml2Quantity_concentration, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_concentration_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_concentration_patterns_, )) - validate_Nml2Quantity_concentration_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(mol_per_m3|mol_per_cm3|M|mM)$']] + self.validate_Nml2Quantity_concentration_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_concentration_patterns_, + ) + ) + + validate_Nml2Quantity_concentration_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(mol_per_m3|mol_per_cm3|M|mM)$"] + ] + def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] + self.validate_Nml2Quantity_time_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_time_patterns_, + ) + ) + + validate_Nml2Quantity_time_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$"] + ] + def validate_Nml2Quantity_rhoFactor(self, value): # Validate type Nml2Quantity_rhoFactor, a restriction on xs:string. 
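# --- Editor's illustrative sketch (not part of the patch above) ---
# The validate_Nml2Quantity_* methods above only issue a warning when a value
# fails the XSD pattern for its dimension. The same check, stand-alone, using
# the time pattern copied from the code above:

import re

NML2_TIME = r"^-?([0-9]*(\.[0-9]+)?)([eE]-?[0-9]+)?[\s]*(s|ms)$"

for value in ["5ms", "0.05 s", "1.5e-3s", "5 mV"]:
    ok = re.match(NML2_TIME, value) is not None
    print(value, "matches" if ok else "does not match")
# "5 mV" does not match: mV is a voltage unit, so the generated validator would warn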
if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_rhoFactor_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_rhoFactor_patterns_, )) - validate_Nml2Quantity_rhoFactor_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(mol_per_m_per_A_per_s|mol_per_cm_per_uA_per_ms)$']] + self.validate_Nml2Quantity_rhoFactor_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_rhoFactor_patterns_, + ) + ) + + validate_Nml2Quantity_rhoFactor_patterns_ = [ + [ + u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(mol_per_m_per_A_per_s|mol_per_cm_per_uA_per_ms)$" + ] + ] + def hasContent_(self): - if ( - super(FixedFactorConcentrationModel, self).hasContent_() - ): + if super(FixedFactorConcentrationModel, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='FixedFactorConcentrationModel', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('FixedFactorConcentrationModel') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="FixedFactorConcentrationModel", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("FixedFactorConcentrationModel") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='FixedFactorConcentrationModel') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="FixedFactorConcentrationModel", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='FixedFactorConcentrationModel', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="FixedFactorConcentrationModel", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='FixedFactorConcentrationModel'): - super(FixedFactorConcentrationModel, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='FixedFactorConcentrationModel') - if self.ion is not None and 'ion' not in already_processed: - already_processed.add('ion') - outfile.write(' ion=%s' % (quote_attrib(self.ion), )) - if self.resting_conc is not None and 'resting_conc' not in already_processed: - already_processed.add('resting_conc') - outfile.write(' restingConc=%s' % (quote_attrib(self.resting_conc), )) - if self.decay_constant is not None and 'decay_constant' not in already_processed: - already_processed.add('decay_constant') - outfile.write(' 
decayConstant=%s' % (quote_attrib(self.decay_constant), )) - if self.rho is not None and 'rho' not in already_processed: - already_processed.add('rho') - outfile.write(' rho=%s' % (quote_attrib(self.rho), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='FixedFactorConcentrationModel', fromsubclass_=False, pretty_print=True): - super(FixedFactorConcentrationModel, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="FixedFactorConcentrationModel", + ): + super(FixedFactorConcentrationModel, self).exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="FixedFactorConcentrationModel", + ) + if self.ion is not None and "ion" not in already_processed: + already_processed.add("ion") + outfile.write(" ion=%s" % (quote_attrib(self.ion),)) + if self.resting_conc is not None and "resting_conc" not in already_processed: + already_processed.add("resting_conc") + outfile.write(" restingConc=%s" % (quote_attrib(self.resting_conc),)) + if ( + self.decay_constant is not None + and "decay_constant" not in already_processed + ): + already_processed.add("decay_constant") + outfile.write(" decayConstant=%s" % (quote_attrib(self.decay_constant),)) + if self.rho is not None and "rho" not in already_processed: + already_processed.add("rho") + outfile.write(" rho=%s" % (quote_attrib(self.rho),)) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="FixedFactorConcentrationModel", + fromsubclass_=False, + pretty_print=True, + ): + super(FixedFactorConcentrationModel, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -12879,140 +23137,291 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('ion', node) - if value is not None and 'ion' not in already_processed: - already_processed.add('ion') + value = find_attr_value_("ion", node) + if value is not None and "ion" not in already_processed: + already_processed.add("ion") self.ion = value - self.validate_NmlId(self.ion) # validate type NmlId - value = find_attr_value_('restingConc', node) - if value is not None and 'restingConc' not in already_processed: - already_processed.add('restingConc') + self.validate_NmlId(self.ion) # validate type NmlId + value = find_attr_value_("restingConc", node) + if value is not None and "restingConc" not in already_processed: + already_processed.add("restingConc") self.resting_conc = value - self.validate_Nml2Quantity_concentration(self.resting_conc) # validate type Nml2Quantity_concentration - value = find_attr_value_('decayConstant', node) - if value is not None and 'decayConstant' not in already_processed: - already_processed.add('decayConstant') + self.validate_Nml2Quantity_concentration( + self.resting_conc + ) # validate type Nml2Quantity_concentration + value = find_attr_value_("decayConstant", node) + if value is not None and "decayConstant" not in already_processed: + already_processed.add("decayConstant") self.decay_constant = value 
- self.validate_Nml2Quantity_time(self.decay_constant) # validate type Nml2Quantity_time - value = find_attr_value_('rho', node) - if value is not None and 'rho' not in already_processed: - already_processed.add('rho') + self.validate_Nml2Quantity_time( + self.decay_constant + ) # validate type Nml2Quantity_time + value = find_attr_value_("rho", node) + if value is not None and "rho" not in already_processed: + already_processed.add("rho") self.rho = value - self.validate_Nml2Quantity_rhoFactor(self.rho) # validate type Nml2Quantity_rhoFactor - super(FixedFactorConcentrationModel, self).buildAttributes(node, attrs, already_processed) + self.validate_Nml2Quantity_rhoFactor( + self.rho + ) # validate type Nml2Quantity_rhoFactor + super(FixedFactorConcentrationModel, self).buildAttributes( + node, attrs, already_processed + ) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(FixedFactorConcentrationModel, self).buildChildren(child_, node, nodeName_, True) + super(FixedFactorConcentrationModel, self).buildChildren( + child_, node, nodeName_, True + ) pass + + # end class FixedFactorConcentrationModel class DecayingPoolConcentrationModel(Standalone): """Should not be required, as it's present on the species element!""" + member_data_items_ = [ - MemberSpec_('ion', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('resting_conc', 'Nml2Quantity_concentration', 0, 0, {'use': u'required'}), - MemberSpec_('decay_constant', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('shell_thickness', 'Nml2Quantity_length', 0, 0, {'use': u'required'}), + MemberSpec_("ion", "NmlId", 0, 0, {"use": u"required"}), + MemberSpec_( + "resting_conc", "Nml2Quantity_concentration", 0, 0, {"use": u"required"} + ), + MemberSpec_("decay_constant", "Nml2Quantity_time", 0, 0, {"use": u"required"}), + MemberSpec_( + "shell_thickness", "Nml2Quantity_length", 0, 0, {"use": u"required"} + ), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, ion=None, resting_conc=None, decay_constant=None, shell_thickness=None, extensiontype_=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + ion=None, + resting_conc=None, + decay_constant=None, + shell_thickness=None, + extensiontype_=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(DecayingPoolConcentrationModel, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, extensiontype_, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(DecayingPoolConcentrationModel, self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + extensiontype_, + **kwargs_ + ) self.ion = _cast(None, ion) self.resting_conc = _cast(None, resting_conc) self.decay_constant = _cast(None, decay_constant) self.shell_thickness = _cast(None, shell_thickness) self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, DecayingPoolConcentrationModel) + CurrentSubclassModule_, DecayingPoolConcentrationModel + ) if subclass is not None: return subclass(*args_, **kwargs_) if DecayingPoolConcentrationModel.subclass: return DecayingPoolConcentrationModel.subclass(*args_, **kwargs_) else: return DecayingPoolConcentrationModel(*args_, **kwargs_) + factory = 
staticmethod(factory) + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] + self.validate_NmlId_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [[u"^[a-zA-Z_][a-zA-Z0-9_]*$"]] + def validate_Nml2Quantity_concentration(self, value): # Validate type Nml2Quantity_concentration, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_concentration_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_concentration_patterns_, )) - validate_Nml2Quantity_concentration_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(mol_per_m3|mol_per_cm3|M|mM)$']] + self.validate_Nml2Quantity_concentration_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_concentration_patterns_, + ) + ) + + validate_Nml2Quantity_concentration_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(mol_per_m3|mol_per_cm3|M|mM)$"] + ] + def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] + self.validate_Nml2Quantity_time_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_time_patterns_, + ) + ) + + validate_Nml2Quantity_time_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$"] + ] + def validate_Nml2Quantity_length(self, value): # Validate type Nml2Quantity_length, a restriction on xs:string. 
if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_length_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_length_patterns_, )) - validate_Nml2Quantity_length_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(m|cm|um)$']] + self.validate_Nml2Quantity_length_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_length_patterns_, + ) + ) + + validate_Nml2Quantity_length_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(m|cm|um)$"] + ] + def hasContent_(self): - if ( - super(DecayingPoolConcentrationModel, self).hasContent_() - ): + if super(DecayingPoolConcentrationModel, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='DecayingPoolConcentrationModel', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('DecayingPoolConcentrationModel') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="DecayingPoolConcentrationModel", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get( + "DecayingPoolConcentrationModel" + ) if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='DecayingPoolConcentrationModel') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="DecayingPoolConcentrationModel", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='DecayingPoolConcentrationModel', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="DecayingPoolConcentrationModel", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='DecayingPoolConcentrationModel'): - super(DecayingPoolConcentrationModel, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='DecayingPoolConcentrationModel') - if self.ion is not None and 'ion' not in already_processed: - already_processed.add('ion') - outfile.write(' ion=%s' % (quote_attrib(self.ion), )) - if self.resting_conc is not None and 'resting_conc' not in already_processed: - already_processed.add('resting_conc') - outfile.write(' restingConc=%s' % (quote_attrib(self.resting_conc), )) - if self.decay_constant is not None and 'decay_constant' not in already_processed: - already_processed.add('decay_constant') - outfile.write(' decayConstant=%s' % (quote_attrib(self.decay_constant), )) - if self.shell_thickness is 
not None and 'shell_thickness' not in already_processed: - already_processed.add('shell_thickness') - outfile.write(' shellThickness=%s' % (quote_attrib(self.shell_thickness), )) - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="DecayingPoolConcentrationModel", + ): + super(DecayingPoolConcentrationModel, self).exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="DecayingPoolConcentrationModel", + ) + if self.ion is not None and "ion" not in already_processed: + already_processed.add("ion") + outfile.write(" ion=%s" % (quote_attrib(self.ion),)) + if self.resting_conc is not None and "resting_conc" not in already_processed: + already_processed.add("resting_conc") + outfile.write(" restingConc=%s" % (quote_attrib(self.resting_conc),)) + if ( + self.decay_constant is not None + and "decay_constant" not in already_processed + ): + already_processed.add("decay_constant") + outfile.write(" decayConstant=%s" % (quote_attrib(self.decay_constant),)) + if ( + self.shell_thickness is not None + and "shell_thickness" not in already_processed + ): + already_processed.add("shell_thickness") + outfile.write(" shellThickness=%s" % (quote_attrib(self.shell_thickness),)) + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='DecayingPoolConcentrationModel', fromsubclass_=False, pretty_print=True): - super(DecayingPoolConcentrationModel, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="DecayingPoolConcentrationModel", + fromsubclass_=False, + pretty_print=True, + ): + super(DecayingPoolConcentrationModel, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -13020,132 +23429,298 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('ion', node) - if value is not None and 'ion' not in already_processed: - already_processed.add('ion') + value = find_attr_value_("ion", node) + if value is not None and "ion" not in already_processed: + already_processed.add("ion") self.ion = value - self.validate_NmlId(self.ion) # validate type NmlId - value = find_attr_value_('restingConc', node) - if value is not None and 'restingConc' not in already_processed: - already_processed.add('restingConc') + self.validate_NmlId(self.ion) # validate type NmlId + value = find_attr_value_("restingConc", node) + if value is not None and "restingConc" not in already_processed: + already_processed.add("restingConc") self.resting_conc = value - self.validate_Nml2Quantity_concentration(self.resting_conc) # validate type Nml2Quantity_concentration - value = find_attr_value_('decayConstant', node) - if 
value is not None and 'decayConstant' not in already_processed: - already_processed.add('decayConstant') + self.validate_Nml2Quantity_concentration( + self.resting_conc + ) # validate type Nml2Quantity_concentration + value = find_attr_value_("decayConstant", node) + if value is not None and "decayConstant" not in already_processed: + already_processed.add("decayConstant") self.decay_constant = value - self.validate_Nml2Quantity_time(self.decay_constant) # validate type Nml2Quantity_time - value = find_attr_value_('shellThickness', node) - if value is not None and 'shellThickness' not in already_processed: - already_processed.add('shellThickness') + self.validate_Nml2Quantity_time( + self.decay_constant + ) # validate type Nml2Quantity_time + value = find_attr_value_("shellThickness", node) + if value is not None and "shellThickness" not in already_processed: + already_processed.add("shellThickness") self.shell_thickness = value - self.validate_Nml2Quantity_length(self.shell_thickness) # validate type Nml2Quantity_length - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + self.validate_Nml2Quantity_length( + self.shell_thickness + ) # validate type Nml2Quantity_length + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value - super(DecayingPoolConcentrationModel, self).buildAttributes(node, attrs, already_processed) + super(DecayingPoolConcentrationModel, self).buildAttributes( + node, attrs, already_processed + ) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(DecayingPoolConcentrationModel, self).buildChildren(child_, node, nodeName_, True) + super(DecayingPoolConcentrationModel, self).buildChildren( + child_, node, nodeName_, True + ) pass + + # end class DecayingPoolConcentrationModel class GateFractionalSubgate(Base): member_data_items_ = [ - MemberSpec_('fractional_conductance', 'Nml2Quantity_none', 0, 0, {'use': u'required'}), - MemberSpec_('notes', ['Notes', 'xs:string'], 0, 1, {u'type': u'xs:string', u'name': u'notes', u'minOccurs': u'0'}, None), - MemberSpec_('q10_settings', 'Q10Settings', 0, 1, {u'type': u'Q10Settings', u'name': u'q10Settings', u'minOccurs': u'0'}, None), - MemberSpec_('steady_state', 'HHVariable', 0, 0, {u'type': u'HHVariable', u'name': u'steadyState', u'minOccurs': u'1'}, None), - MemberSpec_('time_course', 'HHTime', 0, 0, {u'type': u'HHTime', u'name': u'timeCourse', u'minOccurs': u'1'}, None), + MemberSpec_( + "fractional_conductance", "Nml2Quantity_none", 0, 0, {"use": u"required"} + ), + MemberSpec_( + "notes", + ["Notes", "xs:string"], + 0, + 1, + {u"type": u"xs:string", u"name": u"notes", u"minOccurs": u"0"}, + None, + ), + MemberSpec_( + "q10_settings", + "Q10Settings", + 0, + 1, + {u"type": u"Q10Settings", u"name": u"q10Settings", u"minOccurs": u"0"}, + None, + ), + MemberSpec_( + "steady_state", + "HHVariable", + 0, + 0, + {u"type": u"HHVariable", u"name": u"steadyState", u"minOccurs": u"1"}, + None, + ), + MemberSpec_( + "time_course", + "HHTime", + 0, + 0, + {u"type": u"HHTime", u"name": u"timeCourse", u"minOccurs": u"1"}, + None, + ), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, fractional_conductance=None, notes=None, q10_settings=None, steady_state=None, time_course=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + 
fractional_conductance=None, + notes=None, + q10_settings=None, + steady_state=None, + time_course=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(GateFractionalSubgate, self).__init__(neuro_lex_id, id, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(GateFractionalSubgate, self).__init__(neuro_lex_id, id, **kwargs_) self.fractional_conductance = _cast(None, fractional_conductance) self.notes = notes self.validate_Notes(self.notes) self.q10_settings = q10_settings self.steady_state = steady_state self.time_course = time_course + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, GateFractionalSubgate) + CurrentSubclassModule_, GateFractionalSubgate + ) if subclass is not None: return subclass(*args_, **kwargs_) if GateFractionalSubgate.subclass: return GateFractionalSubgate.subclass(*args_, **kwargs_) else: return GateFractionalSubgate(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Notes(self, value): # Validate type Notes, a restriction on xs:string. if value is not None and Validate_simpletypes_: pass + def validate_Nml2Quantity_none(self, value): # Validate type Nml2Quantity_none, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_none_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_none_patterns_, )) - validate_Nml2Quantity_none_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?$']] + self.validate_Nml2Quantity_none_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_none_patterns_, + ) + ) + + validate_Nml2Quantity_none_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?$"] + ] + def hasContent_(self): if ( - self.notes is not None or - self.q10_settings is not None or - self.steady_state is not None or - self.time_course is not None or - super(GateFractionalSubgate, self).hasContent_() + self.notes is not None + or self.q10_settings is not None + or self.steady_state is not None + or self.time_course is not None + or super(GateFractionalSubgate, self).hasContent_() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GateFractionalSubgate', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('GateFractionalSubgate') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="GateFractionalSubgate", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("GateFractionalSubgate") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateFractionalSubgate') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + 
already_processed, + namespaceprefix_, + name_="GateFractionalSubgate", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='GateFractionalSubgate', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="GateFractionalSubgate", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GateFractionalSubgate'): - super(GateFractionalSubgate, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateFractionalSubgate') - if self.fractional_conductance is not None and 'fractional_conductance' not in already_processed: - already_processed.add('fractional_conductance') - outfile.write(' fractionalConductance=%s' % (quote_attrib(self.fractional_conductance), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GateFractionalSubgate', fromsubclass_=False, pretty_print=True): - super(GateFractionalSubgate, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="GateFractionalSubgate", + ): + super(GateFractionalSubgate, self).exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="GateFractionalSubgate", + ) + if ( + self.fractional_conductance is not None + and "fractional_conductance" not in already_processed + ): + already_processed.add("fractional_conductance") + outfile.write( + " fractionalConductance=%s" + % (quote_attrib(self.fractional_conductance),) + ) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="GateFractionalSubgate", + fromsubclass_=False, + pretty_print=True, + ): + super(GateFractionalSubgate, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.notes is not None: showIndent(outfile, level, pretty_print) - outfile.write('<%snotes>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.notes), input_name='notes')), namespaceprefix_ , eol_)) + outfile.write( + "<%snotes>%s%s" + % ( + namespaceprefix_, + self.gds_encode( + self.gds_format_string( + quote_xml(self.notes), input_name="notes" + ) + ), + namespaceprefix_, + eol_, + ) + ) if self.q10_settings is not None: - self.q10_settings.export(outfile, level, namespaceprefix_, namespacedef_='', name_='q10Settings', pretty_print=pretty_print) + self.q10_settings.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="q10Settings", + pretty_print=pretty_print, + ) if self.steady_state is not None: - self.steady_state.export(outfile, level, namespaceprefix_, namespacedef_='', name_='steadyState', pretty_print=pretty_print) + self.steady_state.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="steadyState", + pretty_print=pretty_print, + ) if self.time_course is not None: - self.time_course.export(outfile, level, namespaceprefix_, namespacedef_='', 
name_='timeCourse', pretty_print=pretty_print) + self.time_course.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="timeCourse", + pretty_print=pretty_print, + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -13153,52 +23728,96 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('fractionalConductance', node) - if value is not None and 'fractionalConductance' not in already_processed: - already_processed.add('fractionalConductance') + value = find_attr_value_("fractionalConductance", node) + if value is not None and "fractionalConductance" not in already_processed: + already_processed.add("fractionalConductance") self.fractional_conductance = value - self.validate_Nml2Quantity_none(self.fractional_conductance) # validate type Nml2Quantity_none - super(GateFractionalSubgate, self).buildAttributes(node, attrs, already_processed) + self.validate_Nml2Quantity_none( + self.fractional_conductance + ) # validate type Nml2Quantity_none + super(GateFractionalSubgate, self).buildAttributes( + node, attrs, already_processed + ) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'notes': + if nodeName_ == "notes": notes_ = child_.text - notes_ = self.gds_validate_string(notes_, node, 'notes') + notes_ = self.gds_validate_string(notes_, node, "notes") self.notes = notes_ # validate type Notes self.validate_Notes(self.notes) - elif nodeName_ == 'q10Settings': + elif nodeName_ == "q10Settings": obj_ = Q10Settings.factory(parent_object_=self) obj_.build(child_) self.q10_settings = obj_ - obj_.original_tagname_ = 'q10Settings' - elif nodeName_ == 'steadyState': + obj_.original_tagname_ = "q10Settings" + elif nodeName_ == "steadyState": obj_ = HHVariable.factory(parent_object_=self) obj_.build(child_) self.steady_state = obj_ - obj_.original_tagname_ = 'steadyState' - elif nodeName_ == 'timeCourse': + obj_.original_tagname_ = "steadyState" + elif nodeName_ == "timeCourse": obj_ = HHTime.factory(parent_object_=self) obj_.build(child_) self.time_course = obj_ - obj_.original_tagname_ = 'timeCourse' + obj_.original_tagname_ = "timeCourse" super(GateFractionalSubgate, self).buildChildren(child_, node, nodeName_, True) + + # end class GateFractionalSubgate class GateFractional(Base): member_data_items_ = [ - MemberSpec_('instances', 'PositiveInteger', 0, 0, {'use': u'required'}), - MemberSpec_('notes', ['Notes', 'xs:string'], 0, 1, {u'type': u'xs:string', u'name': u'notes', u'minOccurs': u'0'}, None), - MemberSpec_('q10_settings', 'Q10Settings', 0, 1, {u'type': u'Q10Settings', u'name': u'q10Settings', u'minOccurs': u'0'}, None), - MemberSpec_('sub_gates', 'GateFractionalSubgate', 1, 0, {u'maxOccurs': u'unbounded', u'type': u'GateFractionalSubgate', u'name': u'subGate', u'minOccurs': u'1'}, None), + MemberSpec_("instances", "PositiveInteger", 0, 0, {"use": u"required"}), + MemberSpec_( + "notes", + ["Notes", "xs:string"], + 0, + 1, + {u"type": u"xs:string", u"name": u"notes", u"minOccurs": u"0"}, + None, + ), + MemberSpec_( + "q10_settings", + "Q10Settings", + 0, + 1, + {u"type": u"Q10Settings", u"name": u"q10Settings", u"minOccurs": u"0"}, + None, + ), + MemberSpec_( + "sub_gates", + "GateFractionalSubgate", + 1, + 0, + { + u"maxOccurs": u"unbounded", + u"type": u"GateFractionalSubgate", + u"name": u"subGate", + 
u"minOccurs": u"1", + }, + None, + ), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, instances=None, notes=None, q10_settings=None, sub_gates=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + instances=None, + notes=None, + q10_settings=None, + sub_gates=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(GateFractional, self).__init__(neuro_lex_id, id, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(GateFractional, self).__init__(neuro_lex_id, id, **kwargs_) self.instances = _cast(int, instances) self.notes = notes self.validate_Notes(self.notes) @@ -13207,74 +23826,152 @@ def __init__(self, neuro_lex_id=None, id=None, instances=None, notes=None, q10_s self.sub_gates = [] else: self.sub_gates = sub_gates + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, GateFractional) + subclass = getSubclassFromModule_(CurrentSubclassModule_, GateFractional) if subclass is not None: return subclass(*args_, **kwargs_) if GateFractional.subclass: return GateFractional.subclass(*args_, **kwargs_) else: return GateFractional(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Notes(self, value): # Validate type Notes, a restriction on xs:string. if value is not None and Validate_simpletypes_: pass + def validate_PositiveInteger(self, value): # Validate type PositiveInteger, a restriction on xs:positiveInteger. if value is not None and Validate_simpletypes_: pass + def hasContent_(self): if ( - self.notes is not None or - self.q10_settings is not None or - self.sub_gates or - super(GateFractional, self).hasContent_() + self.notes is not None + or self.q10_settings is not None + or self.sub_gates + or super(GateFractional, self).hasContent_() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GateFractional', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('GateFractional') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="GateFractional", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("GateFractional") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateFractional') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="GateFractional" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='GateFractional', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="GateFractional", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def 
exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GateFractional'): - super(GateFractional, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateFractional') - if self.instances is not None and 'instances' not in already_processed: - already_processed.add('instances') - outfile.write(' instances=%s' % (quote_attrib(self.instances), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GateFractional', fromsubclass_=False, pretty_print=True): - super(GateFractional, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="GateFractional", + ): + super(GateFractional, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="GateFractional" + ) + if self.instances is not None and "instances" not in already_processed: + already_processed.add("instances") + outfile.write(" instances=%s" % (quote_attrib(self.instances),)) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="GateFractional", + fromsubclass_=False, + pretty_print=True, + ): + super(GateFractional, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.notes is not None: showIndent(outfile, level, pretty_print) - outfile.write('<%snotes>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.notes), input_name='notes')), namespaceprefix_ , eol_)) + outfile.write( + "<%snotes>%s%s" + % ( + namespaceprefix_, + self.gds_encode( + self.gds_format_string( + quote_xml(self.notes), input_name="notes" + ) + ), + namespaceprefix_, + eol_, + ) + ) if self.q10_settings is not None: - self.q10_settings.export(outfile, level, namespaceprefix_, namespacedef_='', name_='q10Settings', pretty_print=pretty_print) + self.q10_settings.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="q10Settings", + pretty_print=pretty_print, + ) for subGate_ in self.sub_gates: - subGate_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='subGate', pretty_print=pretty_print) + subGate_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="subGate", + pretty_print=pretty_print, + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -13282,120 +23979,230 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('instances', node) - if value is not None and 'instances' not in already_processed: - already_processed.add('instances') + value = find_attr_value_("instances", node) + if value is not None and "instances" not in already_processed: + already_processed.add("instances") try: self.instances = int(value) except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + raise_parse_error(node, "Bad integer attribute: %s" % exp) if self.instances <= 0: - raise_parse_error(node, 'Invalid PositiveInteger') - self.validate_PositiveInteger(self.instances) # validate type PositiveInteger + 
raise_parse_error(node, "Invalid PositiveInteger") + self.validate_PositiveInteger( + self.instances + ) # validate type PositiveInteger super(GateFractional, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'notes': + if nodeName_ == "notes": notes_ = child_.text - notes_ = self.gds_validate_string(notes_, node, 'notes') + notes_ = self.gds_validate_string(notes_, node, "notes") self.notes = notes_ # validate type Notes self.validate_Notes(self.notes) - elif nodeName_ == 'q10Settings': + elif nodeName_ == "q10Settings": obj_ = Q10Settings.factory(parent_object_=self) obj_.build(child_) self.q10_settings = obj_ - obj_.original_tagname_ = 'q10Settings' - elif nodeName_ == 'subGate': + obj_.original_tagname_ = "q10Settings" + elif nodeName_ == "subGate": obj_ = GateFractionalSubgate.factory(parent_object_=self) obj_.build(child_) self.sub_gates.append(obj_) - obj_.original_tagname_ = 'subGate' + obj_.original_tagname_ = "subGate" super(GateFractional, self).buildChildren(child_, node, nodeName_, True) + + # end class GateFractional class GateHHInstantaneous(Base): member_data_items_ = [ - MemberSpec_('instances', 'PositiveInteger', 0, 0, {'use': u'required'}), - MemberSpec_('notes', ['Notes', 'xs:string'], 0, 1, {u'type': u'xs:string', u'name': u'notes', u'minOccurs': u'0'}, None), - MemberSpec_('steady_state', 'HHVariable', 0, 0, {u'type': u'HHVariable', u'name': u'steadyState', u'minOccurs': u'1'}, None), + MemberSpec_("instances", "PositiveInteger", 0, 0, {"use": u"required"}), + MemberSpec_( + "notes", + ["Notes", "xs:string"], + 0, + 1, + {u"type": u"xs:string", u"name": u"notes", u"minOccurs": u"0"}, + None, + ), + MemberSpec_( + "steady_state", + "HHVariable", + 0, + 0, + {u"type": u"HHVariable", u"name": u"steadyState", u"minOccurs": u"1"}, + None, + ), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, instances=None, notes=None, steady_state=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + instances=None, + notes=None, + steady_state=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(GateHHInstantaneous, self).__init__(neuro_lex_id, id, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(GateHHInstantaneous, self).__init__(neuro_lex_id, id, **kwargs_) self.instances = _cast(int, instances) self.notes = notes self.validate_Notes(self.notes) self.steady_state = steady_state + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, GateHHInstantaneous) + CurrentSubclassModule_, GateHHInstantaneous + ) if subclass is not None: return subclass(*args_, **kwargs_) if GateHHInstantaneous.subclass: return GateHHInstantaneous.subclass(*args_, **kwargs_) else: return GateHHInstantaneous(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Notes(self, value): # Validate type Notes, a restriction on xs:string. if value is not None and Validate_simpletypes_: pass + def validate_PositiveInteger(self, value): # Validate type PositiveInteger, a restriction on xs:positiveInteger. 
if value is not None and Validate_simpletypes_: pass + def hasContent_(self): if ( - self.notes is not None or - self.steady_state is not None or - super(GateHHInstantaneous, self).hasContent_() + self.notes is not None + or self.steady_state is not None + or super(GateHHInstantaneous, self).hasContent_() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GateHHInstantaneous', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('GateHHInstantaneous') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="GateHHInstantaneous", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("GateHHInstantaneous") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateHHInstantaneous') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="GateHHInstantaneous", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='GateHHInstantaneous', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="GateHHInstantaneous", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GateHHInstantaneous'): - super(GateHHInstantaneous, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateHHInstantaneous') - if self.instances is not None and 'instances' not in already_processed: - already_processed.add('instances') - outfile.write(' instances=%s' % (quote_attrib(self.instances), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GateHHInstantaneous', fromsubclass_=False, pretty_print=True): - super(GateHHInstantaneous, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="GateHHInstantaneous", + ): + super(GateHHInstantaneous, self).exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="GateHHInstantaneous", + ) + if self.instances is not None and "instances" not in already_processed: + already_processed.add("instances") + outfile.write(" instances=%s" % (quote_attrib(self.instances),)) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="GateHHInstantaneous", + fromsubclass_=False, + pretty_print=True, + ): + super(GateHHInstantaneous, self).exportChildren( + outfile, level, namespaceprefix_, name_, 
True, pretty_print=pretty_print + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.notes is not None: showIndent(outfile, level, pretty_print) - outfile.write('<%snotes>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.notes), input_name='notes')), namespaceprefix_ , eol_)) + outfile.write( + "<%snotes>%s%s" + % ( + namespaceprefix_, + self.gds_encode( + self.gds_format_string( + quote_xml(self.notes), input_name="notes" + ) + ), + namespaceprefix_, + eol_, + ) + ) if self.steady_state is not None: - self.steady_state.export(outfile, level, namespaceprefix_, namespacedef_='', name_='steadyState', pretty_print=pretty_print) + self.steady_state.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="steadyState", + pretty_print=pretty_print, + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -13403,49 +24210,102 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('instances', node) - if value is not None and 'instances' not in already_processed: - already_processed.add('instances') + value = find_attr_value_("instances", node) + if value is not None and "instances" not in already_processed: + already_processed.add("instances") try: self.instances = int(value) except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + raise_parse_error(node, "Bad integer attribute: %s" % exp) if self.instances <= 0: - raise_parse_error(node, 'Invalid PositiveInteger') - self.validate_PositiveInteger(self.instances) # validate type PositiveInteger + raise_parse_error(node, "Invalid PositiveInteger") + self.validate_PositiveInteger( + self.instances + ) # validate type PositiveInteger super(GateHHInstantaneous, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'notes': + if nodeName_ == "notes": notes_ = child_.text - notes_ = self.gds_validate_string(notes_, node, 'notes') + notes_ = self.gds_validate_string(notes_, node, "notes") self.notes = notes_ # validate type Notes self.validate_Notes(self.notes) - elif nodeName_ == 'steadyState': + elif nodeName_ == "steadyState": obj_ = HHVariable.factory(parent_object_=self) obj_.build(child_) self.steady_state = obj_ - obj_.original_tagname_ = 'steadyState' + obj_.original_tagname_ = "steadyState" super(GateHHInstantaneous, self).buildChildren(child_, node, nodeName_, True) + + # end class GateHHInstantaneous class GateHHRatesInf(Base): member_data_items_ = [ - MemberSpec_('instances', 'PositiveInteger', 0, 0, {'use': u'required'}), - MemberSpec_('notes', ['Notes', 'xs:string'], 0, 1, {u'type': u'xs:string', u'name': u'notes', u'minOccurs': u'0'}, None), - MemberSpec_('q10_settings', 'Q10Settings', 0, 1, {u'type': u'Q10Settings', u'name': u'q10Settings', u'minOccurs': u'0'}, None), - MemberSpec_('forward_rate', 'HHRate', 0, 0, {u'type': u'HHRate', u'name': u'forwardRate', u'minOccurs': u'1'}, None), - MemberSpec_('reverse_rate', 'HHRate', 0, 0, {u'type': u'HHRate', u'name': u'reverseRate', u'minOccurs': u'1'}, None), - MemberSpec_('steady_state', 'HHVariable', 0, 0, {u'type': u'HHVariable', u'name': u'steadyState', u'minOccurs': u'1'}, None), + MemberSpec_("instances", "PositiveInteger", 0, 0, {"use": u"required"}), + 
MemberSpec_( + "notes", + ["Notes", "xs:string"], + 0, + 1, + {u"type": u"xs:string", u"name": u"notes", u"minOccurs": u"0"}, + None, + ), + MemberSpec_( + "q10_settings", + "Q10Settings", + 0, + 1, + {u"type": u"Q10Settings", u"name": u"q10Settings", u"minOccurs": u"0"}, + None, + ), + MemberSpec_( + "forward_rate", + "HHRate", + 0, + 0, + {u"type": u"HHRate", u"name": u"forwardRate", u"minOccurs": u"1"}, + None, + ), + MemberSpec_( + "reverse_rate", + "HHRate", + 0, + 0, + {u"type": u"HHRate", u"name": u"reverseRate", u"minOccurs": u"1"}, + None, + ), + MemberSpec_( + "steady_state", + "HHVariable", + 0, + 0, + {u"type": u"HHVariable", u"name": u"steadyState", u"minOccurs": u"1"}, + None, + ), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, instances=None, notes=None, q10_settings=None, forward_rate=None, reverse_rate=None, steady_state=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + instances=None, + notes=None, + q10_settings=None, + forward_rate=None, + reverse_rate=None, + steady_state=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(GateHHRatesInf, self).__init__(neuro_lex_id, id, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(GateHHRatesInf, self).__init__(neuro_lex_id, id, **kwargs_) self.instances = _cast(int, instances) self.notes = notes self.validate_Notes(self.notes) @@ -13453,80 +24313,172 @@ def __init__(self, neuro_lex_id=None, id=None, instances=None, notes=None, q10_s self.forward_rate = forward_rate self.reverse_rate = reverse_rate self.steady_state = steady_state + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, GateHHRatesInf) + subclass = getSubclassFromModule_(CurrentSubclassModule_, GateHHRatesInf) if subclass is not None: return subclass(*args_, **kwargs_) if GateHHRatesInf.subclass: return GateHHRatesInf.subclass(*args_, **kwargs_) else: return GateHHRatesInf(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Notes(self, value): # Validate type Notes, a restriction on xs:string. if value is not None and Validate_simpletypes_: pass + def validate_PositiveInteger(self, value): # Validate type PositiveInteger, a restriction on xs:positiveInteger. 
if value is not None and Validate_simpletypes_: pass + def hasContent_(self): if ( - self.notes is not None or - self.q10_settings is not None or - self.forward_rate is not None or - self.reverse_rate is not None or - self.steady_state is not None or - super(GateHHRatesInf, self).hasContent_() + self.notes is not None + or self.q10_settings is not None + or self.forward_rate is not None + or self.reverse_rate is not None + or self.steady_state is not None + or super(GateHHRatesInf, self).hasContent_() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GateHHRatesInf', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('GateHHRatesInf') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="GateHHRatesInf", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("GateHHRatesInf") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateHHRatesInf') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="GateHHRatesInf" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='GateHHRatesInf', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="GateHHRatesInf", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GateHHRatesInf'): - super(GateHHRatesInf, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateHHRatesInf') - if self.instances is not None and 'instances' not in already_processed: - already_processed.add('instances') - outfile.write(' instances=%s' % (quote_attrib(self.instances), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GateHHRatesInf', fromsubclass_=False, pretty_print=True): - super(GateHHRatesInf, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="GateHHRatesInf", + ): + super(GateHHRatesInf, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="GateHHRatesInf" + ) + if self.instances is not None and "instances" not in already_processed: + already_processed.add("instances") + outfile.write(" instances=%s" % (quote_attrib(self.instances),)) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="GateHHRatesInf", + fromsubclass_=False, + pretty_print=True, + ): + super(GateHHRatesInf, 
self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.notes is not None: showIndent(outfile, level, pretty_print) - outfile.write('<%snotes>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.notes), input_name='notes')), namespaceprefix_ , eol_)) + outfile.write( + "<%snotes>%s%s" + % ( + namespaceprefix_, + self.gds_encode( + self.gds_format_string( + quote_xml(self.notes), input_name="notes" + ) + ), + namespaceprefix_, + eol_, + ) + ) if self.q10_settings is not None: - self.q10_settings.export(outfile, level, namespaceprefix_, namespacedef_='', name_='q10Settings', pretty_print=pretty_print) + self.q10_settings.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="q10Settings", + pretty_print=pretty_print, + ) if self.forward_rate is not None: - self.forward_rate.export(outfile, level, namespaceprefix_, namespacedef_='', name_='forwardRate', pretty_print=pretty_print) + self.forward_rate.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="forwardRate", + pretty_print=pretty_print, + ) if self.reverse_rate is not None: - self.reverse_rate.export(outfile, level, namespaceprefix_, namespacedef_='', name_='reverseRate', pretty_print=pretty_print) + self.reverse_rate.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="reverseRate", + pretty_print=pretty_print, + ) if self.steady_state is not None: - self.steady_state.export(outfile, level, namespaceprefix_, namespacedef_='', name_='steadyState', pretty_print=pretty_print) + self.steady_state.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="steadyState", + pretty_print=pretty_print, + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -13534,64 +24486,117 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('instances', node) - if value is not None and 'instances' not in already_processed: - already_processed.add('instances') + value = find_attr_value_("instances", node) + if value is not None and "instances" not in already_processed: + already_processed.add("instances") try: self.instances = int(value) except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + raise_parse_error(node, "Bad integer attribute: %s" % exp) if self.instances <= 0: - raise_parse_error(node, 'Invalid PositiveInteger') - self.validate_PositiveInteger(self.instances) # validate type PositiveInteger + raise_parse_error(node, "Invalid PositiveInteger") + self.validate_PositiveInteger( + self.instances + ) # validate type PositiveInteger super(GateHHRatesInf, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'notes': + if nodeName_ == "notes": notes_ = child_.text - notes_ = self.gds_validate_string(notes_, node, 'notes') + notes_ = self.gds_validate_string(notes_, node, "notes") self.notes = notes_ # validate type Notes self.validate_Notes(self.notes) - elif nodeName_ == 'q10Settings': + elif nodeName_ == "q10Settings": obj_ = Q10Settings.factory(parent_object_=self) obj_.build(child_) self.q10_settings = obj_ - obj_.original_tagname_ = 'q10Settings' - elif 
nodeName_ == 'forwardRate': + obj_.original_tagname_ = "q10Settings" + elif nodeName_ == "forwardRate": obj_ = HHRate.factory(parent_object_=self) obj_.build(child_) self.forward_rate = obj_ - obj_.original_tagname_ = 'forwardRate' - elif nodeName_ == 'reverseRate': + obj_.original_tagname_ = "forwardRate" + elif nodeName_ == "reverseRate": obj_ = HHRate.factory(parent_object_=self) obj_.build(child_) self.reverse_rate = obj_ - obj_.original_tagname_ = 'reverseRate' - elif nodeName_ == 'steadyState': + obj_.original_tagname_ = "reverseRate" + elif nodeName_ == "steadyState": obj_ = HHVariable.factory(parent_object_=self) obj_.build(child_) self.steady_state = obj_ - obj_.original_tagname_ = 'steadyState' + obj_.original_tagname_ = "steadyState" super(GateHHRatesInf, self).buildChildren(child_, node, nodeName_, True) + + # end class GateHHRatesInf class GateHHRatesTau(Base): member_data_items_ = [ - MemberSpec_('instances', 'PositiveInteger', 0, 0, {'use': u'required'}), - MemberSpec_('notes', ['Notes', 'xs:string'], 0, 1, {u'type': u'xs:string', u'name': u'notes', u'minOccurs': u'0'}, None), - MemberSpec_('q10_settings', 'Q10Settings', 0, 1, {u'type': u'Q10Settings', u'name': u'q10Settings', u'minOccurs': u'0'}, None), - MemberSpec_('forward_rate', 'HHRate', 0, 0, {u'type': u'HHRate', u'name': u'forwardRate', u'minOccurs': u'1'}, None), - MemberSpec_('reverse_rate', 'HHRate', 0, 0, {u'type': u'HHRate', u'name': u'reverseRate', u'minOccurs': u'1'}, None), - MemberSpec_('time_course', 'HHTime', 0, 0, {u'type': u'HHTime', u'name': u'timeCourse', u'minOccurs': u'1'}, None), + MemberSpec_("instances", "PositiveInteger", 0, 0, {"use": u"required"}), + MemberSpec_( + "notes", + ["Notes", "xs:string"], + 0, + 1, + {u"type": u"xs:string", u"name": u"notes", u"minOccurs": u"0"}, + None, + ), + MemberSpec_( + "q10_settings", + "Q10Settings", + 0, + 1, + {u"type": u"Q10Settings", u"name": u"q10Settings", u"minOccurs": u"0"}, + None, + ), + MemberSpec_( + "forward_rate", + "HHRate", + 0, + 0, + {u"type": u"HHRate", u"name": u"forwardRate", u"minOccurs": u"1"}, + None, + ), + MemberSpec_( + "reverse_rate", + "HHRate", + 0, + 0, + {u"type": u"HHRate", u"name": u"reverseRate", u"minOccurs": u"1"}, + None, + ), + MemberSpec_( + "time_course", + "HHTime", + 0, + 0, + {u"type": u"HHTime", u"name": u"timeCourse", u"minOccurs": u"1"}, + None, + ), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, instances=None, notes=None, q10_settings=None, forward_rate=None, reverse_rate=None, time_course=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + instances=None, + notes=None, + q10_settings=None, + forward_rate=None, + reverse_rate=None, + time_course=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(GateHHRatesTau, self).__init__(neuro_lex_id, id, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(GateHHRatesTau, self).__init__(neuro_lex_id, id, **kwargs_) self.instances = _cast(int, instances) self.notes = notes self.validate_Notes(self.notes) @@ -13599,80 +24604,172 @@ def __init__(self, neuro_lex_id=None, id=None, instances=None, notes=None, q10_s self.forward_rate = forward_rate self.reverse_rate = reverse_rate self.time_course = time_course + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, GateHHRatesTau) + subclass = getSubclassFromModule_(CurrentSubclassModule_, 
GateHHRatesTau) if subclass is not None: return subclass(*args_, **kwargs_) if GateHHRatesTau.subclass: return GateHHRatesTau.subclass(*args_, **kwargs_) else: return GateHHRatesTau(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Notes(self, value): # Validate type Notes, a restriction on xs:string. if value is not None and Validate_simpletypes_: pass + def validate_PositiveInteger(self, value): # Validate type PositiveInteger, a restriction on xs:positiveInteger. if value is not None and Validate_simpletypes_: pass + def hasContent_(self): if ( - self.notes is not None or - self.q10_settings is not None or - self.forward_rate is not None or - self.reverse_rate is not None or - self.time_course is not None or - super(GateHHRatesTau, self).hasContent_() + self.notes is not None + or self.q10_settings is not None + or self.forward_rate is not None + or self.reverse_rate is not None + or self.time_course is not None + or super(GateHHRatesTau, self).hasContent_() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GateHHRatesTau', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('GateHHRatesTau') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="GateHHRatesTau", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("GateHHRatesTau") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateHHRatesTau') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="GateHHRatesTau" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='GateHHRatesTau', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="GateHHRatesTau", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GateHHRatesTau'): - super(GateHHRatesTau, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateHHRatesTau') - if self.instances is not None and 'instances' not in already_processed: - already_processed.add('instances') - outfile.write(' instances=%s' % (quote_attrib(self.instances), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GateHHRatesTau', fromsubclass_=False, pretty_print=True): - super(GateHHRatesTau, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="GateHHRatesTau", + ): + 
super(GateHHRatesTau, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="GateHHRatesTau" + ) + if self.instances is not None and "instances" not in already_processed: + already_processed.add("instances") + outfile.write(" instances=%s" % (quote_attrib(self.instances),)) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="GateHHRatesTau", + fromsubclass_=False, + pretty_print=True, + ): + super(GateHHRatesTau, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.notes is not None: showIndent(outfile, level, pretty_print) - outfile.write('<%snotes>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.notes), input_name='notes')), namespaceprefix_ , eol_)) + outfile.write( + "<%snotes>%s%s" + % ( + namespaceprefix_, + self.gds_encode( + self.gds_format_string( + quote_xml(self.notes), input_name="notes" + ) + ), + namespaceprefix_, + eol_, + ) + ) if self.q10_settings is not None: - self.q10_settings.export(outfile, level, namespaceprefix_, namespacedef_='', name_='q10Settings', pretty_print=pretty_print) + self.q10_settings.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="q10Settings", + pretty_print=pretty_print, + ) if self.forward_rate is not None: - self.forward_rate.export(outfile, level, namespaceprefix_, namespacedef_='', name_='forwardRate', pretty_print=pretty_print) + self.forward_rate.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="forwardRate", + pretty_print=pretty_print, + ) if self.reverse_rate is not None: - self.reverse_rate.export(outfile, level, namespaceprefix_, namespacedef_='', name_='reverseRate', pretty_print=pretty_print) + self.reverse_rate.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="reverseRate", + pretty_print=pretty_print, + ) if self.time_course is not None: - self.time_course.export(outfile, level, namespaceprefix_, namespacedef_='', name_='timeCourse', pretty_print=pretty_print) + self.time_course.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="timeCourse", + pretty_print=pretty_print, + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -13680,65 +24777,126 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('instances', node) - if value is not None and 'instances' not in already_processed: - already_processed.add('instances') + value = find_attr_value_("instances", node) + if value is not None and "instances" not in already_processed: + already_processed.add("instances") try: self.instances = int(value) except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + raise_parse_error(node, "Bad integer attribute: %s" % exp) if self.instances <= 0: - raise_parse_error(node, 'Invalid PositiveInteger') - self.validate_PositiveInteger(self.instances) # validate type PositiveInteger + raise_parse_error(node, "Invalid PositiveInteger") + self.validate_PositiveInteger( + self.instances + ) # validate type PositiveInteger super(GateHHRatesTau, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, 
fromsubclass_=False): - if nodeName_ == 'notes': + if nodeName_ == "notes": notes_ = child_.text - notes_ = self.gds_validate_string(notes_, node, 'notes') + notes_ = self.gds_validate_string(notes_, node, "notes") self.notes = notes_ # validate type Notes self.validate_Notes(self.notes) - elif nodeName_ == 'q10Settings': + elif nodeName_ == "q10Settings": obj_ = Q10Settings.factory(parent_object_=self) obj_.build(child_) self.q10_settings = obj_ - obj_.original_tagname_ = 'q10Settings' - elif nodeName_ == 'forwardRate': + obj_.original_tagname_ = "q10Settings" + elif nodeName_ == "forwardRate": obj_ = HHRate.factory(parent_object_=self) obj_.build(child_) self.forward_rate = obj_ - obj_.original_tagname_ = 'forwardRate' - elif nodeName_ == 'reverseRate': + obj_.original_tagname_ = "forwardRate" + elif nodeName_ == "reverseRate": obj_ = HHRate.factory(parent_object_=self) obj_.build(child_) self.reverse_rate = obj_ - obj_.original_tagname_ = 'reverseRate' - elif nodeName_ == 'timeCourse': + obj_.original_tagname_ = "reverseRate" + elif nodeName_ == "timeCourse": obj_ = HHTime.factory(parent_object_=self) obj_.build(child_) self.time_course = obj_ - obj_.original_tagname_ = 'timeCourse' + obj_.original_tagname_ = "timeCourse" super(GateHHRatesTau, self).buildChildren(child_, node, nodeName_, True) + + # end class GateHHRatesTau class GateHHRatesTauInf(Base): member_data_items_ = [ - MemberSpec_('instances', 'PositiveInteger', 0, 0, {'use': u'required'}), - MemberSpec_('notes', ['Notes', 'xs:string'], 0, 1, {u'type': u'xs:string', u'name': u'notes', u'minOccurs': u'0'}, None), - MemberSpec_('q10_settings', 'Q10Settings', 0, 1, {u'type': u'Q10Settings', u'name': u'q10Settings', u'minOccurs': u'0'}, None), - MemberSpec_('forward_rate', 'HHRate', 0, 0, {u'type': u'HHRate', u'name': u'forwardRate', u'minOccurs': u'1'}, None), - MemberSpec_('reverse_rate', 'HHRate', 0, 0, {u'type': u'HHRate', u'name': u'reverseRate', u'minOccurs': u'1'}, None), - MemberSpec_('time_course', 'HHTime', 0, 0, {u'type': u'HHTime', u'name': u'timeCourse', u'minOccurs': u'1'}, None), - MemberSpec_('steady_state', 'HHVariable', 0, 0, {u'type': u'HHVariable', u'name': u'steadyState', u'minOccurs': u'1'}, None), + MemberSpec_("instances", "PositiveInteger", 0, 0, {"use": u"required"}), + MemberSpec_( + "notes", + ["Notes", "xs:string"], + 0, + 1, + {u"type": u"xs:string", u"name": u"notes", u"minOccurs": u"0"}, + None, + ), + MemberSpec_( + "q10_settings", + "Q10Settings", + 0, + 1, + {u"type": u"Q10Settings", u"name": u"q10Settings", u"minOccurs": u"0"}, + None, + ), + MemberSpec_( + "forward_rate", + "HHRate", + 0, + 0, + {u"type": u"HHRate", u"name": u"forwardRate", u"minOccurs": u"1"}, + None, + ), + MemberSpec_( + "reverse_rate", + "HHRate", + 0, + 0, + {u"type": u"HHRate", u"name": u"reverseRate", u"minOccurs": u"1"}, + None, + ), + MemberSpec_( + "time_course", + "HHTime", + 0, + 0, + {u"type": u"HHTime", u"name": u"timeCourse", u"minOccurs": u"1"}, + None, + ), + MemberSpec_( + "steady_state", + "HHVariable", + 0, + 0, + {u"type": u"HHVariable", u"name": u"steadyState", u"minOccurs": u"1"}, + None, + ), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, instances=None, notes=None, q10_settings=None, forward_rate=None, reverse_rate=None, time_course=None, steady_state=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + instances=None, + notes=None, + q10_settings=None, + forward_rate=None, + reverse_rate=None, + time_course=None, + steady_state=None, + 
**kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(GateHHRatesTauInf, self).__init__(neuro_lex_id, id, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(GateHHRatesTauInf, self).__init__(neuro_lex_id, id, **kwargs_) self.instances = _cast(int, instances) self.notes = notes self.validate_Notes(self.notes) @@ -13747,83 +24905,190 @@ def __init__(self, neuro_lex_id=None, id=None, instances=None, notes=None, q10_s self.reverse_rate = reverse_rate self.time_course = time_course self.steady_state = steady_state + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, GateHHRatesTauInf) + subclass = getSubclassFromModule_(CurrentSubclassModule_, GateHHRatesTauInf) if subclass is not None: return subclass(*args_, **kwargs_) if GateHHRatesTauInf.subclass: return GateHHRatesTauInf.subclass(*args_, **kwargs_) else: return GateHHRatesTauInf(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Notes(self, value): # Validate type Notes, a restriction on xs:string. if value is not None and Validate_simpletypes_: pass + def validate_PositiveInteger(self, value): # Validate type PositiveInteger, a restriction on xs:positiveInteger. if value is not None and Validate_simpletypes_: pass + def hasContent_(self): if ( - self.notes is not None or - self.q10_settings is not None or - self.forward_rate is not None or - self.reverse_rate is not None or - self.time_course is not None or - self.steady_state is not None or - super(GateHHRatesTauInf, self).hasContent_() + self.notes is not None + or self.q10_settings is not None + or self.forward_rate is not None + or self.reverse_rate is not None + or self.time_course is not None + or self.steady_state is not None + or super(GateHHRatesTauInf, self).hasContent_() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GateHHRatesTauInf', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('GateHHRatesTauInf') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="GateHHRatesTauInf", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("GateHHRatesTauInf") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateHHRatesTauInf') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="GateHHRatesTauInf", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='GateHHRatesTauInf', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="GateHHRatesTauInf", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - 
outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GateHHRatesTauInf'): - super(GateHHRatesTauInf, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateHHRatesTauInf') - if self.instances is not None and 'instances' not in already_processed: - already_processed.add('instances') - outfile.write(' instances=%s' % (quote_attrib(self.instances), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GateHHRatesTauInf', fromsubclass_=False, pretty_print=True): - super(GateHHRatesTauInf, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="GateHHRatesTauInf", + ): + super(GateHHRatesTauInf, self).exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="GateHHRatesTauInf", + ) + if self.instances is not None and "instances" not in already_processed: + already_processed.add("instances") + outfile.write(" instances=%s" % (quote_attrib(self.instances),)) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="GateHHRatesTauInf", + fromsubclass_=False, + pretty_print=True, + ): + super(GateHHRatesTauInf, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.notes is not None: showIndent(outfile, level, pretty_print) - outfile.write('<%snotes>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.notes), input_name='notes')), namespaceprefix_ , eol_)) + outfile.write( + "<%snotes>%s%s" + % ( + namespaceprefix_, + self.gds_encode( + self.gds_format_string( + quote_xml(self.notes), input_name="notes" + ) + ), + namespaceprefix_, + eol_, + ) + ) if self.q10_settings is not None: - self.q10_settings.export(outfile, level, namespaceprefix_, namespacedef_='', name_='q10Settings', pretty_print=pretty_print) + self.q10_settings.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="q10Settings", + pretty_print=pretty_print, + ) if self.forward_rate is not None: - self.forward_rate.export(outfile, level, namespaceprefix_, namespacedef_='', name_='forwardRate', pretty_print=pretty_print) + self.forward_rate.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="forwardRate", + pretty_print=pretty_print, + ) if self.reverse_rate is not None: - self.reverse_rate.export(outfile, level, namespaceprefix_, namespacedef_='', name_='reverseRate', pretty_print=pretty_print) + self.reverse_rate.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="reverseRate", + pretty_print=pretty_print, + ) if self.time_course is not None: - self.time_course.export(outfile, level, namespaceprefix_, namespacedef_='', name_='timeCourse', pretty_print=pretty_print) + self.time_course.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="timeCourse", + pretty_print=pretty_print, + ) if self.steady_state is not None: - self.steady_state.export(outfile, level, namespaceprefix_, namespacedef_='', name_='steadyState', pretty_print=pretty_print) + self.steady_state.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + 
name_="steadyState", + pretty_print=pretty_print, + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -13831,145 +25096,275 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('instances', node) - if value is not None and 'instances' not in already_processed: - already_processed.add('instances') + value = find_attr_value_("instances", node) + if value is not None and "instances" not in already_processed: + already_processed.add("instances") try: self.instances = int(value) except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + raise_parse_error(node, "Bad integer attribute: %s" % exp) if self.instances <= 0: - raise_parse_error(node, 'Invalid PositiveInteger') - self.validate_PositiveInteger(self.instances) # validate type PositiveInteger + raise_parse_error(node, "Invalid PositiveInteger") + self.validate_PositiveInteger( + self.instances + ) # validate type PositiveInteger super(GateHHRatesTauInf, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'notes': + if nodeName_ == "notes": notes_ = child_.text - notes_ = self.gds_validate_string(notes_, node, 'notes') + notes_ = self.gds_validate_string(notes_, node, "notes") self.notes = notes_ # validate type Notes self.validate_Notes(self.notes) - elif nodeName_ == 'q10Settings': + elif nodeName_ == "q10Settings": obj_ = Q10Settings.factory(parent_object_=self) obj_.build(child_) self.q10_settings = obj_ - obj_.original_tagname_ = 'q10Settings' - elif nodeName_ == 'forwardRate': + obj_.original_tagname_ = "q10Settings" + elif nodeName_ == "forwardRate": obj_ = HHRate.factory(parent_object_=self) obj_.build(child_) self.forward_rate = obj_ - obj_.original_tagname_ = 'forwardRate' - elif nodeName_ == 'reverseRate': + obj_.original_tagname_ = "forwardRate" + elif nodeName_ == "reverseRate": obj_ = HHRate.factory(parent_object_=self) obj_.build(child_) self.reverse_rate = obj_ - obj_.original_tagname_ = 'reverseRate' - elif nodeName_ == 'timeCourse': + obj_.original_tagname_ = "reverseRate" + elif nodeName_ == "timeCourse": obj_ = HHTime.factory(parent_object_=self) obj_.build(child_) self.time_course = obj_ - obj_.original_tagname_ = 'timeCourse' - elif nodeName_ == 'steadyState': + obj_.original_tagname_ = "timeCourse" + elif nodeName_ == "steadyState": obj_ = HHVariable.factory(parent_object_=self) obj_.build(child_) self.steady_state = obj_ - obj_.original_tagname_ = 'steadyState' + obj_.original_tagname_ = "steadyState" super(GateHHRatesTauInf, self).buildChildren(child_, node, nodeName_, True) + + # end class GateHHRatesTauInf class GateHHTauInf(Base): member_data_items_ = [ - MemberSpec_('instances', 'PositiveInteger', 0, 0, {'use': u'required'}), - MemberSpec_('notes', ['Notes', 'xs:string'], 0, 1, {u'type': u'xs:string', u'name': u'notes', u'minOccurs': u'0'}, None), - MemberSpec_('q10_settings', 'Q10Settings', 0, 1, {u'type': u'Q10Settings', u'name': u'q10Settings', u'minOccurs': u'0'}, None), - MemberSpec_('time_course', 'HHTime', 0, 0, {u'type': u'HHTime', u'name': u'timeCourse', u'minOccurs': u'1'}, None), - MemberSpec_('steady_state', 'HHVariable', 0, 0, {u'type': u'HHVariable', u'name': u'steadyState', u'minOccurs': u'1'}, None), + MemberSpec_("instances", 
"PositiveInteger", 0, 0, {"use": u"required"}), + MemberSpec_( + "notes", + ["Notes", "xs:string"], + 0, + 1, + {u"type": u"xs:string", u"name": u"notes", u"minOccurs": u"0"}, + None, + ), + MemberSpec_( + "q10_settings", + "Q10Settings", + 0, + 1, + {u"type": u"Q10Settings", u"name": u"q10Settings", u"minOccurs": u"0"}, + None, + ), + MemberSpec_( + "time_course", + "HHTime", + 0, + 0, + {u"type": u"HHTime", u"name": u"timeCourse", u"minOccurs": u"1"}, + None, + ), + MemberSpec_( + "steady_state", + "HHVariable", + 0, + 0, + {u"type": u"HHVariable", u"name": u"steadyState", u"minOccurs": u"1"}, + None, + ), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, instances=None, notes=None, q10_settings=None, time_course=None, steady_state=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + instances=None, + notes=None, + q10_settings=None, + time_course=None, + steady_state=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(GateHHTauInf, self).__init__(neuro_lex_id, id, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(GateHHTauInf, self).__init__(neuro_lex_id, id, **kwargs_) self.instances = _cast(int, instances) self.notes = notes self.validate_Notes(self.notes) self.q10_settings = q10_settings self.time_course = time_course self.steady_state = steady_state + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, GateHHTauInf) + subclass = getSubclassFromModule_(CurrentSubclassModule_, GateHHTauInf) if subclass is not None: return subclass(*args_, **kwargs_) if GateHHTauInf.subclass: return GateHHTauInf.subclass(*args_, **kwargs_) else: return GateHHTauInf(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Notes(self, value): # Validate type Notes, a restriction on xs:string. if value is not None and Validate_simpletypes_: pass + def validate_PositiveInteger(self, value): # Validate type PositiveInteger, a restriction on xs:positiveInteger. 
if value is not None and Validate_simpletypes_: pass + def hasContent_(self): if ( - self.notes is not None or - self.q10_settings is not None or - self.time_course is not None or - self.steady_state is not None or - super(GateHHTauInf, self).hasContent_() + self.notes is not None + or self.q10_settings is not None + or self.time_course is not None + or self.steady_state is not None + or super(GateHHTauInf, self).hasContent_() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GateHHTauInf', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('GateHHTauInf') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="GateHHTauInf", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("GateHHTauInf") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateHHTauInf') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="GateHHTauInf" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='GateHHTauInf', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="GateHHTauInf", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GateHHTauInf'): - super(GateHHTauInf, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateHHTauInf') - if self.instances is not None and 'instances' not in already_processed: - already_processed.add('instances') - outfile.write(' instances=%s' % (quote_attrib(self.instances), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GateHHTauInf', fromsubclass_=False, pretty_print=True): - super(GateHHTauInf, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="GateHHTauInf", + ): + super(GateHHTauInf, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="GateHHTauInf" + ) + if self.instances is not None and "instances" not in already_processed: + already_processed.add("instances") + outfile.write(" instances=%s" % (quote_attrib(self.instances),)) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="GateHHTauInf", + fromsubclass_=False, + pretty_print=True, + ): + super(GateHHTauInf, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) if pretty_print: 
- eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.notes is not None: showIndent(outfile, level, pretty_print) - outfile.write('<%snotes>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.notes), input_name='notes')), namespaceprefix_ , eol_)) + outfile.write( + "<%snotes>%s%s" + % ( + namespaceprefix_, + self.gds_encode( + self.gds_format_string( + quote_xml(self.notes), input_name="notes" + ) + ), + namespaceprefix_, + eol_, + ) + ) if self.q10_settings is not None: - self.q10_settings.export(outfile, level, namespaceprefix_, namespacedef_='', name_='q10Settings', pretty_print=pretty_print) + self.q10_settings.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="q10Settings", + pretty_print=pretty_print, + ) if self.time_course is not None: - self.time_course.export(outfile, level, namespaceprefix_, namespacedef_='', name_='timeCourse', pretty_print=pretty_print) + self.time_course.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="timeCourse", + pretty_print=pretty_print, + ) if self.steady_state is not None: - self.steady_state.export(outfile, level, namespaceprefix_, namespacedef_='', name_='steadyState', pretty_print=pretty_print) + self.steady_state.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="steadyState", + pretty_print=pretty_print, + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -13977,135 +25372,265 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('instances', node) - if value is not None and 'instances' not in already_processed: - already_processed.add('instances') + value = find_attr_value_("instances", node) + if value is not None and "instances" not in already_processed: + already_processed.add("instances") try: self.instances = int(value) except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + raise_parse_error(node, "Bad integer attribute: %s" % exp) if self.instances <= 0: - raise_parse_error(node, 'Invalid PositiveInteger') - self.validate_PositiveInteger(self.instances) # validate type PositiveInteger + raise_parse_error(node, "Invalid PositiveInteger") + self.validate_PositiveInteger( + self.instances + ) # validate type PositiveInteger super(GateHHTauInf, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'notes': + if nodeName_ == "notes": notes_ = child_.text - notes_ = self.gds_validate_string(notes_, node, 'notes') + notes_ = self.gds_validate_string(notes_, node, "notes") self.notes = notes_ # validate type Notes self.validate_Notes(self.notes) - elif nodeName_ == 'q10Settings': + elif nodeName_ == "q10Settings": obj_ = Q10Settings.factory(parent_object_=self) obj_.build(child_) self.q10_settings = obj_ - obj_.original_tagname_ = 'q10Settings' - elif nodeName_ == 'timeCourse': + obj_.original_tagname_ = "q10Settings" + elif nodeName_ == "timeCourse": obj_ = HHTime.factory(parent_object_=self) obj_.build(child_) self.time_course = obj_ - obj_.original_tagname_ = 'timeCourse' - elif nodeName_ == 'steadyState': + obj_.original_tagname_ = "timeCourse" + elif nodeName_ == "steadyState": obj_ = HHVariable.factory(parent_object_=self) obj_.build(child_) self.steady_state = obj_ - 
obj_.original_tagname_ = 'steadyState' + obj_.original_tagname_ = "steadyState" super(GateHHTauInf, self).buildChildren(child_, node, nodeName_, True) + + # end class GateHHTauInf class GateHHRates(Base): member_data_items_ = [ - MemberSpec_('instances', 'PositiveInteger', 0, 0, {'use': u'required'}), - MemberSpec_('notes', ['Notes', 'xs:string'], 0, 1, {u'type': u'xs:string', u'name': u'notes', u'minOccurs': u'0'}, None), - MemberSpec_('q10_settings', 'Q10Settings', 0, 1, {u'type': u'Q10Settings', u'name': u'q10Settings', u'minOccurs': u'0'}, None), - MemberSpec_('forward_rate', 'HHRate', 0, 0, {u'type': u'HHRate', u'name': u'forwardRate', u'minOccurs': u'1'}, None), - MemberSpec_('reverse_rate', 'HHRate', 0, 0, {u'type': u'HHRate', u'name': u'reverseRate', u'minOccurs': u'1'}, None), + MemberSpec_("instances", "PositiveInteger", 0, 0, {"use": u"required"}), + MemberSpec_( + "notes", + ["Notes", "xs:string"], + 0, + 1, + {u"type": u"xs:string", u"name": u"notes", u"minOccurs": u"0"}, + None, + ), + MemberSpec_( + "q10_settings", + "Q10Settings", + 0, + 1, + {u"type": u"Q10Settings", u"name": u"q10Settings", u"minOccurs": u"0"}, + None, + ), + MemberSpec_( + "forward_rate", + "HHRate", + 0, + 0, + {u"type": u"HHRate", u"name": u"forwardRate", u"minOccurs": u"1"}, + None, + ), + MemberSpec_( + "reverse_rate", + "HHRate", + 0, + 0, + {u"type": u"HHRate", u"name": u"reverseRate", u"minOccurs": u"1"}, + None, + ), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, instances=None, notes=None, q10_settings=None, forward_rate=None, reverse_rate=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + instances=None, + notes=None, + q10_settings=None, + forward_rate=None, + reverse_rate=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(GateHHRates, self).__init__(neuro_lex_id, id, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(GateHHRates, self).__init__(neuro_lex_id, id, **kwargs_) self.instances = _cast(int, instances) self.notes = notes self.validate_Notes(self.notes) self.q10_settings = q10_settings self.forward_rate = forward_rate self.reverse_rate = reverse_rate + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, GateHHRates) + subclass = getSubclassFromModule_(CurrentSubclassModule_, GateHHRates) if subclass is not None: return subclass(*args_, **kwargs_) if GateHHRates.subclass: return GateHHRates.subclass(*args_, **kwargs_) else: return GateHHRates(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Notes(self, value): # Validate type Notes, a restriction on xs:string. if value is not None and Validate_simpletypes_: pass + def validate_PositiveInteger(self, value): # Validate type PositiveInteger, a restriction on xs:positiveInteger. 
if value is not None and Validate_simpletypes_: pass + def hasContent_(self): if ( - self.notes is not None or - self.q10_settings is not None or - self.forward_rate is not None or - self.reverse_rate is not None or - super(GateHHRates, self).hasContent_() + self.notes is not None + or self.q10_settings is not None + or self.forward_rate is not None + or self.reverse_rate is not None + or super(GateHHRates, self).hasContent_() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GateHHRates', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('GateHHRates') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="GateHHRates", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("GateHHRates") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateHHRates') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="GateHHRates" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='GateHHRates', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="GateHHRates", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GateHHRates'): - super(GateHHRates, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateHHRates') - if self.instances is not None and 'instances' not in already_processed: - already_processed.add('instances') - outfile.write(' instances=%s' % (quote_attrib(self.instances), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GateHHRates', fromsubclass_=False, pretty_print=True): - super(GateHHRates, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="GateHHRates", + ): + super(GateHHRates, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="GateHHRates" + ) + if self.instances is not None and "instances" not in already_processed: + already_processed.add("instances") + outfile.write(" instances=%s" % (quote_attrib(self.instances),)) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="GateHHRates", + fromsubclass_=False, + pretty_print=True, + ): + super(GateHHRates, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) if pretty_print: - eol_ = '\n' + 
eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.notes is not None: showIndent(outfile, level, pretty_print) - outfile.write('<%snotes>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.notes), input_name='notes')), namespaceprefix_ , eol_)) + outfile.write( + "<%snotes>%s%s" + % ( + namespaceprefix_, + self.gds_encode( + self.gds_format_string( + quote_xml(self.notes), input_name="notes" + ) + ), + namespaceprefix_, + eol_, + ) + ) if self.q10_settings is not None: - self.q10_settings.export(outfile, level, namespaceprefix_, namespacedef_='', name_='q10Settings', pretty_print=pretty_print) + self.q10_settings.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="q10Settings", + pretty_print=pretty_print, + ) if self.forward_rate is not None: - self.forward_rate.export(outfile, level, namespaceprefix_, namespacedef_='', name_='forwardRate', pretty_print=pretty_print) + self.forward_rate.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="forwardRate", + pretty_print=pretty_print, + ) if self.reverse_rate is not None: - self.reverse_rate.export(outfile, level, namespaceprefix_, namespacedef_='', name_='reverseRate', pretty_print=pretty_print) + self.reverse_rate.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="reverseRate", + pretty_print=pretty_print, + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -14113,41 +25638,47 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('instances', node) - if value is not None and 'instances' not in already_processed: - already_processed.add('instances') + value = find_attr_value_("instances", node) + if value is not None and "instances" not in already_processed: + already_processed.add("instances") try: self.instances = int(value) except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + raise_parse_error(node, "Bad integer attribute: %s" % exp) if self.instances <= 0: - raise_parse_error(node, 'Invalid PositiveInteger') - self.validate_PositiveInteger(self.instances) # validate type PositiveInteger + raise_parse_error(node, "Invalid PositiveInteger") + self.validate_PositiveInteger( + self.instances + ) # validate type PositiveInteger super(GateHHRates, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'notes': + if nodeName_ == "notes": notes_ = child_.text - notes_ = self.gds_validate_string(notes_, node, 'notes') + notes_ = self.gds_validate_string(notes_, node, "notes") self.notes = notes_ # validate type Notes self.validate_Notes(self.notes) - elif nodeName_ == 'q10Settings': + elif nodeName_ == "q10Settings": obj_ = Q10Settings.factory(parent_object_=self) obj_.build(child_) self.q10_settings = obj_ - obj_.original_tagname_ = 'q10Settings' - elif nodeName_ == 'forwardRate': + obj_.original_tagname_ = "q10Settings" + elif nodeName_ == "forwardRate": obj_ = HHRate.factory(parent_object_=self) obj_.build(child_) self.forward_rate = obj_ - obj_.original_tagname_ = 'forwardRate' - elif nodeName_ == 'reverseRate': + obj_.original_tagname_ = "forwardRate" + elif nodeName_ == "reverseRate": obj_ = HHRate.factory(parent_object_=self) obj_.build(child_) self.reverse_rate = obj_ - 
obj_.original_tagname_ = 'reverseRate' + obj_.original_tagname_ = "reverseRate" super(GateHHRates, self).buildChildren(child_, node, nodeName_, True) + + # end class GateHHRates @@ -14155,23 +25686,93 @@ class GateHHUndetermined(Base): """Note all sub elements for gateHHrates, gateHHratesTau, gateFractional etc. allowed here. Which are valid should be constrained by what type is set""" + member_data_items_ = [ - MemberSpec_('instances', 'PositiveInteger', 0, 0, {'use': u'required'}), - MemberSpec_('type', 'gateTypes', 0, 0, {'use': u'required'}), - MemberSpec_('notes', ['Notes', 'xs:string'], 0, 1, {u'type': u'xs:string', u'name': u'notes', u'minOccurs': u'0'}, None), - MemberSpec_('q10_settings', 'Q10Settings', 0, 1, {u'type': u'Q10Settings', u'name': u'q10Settings', u'minOccurs': u'0'}, None), - MemberSpec_('forward_rate', 'HHRate', 0, 1, {u'type': u'HHRate', u'name': u'forwardRate', u'minOccurs': u'0'}, None), - MemberSpec_('reverse_rate', 'HHRate', 0, 1, {u'type': u'HHRate', u'name': u'reverseRate', u'minOccurs': u'0'}, None), - MemberSpec_('time_course', 'HHTime', 0, 1, {u'type': u'HHTime', u'name': u'timeCourse', u'minOccurs': u'0'}, None), - MemberSpec_('steady_state', 'HHVariable', 0, 1, {u'type': u'HHVariable', u'name': u'steadyState', u'minOccurs': u'0'}, None), - MemberSpec_('sub_gates', 'GateFractionalSubgate', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'GateFractionalSubgate', u'name': u'subGate', u'minOccurs': u'0'}, None), + MemberSpec_("instances", "PositiveInteger", 0, 0, {"use": u"required"}), + MemberSpec_("type", "gateTypes", 0, 0, {"use": u"required"}), + MemberSpec_( + "notes", + ["Notes", "xs:string"], + 0, + 1, + {u"type": u"xs:string", u"name": u"notes", u"minOccurs": u"0"}, + None, + ), + MemberSpec_( + "q10_settings", + "Q10Settings", + 0, + 1, + {u"type": u"Q10Settings", u"name": u"q10Settings", u"minOccurs": u"0"}, + None, + ), + MemberSpec_( + "forward_rate", + "HHRate", + 0, + 1, + {u"type": u"HHRate", u"name": u"forwardRate", u"minOccurs": u"0"}, + None, + ), + MemberSpec_( + "reverse_rate", + "HHRate", + 0, + 1, + {u"type": u"HHRate", u"name": u"reverseRate", u"minOccurs": u"0"}, + None, + ), + MemberSpec_( + "time_course", + "HHTime", + 0, + 1, + {u"type": u"HHTime", u"name": u"timeCourse", u"minOccurs": u"0"}, + None, + ), + MemberSpec_( + "steady_state", + "HHVariable", + 0, + 1, + {u"type": u"HHVariable", u"name": u"steadyState", u"minOccurs": u"0"}, + None, + ), + MemberSpec_( + "sub_gates", + "GateFractionalSubgate", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"GateFractionalSubgate", + u"name": u"subGate", + u"minOccurs": u"0", + }, + None, + ), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, instances=None, type=None, notes=None, q10_settings=None, forward_rate=None, reverse_rate=None, time_course=None, steady_state=None, sub_gates=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + instances=None, + type=None, + notes=None, + q10_settings=None, + forward_rate=None, + reverse_rate=None, + time_course=None, + steady_state=None, + sub_gates=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(GateHHUndetermined, self).__init__(neuro_lex_id, id, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(GateHHUndetermined, self).__init__(neuro_lex_id, id, **kwargs_) self.instances = _cast(int, instances) self.type = _cast(None, type) self.notes = notes @@ -14185,101 +25786,230 @@ def __init__(self, 
neuro_lex_id=None, id=None, instances=None, type=None, notes= self.sub_gates = [] else: self.sub_gates = sub_gates + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, GateHHUndetermined) + CurrentSubclassModule_, GateHHUndetermined + ) if subclass is not None: return subclass(*args_, **kwargs_) if GateHHUndetermined.subclass: return GateHHUndetermined.subclass(*args_, **kwargs_) else: return GateHHUndetermined(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Notes(self, value): # Validate type Notes, a restriction on xs:string. if value is not None and Validate_simpletypes_: pass + def validate_PositiveInteger(self, value): # Validate type PositiveInteger, a restriction on xs:positiveInteger. if value is not None and Validate_simpletypes_: pass + def validate_gateTypes(self, value): # Validate type gateTypes, a restriction on xs:string. if value is not None and Validate_simpletypes_: value = str(value) - enumerations = ['gateHHrates', 'gateHHratesTau', 'gateHHtauInf', 'gateHHratesInf', 'gateHHratesTauInf', 'gateHHInstantaneous', 'gateKS', 'gateFractional'] + enumerations = [ + "gateHHrates", + "gateHHratesTau", + "gateHHtauInf", + "gateHHratesInf", + "gateHHratesTauInf", + "gateHHInstantaneous", + "gateKS", + "gateFractional", + ] enumeration_respectee = False for enum in enumerations: if value == enum: enumeration_respectee = True break if not enumeration_respectee: - warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on gateTypes' % {"value" : value.encode("utf-8")} ) + warnings_.warn( + 'Value "%(value)s" does not match xsd enumeration restriction on gateTypes' + % {"value": value.encode("utf-8")} + ) + def hasContent_(self): if ( - self.notes is not None or - self.q10_settings is not None or - self.forward_rate is not None or - self.reverse_rate is not None or - self.time_course is not None or - self.steady_state is not None or - self.sub_gates or - super(GateHHUndetermined, self).hasContent_() + self.notes is not None + or self.q10_settings is not None + or self.forward_rate is not None + or self.reverse_rate is not None + or self.time_course is not None + or self.steady_state is not None + or self.sub_gates + or super(GateHHUndetermined, self).hasContent_() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GateHHUndetermined', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('GateHHUndetermined') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="GateHHUndetermined", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("GateHHUndetermined") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateHHUndetermined') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="GateHHUndetermined", + ) if self.hasContent_(): - 
outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='GateHHUndetermined', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="GateHHUndetermined", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GateHHUndetermined'): - super(GateHHUndetermined, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateHHUndetermined') - if self.instances is not None and 'instances' not in already_processed: - already_processed.add('instances') - outfile.write(' instances=%s' % (quote_attrib(self.instances), )) - if self.type is not None and 'type' not in already_processed: - already_processed.add('type') - outfile.write(' type=%s' % (quote_attrib(self.type), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GateHHUndetermined', fromsubclass_=False, pretty_print=True): - super(GateHHUndetermined, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="GateHHUndetermined", + ): + super(GateHHUndetermined, self).exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="GateHHUndetermined", + ) + if self.instances is not None and "instances" not in already_processed: + already_processed.add("instances") + outfile.write(" instances=%s" % (quote_attrib(self.instances),)) + if self.type is not None and "type" not in already_processed: + already_processed.add("type") + outfile.write(" type=%s" % (quote_attrib(self.type),)) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="GateHHUndetermined", + fromsubclass_=False, + pretty_print=True, + ): + super(GateHHUndetermined, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.notes is not None: showIndent(outfile, level, pretty_print) - outfile.write('<%snotes>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.notes), input_name='notes')), namespaceprefix_ , eol_)) + outfile.write( + "<%snotes>%s%s" + % ( + namespaceprefix_, + self.gds_encode( + self.gds_format_string( + quote_xml(self.notes), input_name="notes" + ) + ), + namespaceprefix_, + eol_, + ) + ) if self.q10_settings is not None: - self.q10_settings.export(outfile, level, namespaceprefix_, namespacedef_='', name_='q10Settings', pretty_print=pretty_print) + self.q10_settings.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="q10Settings", + pretty_print=pretty_print, + ) if self.forward_rate is not None: - self.forward_rate.export(outfile, level, namespaceprefix_, namespacedef_='', name_='forwardRate', pretty_print=pretty_print) + self.forward_rate.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="forwardRate", + pretty_print=pretty_print, + ) if self.reverse_rate is not None: - self.reverse_rate.export(outfile, level, namespaceprefix_, 
namespacedef_='', name_='reverseRate', pretty_print=pretty_print) + self.reverse_rate.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="reverseRate", + pretty_print=pretty_print, + ) if self.time_course is not None: - self.time_course.export(outfile, level, namespaceprefix_, namespacedef_='', name_='timeCourse', pretty_print=pretty_print) + self.time_course.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="timeCourse", + pretty_print=pretty_print, + ) if self.steady_state is not None: - self.steady_state.export(outfile, level, namespaceprefix_, namespacedef_='', name_='steadyState', pretty_print=pretty_print) + self.steady_state.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="steadyState", + pretty_print=pretty_print, + ) for subGate_ in self.sub_gates: - subGate_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='subGate', pretty_print=pretty_print) + subGate_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="subGate", + pretty_print=pretty_print, + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -14287,81 +26017,160 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('instances', node) - if value is not None and 'instances' not in already_processed: - already_processed.add('instances') + value = find_attr_value_("instances", node) + if value is not None and "instances" not in already_processed: + already_processed.add("instances") try: self.instances = int(value) except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + raise_parse_error(node, "Bad integer attribute: %s" % exp) if self.instances <= 0: - raise_parse_error(node, 'Invalid PositiveInteger') - self.validate_PositiveInteger(self.instances) # validate type PositiveInteger - value = find_attr_value_('type', node) - if value is not None and 'type' not in already_processed: - already_processed.add('type') + raise_parse_error(node, "Invalid PositiveInteger") + self.validate_PositiveInteger( + self.instances + ) # validate type PositiveInteger + value = find_attr_value_("type", node) + if value is not None and "type" not in already_processed: + already_processed.add("type") self.type = value - self.validate_gateTypes(self.type) # validate type gateTypes + self.validate_gateTypes(self.type) # validate type gateTypes super(GateHHUndetermined, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'notes': + if nodeName_ == "notes": notes_ = child_.text - notes_ = self.gds_validate_string(notes_, node, 'notes') + notes_ = self.gds_validate_string(notes_, node, "notes") self.notes = notes_ # validate type Notes self.validate_Notes(self.notes) - elif nodeName_ == 'q10Settings': + elif nodeName_ == "q10Settings": obj_ = Q10Settings.factory(parent_object_=self) obj_.build(child_) self.q10_settings = obj_ - obj_.original_tagname_ = 'q10Settings' - elif nodeName_ == 'forwardRate': + obj_.original_tagname_ = "q10Settings" + elif nodeName_ == "forwardRate": obj_ = HHRate.factory(parent_object_=self) obj_.build(child_) self.forward_rate = obj_ - obj_.original_tagname_ = 'forwardRate' - elif nodeName_ == 'reverseRate': + obj_.original_tagname_ = "forwardRate" + elif 
nodeName_ == "reverseRate": obj_ = HHRate.factory(parent_object_=self) obj_.build(child_) self.reverse_rate = obj_ - obj_.original_tagname_ = 'reverseRate' - elif nodeName_ == 'timeCourse': + obj_.original_tagname_ = "reverseRate" + elif nodeName_ == "timeCourse": obj_ = HHTime.factory(parent_object_=self) obj_.build(child_) self.time_course = obj_ - obj_.original_tagname_ = 'timeCourse' - elif nodeName_ == 'steadyState': + obj_.original_tagname_ = "timeCourse" + elif nodeName_ == "steadyState": obj_ = HHVariable.factory(parent_object_=self) obj_.build(child_) self.steady_state = obj_ - obj_.original_tagname_ = 'steadyState' - elif nodeName_ == 'subGate': + obj_.original_tagname_ = "steadyState" + elif nodeName_ == "subGate": obj_ = GateFractionalSubgate.factory(parent_object_=self) obj_.build(child_) self.sub_gates.append(obj_) - obj_.original_tagname_ = 'subGate' + obj_.original_tagname_ = "subGate" super(GateHHUndetermined, self).buildChildren(child_, node, nodeName_, True) + + # end class GateHHUndetermined class GateKS(Base): member_data_items_ = [ - MemberSpec_('instances', 'PositiveInteger', 0, 0, {'use': u'required'}), - MemberSpec_('notes', ['Notes', 'xs:string'], 0, 1, {u'type': u'xs:string', u'name': u'notes', u'minOccurs': u'0'}, None), - MemberSpec_('q10_settings', 'Q10Settings', 0, 1, {u'type': u'Q10Settings', u'name': u'q10Settings', u'minOccurs': u'0'}, None), - MemberSpec_('closed_states', 'ClosedState', 1, 0, {u'maxOccurs': u'unbounded', u'type': u'ClosedState', u'name': u'closedState', u'minOccurs': u'1'}, None), - MemberSpec_('open_states', 'OpenState', 1, 0, {u'maxOccurs': u'unbounded', u'type': u'OpenState', u'name': u'openState', u'minOccurs': u'1'}, None), - MemberSpec_('forward_transition', 'ForwardTransition', 1, 0, {u'type': u'ForwardTransition', u'name': u'forwardTransition'}, 2), - MemberSpec_('reverse_transition', 'ReverseTransition', 1, 0, {u'type': u'ReverseTransition', u'name': u'reverseTransition'}, 2), - MemberSpec_('tau_inf_transition', 'TauInfTransition', 1, 0, {u'type': u'TauInfTransition', u'name': u'tauInfTransition'}, 2), + MemberSpec_("instances", "PositiveInteger", 0, 0, {"use": u"required"}), + MemberSpec_( + "notes", + ["Notes", "xs:string"], + 0, + 1, + {u"type": u"xs:string", u"name": u"notes", u"minOccurs": u"0"}, + None, + ), + MemberSpec_( + "q10_settings", + "Q10Settings", + 0, + 1, + {u"type": u"Q10Settings", u"name": u"q10Settings", u"minOccurs": u"0"}, + None, + ), + MemberSpec_( + "closed_states", + "ClosedState", + 1, + 0, + { + u"maxOccurs": u"unbounded", + u"type": u"ClosedState", + u"name": u"closedState", + u"minOccurs": u"1", + }, + None, + ), + MemberSpec_( + "open_states", + "OpenState", + 1, + 0, + { + u"maxOccurs": u"unbounded", + u"type": u"OpenState", + u"name": u"openState", + u"minOccurs": u"1", + }, + None, + ), + MemberSpec_( + "forward_transition", + "ForwardTransition", + 1, + 0, + {u"type": u"ForwardTransition", u"name": u"forwardTransition"}, + 2, + ), + MemberSpec_( + "reverse_transition", + "ReverseTransition", + 1, + 0, + {u"type": u"ReverseTransition", u"name": u"reverseTransition"}, + 2, + ), + MemberSpec_( + "tau_inf_transition", + "TauInfTransition", + 1, + 0, + {u"type": u"TauInfTransition", u"name": u"tauInfTransition"}, + 2, + ), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, instances=None, notes=None, q10_settings=None, closed_states=None, open_states=None, forward_transition=None, reverse_transition=None, tau_inf_transition=None, **kwargs_): + + def __init__( + 
self, + neuro_lex_id=None, + id=None, + instances=None, + notes=None, + q10_settings=None, + closed_states=None, + open_states=None, + forward_transition=None, + reverse_transition=None, + tau_inf_transition=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(GateKS, self).__init__(neuro_lex_id, id, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(GateKS, self).__init__(neuro_lex_id, id, **kwargs_) self.instances = _cast(int, instances) self.notes = notes self.validate_Notes(self.notes) @@ -14386,86 +26195,187 @@ def __init__(self, neuro_lex_id=None, id=None, instances=None, notes=None, q10_s self.tau_inf_transition = [] else: self.tau_inf_transition = tau_inf_transition + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, GateKS) + subclass = getSubclassFromModule_(CurrentSubclassModule_, GateKS) if subclass is not None: return subclass(*args_, **kwargs_) if GateKS.subclass: return GateKS.subclass(*args_, **kwargs_) else: return GateKS(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Notes(self, value): # Validate type Notes, a restriction on xs:string. if value is not None and Validate_simpletypes_: pass + def validate_PositiveInteger(self, value): # Validate type PositiveInteger, a restriction on xs:positiveInteger. if value is not None and Validate_simpletypes_: pass + def hasContent_(self): if ( - self.notes is not None or - self.q10_settings is not None or - self.closed_states or - self.open_states or - self.forward_transition or - self.reverse_transition or - self.tau_inf_transition or - super(GateKS, self).hasContent_() + self.notes is not None + or self.q10_settings is not None + or self.closed_states + or self.open_states + or self.forward_transition + or self.reverse_transition + or self.tau_inf_transition + or super(GateKS, self).hasContent_() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GateKS', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('GateKS') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="GateKS", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("GateKS") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateKS') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="GateKS" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='GateKS', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="GateKS", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def 
exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GateKS'): - super(GateKS, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateKS') - if self.instances is not None and 'instances' not in already_processed: - already_processed.add('instances') - outfile.write(' instances=%s' % (quote_attrib(self.instances), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GateKS', fromsubclass_=False, pretty_print=True): - super(GateKS, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="GateKS" + ): + super(GateKS, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="GateKS" + ) + if self.instances is not None and "instances" not in already_processed: + already_processed.add("instances") + outfile.write(" instances=%s" % (quote_attrib(self.instances),)) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="GateKS", + fromsubclass_=False, + pretty_print=True, + ): + super(GateKS, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.notes is not None: showIndent(outfile, level, pretty_print) - outfile.write('<%snotes>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.notes), input_name='notes')), namespaceprefix_ , eol_)) + outfile.write( + "<%snotes>%s%s" + % ( + namespaceprefix_, + self.gds_encode( + self.gds_format_string( + quote_xml(self.notes), input_name="notes" + ) + ), + namespaceprefix_, + eol_, + ) + ) if self.q10_settings is not None: - self.q10_settings.export(outfile, level, namespaceprefix_, namespacedef_='', name_='q10Settings', pretty_print=pretty_print) + self.q10_settings.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="q10Settings", + pretty_print=pretty_print, + ) for closedState_ in self.closed_states: - closedState_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='closedState', pretty_print=pretty_print) + closedState_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="closedState", + pretty_print=pretty_print, + ) for openState_ in self.open_states: - openState_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='openState', pretty_print=pretty_print) + openState_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="openState", + pretty_print=pretty_print, + ) for forwardTransition_ in self.forward_transition: - forwardTransition_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='forwardTransition', pretty_print=pretty_print) + forwardTransition_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="forwardTransition", + pretty_print=pretty_print, + ) for reverseTransition_ in self.reverse_transition: - reverseTransition_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='reverseTransition', pretty_print=pretty_print) + reverseTransition_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="reverseTransition", + pretty_print=pretty_print, + ) for tauInfTransition_ in self.tau_inf_transition: - 
tauInfTransition_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='tauInfTransition', pretty_print=pretty_print) + tauInfTransition_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="tauInfTransition", + pretty_print=pretty_print, + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -14473,142 +26383,253 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('instances', node) - if value is not None and 'instances' not in already_processed: - already_processed.add('instances') + value = find_attr_value_("instances", node) + if value is not None and "instances" not in already_processed: + already_processed.add("instances") try: self.instances = int(value) except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + raise_parse_error(node, "Bad integer attribute: %s" % exp) if self.instances <= 0: - raise_parse_error(node, 'Invalid PositiveInteger') - self.validate_PositiveInteger(self.instances) # validate type PositiveInteger + raise_parse_error(node, "Invalid PositiveInteger") + self.validate_PositiveInteger( + self.instances + ) # validate type PositiveInteger super(GateKS, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'notes': + if nodeName_ == "notes": notes_ = child_.text - notes_ = self.gds_validate_string(notes_, node, 'notes') + notes_ = self.gds_validate_string(notes_, node, "notes") self.notes = notes_ # validate type Notes self.validate_Notes(self.notes) - elif nodeName_ == 'q10Settings': + elif nodeName_ == "q10Settings": obj_ = Q10Settings.factory(parent_object_=self) obj_.build(child_) self.q10_settings = obj_ - obj_.original_tagname_ = 'q10Settings' - elif nodeName_ == 'closedState': + obj_.original_tagname_ = "q10Settings" + elif nodeName_ == "closedState": obj_ = ClosedState.factory(parent_object_=self) obj_.build(child_) self.closed_states.append(obj_) - obj_.original_tagname_ = 'closedState' - elif nodeName_ == 'openState': + obj_.original_tagname_ = "closedState" + elif nodeName_ == "openState": obj_ = OpenState.factory(parent_object_=self) obj_.build(child_) self.open_states.append(obj_) - obj_.original_tagname_ = 'openState' - elif nodeName_ == 'forwardTransition': + obj_.original_tagname_ = "openState" + elif nodeName_ == "forwardTransition": obj_ = ForwardTransition.factory(parent_object_=self) obj_.build(child_) self.forward_transition.append(obj_) - obj_.original_tagname_ = 'forwardTransition' - elif nodeName_ == 'reverseTransition': + obj_.original_tagname_ = "forwardTransition" + elif nodeName_ == "reverseTransition": obj_ = ReverseTransition.factory(parent_object_=self) obj_.build(child_) self.reverse_transition.append(obj_) - obj_.original_tagname_ = 'reverseTransition' - elif nodeName_ == 'tauInfTransition': + obj_.original_tagname_ = "reverseTransition" + elif nodeName_ == "tauInfTransition": obj_ = TauInfTransition.factory(parent_object_=self) obj_.build(child_) self.tau_inf_transition.append(obj_) - obj_.original_tagname_ = 'tauInfTransition' + obj_.original_tagname_ = "tauInfTransition" super(GateKS, self).buildChildren(child_, node, nodeName_, True) + + # end class GateKS class TauInfTransition(Base): member_data_items_ = [ - MemberSpec_('from_', 'NmlId', 0, 
0, {'use': u'required'}), - MemberSpec_('to', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('steady_state', 'HHVariable', 0, 0, {u'type': u'HHVariable', u'name': u'steadyState'}, None), - MemberSpec_('time_course', 'HHTime', 0, 0, {u'type': u'HHTime', u'name': u'timeCourse'}, None), + MemberSpec_("from_", "NmlId", 0, 0, {"use": u"required"}), + MemberSpec_("to", "NmlId", 0, 0, {"use": u"required"}), + MemberSpec_( + "steady_state", + "HHVariable", + 0, + 0, + {u"type": u"HHVariable", u"name": u"steadyState"}, + None, + ), + MemberSpec_( + "time_course", + "HHTime", + 0, + 0, + {u"type": u"HHTime", u"name": u"timeCourse"}, + None, + ), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, from_=None, to=None, steady_state=None, time_course=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + from_=None, + to=None, + steady_state=None, + time_course=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(TauInfTransition, self).__init__(neuro_lex_id, id, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(TauInfTransition, self).__init__(neuro_lex_id, id, **kwargs_) self.from_ = _cast(None, from_) self.to = _cast(None, to) self.steady_state = steady_state self.time_course = time_course + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, TauInfTransition) + subclass = getSubclassFromModule_(CurrentSubclassModule_, TauInfTransition) if subclass is not None: return subclass(*args_, **kwargs_) if TauInfTransition.subclass: return TauInfTransition.subclass(*args_, **kwargs_) else: return TauInfTransition(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. 
if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] + self.validate_NmlId_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [[u"^[a-zA-Z_][a-zA-Z0-9_]*$"]] + def hasContent_(self): if ( - self.steady_state is not None or - self.time_course is not None or - super(TauInfTransition, self).hasContent_() + self.steady_state is not None + or self.time_course is not None + or super(TauInfTransition, self).hasContent_() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='TauInfTransition', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('TauInfTransition') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="TauInfTransition", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("TauInfTransition") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='TauInfTransition') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="TauInfTransition", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='TauInfTransition', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="TauInfTransition", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='TauInfTransition'): - super(TauInfTransition, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='TauInfTransition') - if self.from_ is not None and 'from_' not in already_processed: - already_processed.add('from_') - outfile.write(' from=%s' % (quote_attrib(self.from_), )) - if self.to is not None and 'to' not in already_processed: - already_processed.add('to') - outfile.write(' to=%s' % (quote_attrib(self.to), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='TauInfTransition', fromsubclass_=False, pretty_print=True): - super(TauInfTransition, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + 
name_="TauInfTransition", + ): + super(TauInfTransition, self).exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="TauInfTransition", + ) + if self.from_ is not None and "from_" not in already_processed: + already_processed.add("from_") + outfile.write(" from=%s" % (quote_attrib(self.from_),)) + if self.to is not None and "to" not in already_processed: + already_processed.add("to") + outfile.write(" to=%s" % (quote_attrib(self.to),)) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="TauInfTransition", + fromsubclass_=False, + pretty_print=True, + ): + super(TauInfTransition, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.steady_state is not None: - self.steady_state.export(outfile, level, namespaceprefix_, namespacedef_='', name_='steadyState', pretty_print=pretty_print) + self.steady_state.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="steadyState", + pretty_print=pretty_print, + ) if self.time_course is not None: - self.time_course.export(outfile, level, namespaceprefix_, namespacedef_='', name_='timeCourse', pretty_print=pretty_print) + self.time_course.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="timeCourse", + pretty_print=pretty_print, + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -14616,114 +26637,202 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('from', node) - if value is not None and 'from' not in already_processed: - already_processed.add('from') + value = find_attr_value_("from", node) + if value is not None and "from" not in already_processed: + already_processed.add("from") self.from_ = value - self.validate_NmlId(self.from_) # validate type NmlId - value = find_attr_value_('to', node) - if value is not None and 'to' not in already_processed: - already_processed.add('to') + self.validate_NmlId(self.from_) # validate type NmlId + value = find_attr_value_("to", node) + if value is not None and "to" not in already_processed: + already_processed.add("to") self.to = value - self.validate_NmlId(self.to) # validate type NmlId + self.validate_NmlId(self.to) # validate type NmlId super(TauInfTransition, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'steadyState': + if nodeName_ == "steadyState": obj_ = HHVariable.factory(parent_object_=self) obj_.build(child_) self.steady_state = obj_ - obj_.original_tagname_ = 'steadyState' - elif nodeName_ == 'timeCourse': + obj_.original_tagname_ = "steadyState" + elif nodeName_ == "timeCourse": obj_ = HHTime.factory(parent_object_=self) obj_.build(child_) self.time_course = obj_ - obj_.original_tagname_ = 'timeCourse' + obj_.original_tagname_ = "timeCourse" super(TauInfTransition, self).buildChildren(child_, node, nodeName_, True) + + # end class TauInfTransition class ReverseTransition(Base): member_data_items_ = [ - MemberSpec_('from_', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('to', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('__ANY__', '__ANY__', 1, 1, {u'maxOccurs': u'unbounded', u'processContents': 
u'skip', u'minOccurs': u'0'}, None), + MemberSpec_("from_", "NmlId", 0, 0, {"use": u"required"}), + MemberSpec_("to", "NmlId", 0, 0, {"use": u"required"}), + MemberSpec_( + "__ANY__", + "__ANY__", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"processContents": u"skip", + u"minOccurs": u"0", + }, + None, + ), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, from_=None, to=None, anytypeobjs_=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + from_=None, + to=None, + anytypeobjs_=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(ReverseTransition, self).__init__(neuro_lex_id, id, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(ReverseTransition, self).__init__(neuro_lex_id, id, **kwargs_) self.from_ = _cast(None, from_) self.to = _cast(None, to) if anytypeobjs_ is None: self.anytypeobjs_ = [] else: self.anytypeobjs_ = anytypeobjs_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, ReverseTransition) + subclass = getSubclassFromModule_(CurrentSubclassModule_, ReverseTransition) if subclass is not None: return subclass(*args_, **kwargs_) if ReverseTransition.subclass: return ReverseTransition.subclass(*args_, **kwargs_) else: return ReverseTransition(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] + self.validate_NmlId_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [[u"^[a-zA-Z_][a-zA-Z0-9_]*$"]] + def hasContent_(self): - if ( - self.anytypeobjs_ or - super(ReverseTransition, self).hasContent_() - ): + if self.anytypeobjs_ or super(ReverseTransition, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ReverseTransition', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('ReverseTransition') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ReverseTransition", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ReverseTransition") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ReverseTransition') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ReverseTransition", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - 
self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ReverseTransition', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ReverseTransition", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ReverseTransition'): - super(ReverseTransition, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ReverseTransition') - if self.from_ is not None and 'from_' not in already_processed: - already_processed.add('from_') - outfile.write(' from=%s' % (quote_attrib(self.from_), )) - if self.to is not None and 'to' not in already_processed: - already_processed.add('to') - outfile.write(' to=%s' % (quote_attrib(self.to), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ReverseTransition', fromsubclass_=False, pretty_print=True): - super(ReverseTransition, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ReverseTransition", + ): + super(ReverseTransition, self).exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ReverseTransition", + ) + if self.from_ is not None and "from_" not in already_processed: + already_processed.add("from_") + outfile.write(" from=%s" % (quote_attrib(self.from_),)) + if self.to is not None and "to" not in already_processed: + already_processed.add("to") + outfile.write(" to=%s" % (quote_attrib(self.to),)) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ReverseTransition", + fromsubclass_=False, + pretty_print=True, + ): + super(ReverseTransition, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" for obj_ in self.anytypeobjs_: obj_.export(outfile, level, namespaceprefix_, pretty_print=pretty_print) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -14731,107 +26840,195 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('from', node) - if value is not None and 'from' not in already_processed: - already_processed.add('from') + value = find_attr_value_("from", node) + if value is not None and "from" not in already_processed: + already_processed.add("from") self.from_ = value - self.validate_NmlId(self.from_) # validate type NmlId - value = find_attr_value_('to', node) - if value is not None and 'to' not in already_processed: - already_processed.add('to') + self.validate_NmlId(self.from_) # validate type NmlId + value = find_attr_value_("to", node) + if value is not None and "to" not in already_processed: + already_processed.add("to") self.to = value - self.validate_NmlId(self.to) # validate type NmlId + self.validate_NmlId(self.to) # validate type NmlId super(ReverseTransition, 
self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - obj_ = self.gds_build_any(child_, 'ReverseTransition') + obj_ = self.gds_build_any(child_, "ReverseTransition") if obj_ is not None: self.add_anytypeobjs_(obj_) super(ReverseTransition, self).buildChildren(child_, node, nodeName_, True) + + # end class ReverseTransition class ForwardTransition(Base): member_data_items_ = [ - MemberSpec_('from_', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('to', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('__ANY__', '__ANY__', 1, 1, {u'maxOccurs': u'unbounded', u'processContents': u'skip', u'minOccurs': u'0'}, None), + MemberSpec_("from_", "NmlId", 0, 0, {"use": u"required"}), + MemberSpec_("to", "NmlId", 0, 0, {"use": u"required"}), + MemberSpec_( + "__ANY__", + "__ANY__", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"processContents": u"skip", + u"minOccurs": u"0", + }, + None, + ), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, from_=None, to=None, anytypeobjs_=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + from_=None, + to=None, + anytypeobjs_=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(ForwardTransition, self).__init__(neuro_lex_id, id, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(ForwardTransition, self).__init__(neuro_lex_id, id, **kwargs_) self.from_ = _cast(None, from_) self.to = _cast(None, to) if anytypeobjs_ is None: self.anytypeobjs_ = [] else: self.anytypeobjs_ = anytypeobjs_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, ForwardTransition) + subclass = getSubclassFromModule_(CurrentSubclassModule_, ForwardTransition) if subclass is not None: return subclass(*args_, **kwargs_) if ForwardTransition.subclass: return ForwardTransition.subclass(*args_, **kwargs_) else: return ForwardTransition(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. 
if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] + self.validate_NmlId_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [[u"^[a-zA-Z_][a-zA-Z0-9_]*$"]] + def hasContent_(self): - if ( - self.anytypeobjs_ or - super(ForwardTransition, self).hasContent_() - ): + if self.anytypeobjs_ or super(ForwardTransition, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ForwardTransition', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('ForwardTransition') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ForwardTransition", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ForwardTransition") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ForwardTransition') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ForwardTransition", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ForwardTransition', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ForwardTransition", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ForwardTransition'): - super(ForwardTransition, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ForwardTransition') - if self.from_ is not None and 'from_' not in already_processed: - already_processed.add('from_') - outfile.write(' from=%s' % (quote_attrib(self.from_), )) - if self.to is not None and 'to' not in already_processed: - already_processed.add('to') - outfile.write(' to=%s' % (quote_attrib(self.to), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ForwardTransition', fromsubclass_=False, pretty_print=True): - super(ForwardTransition, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ForwardTransition", + ): + super(ForwardTransition, self).exportAttributes( + 
outfile, + level, + already_processed, + namespaceprefix_, + name_="ForwardTransition", + ) + if self.from_ is not None and "from_" not in already_processed: + already_processed.add("from_") + outfile.write(" from=%s" % (quote_attrib(self.from_),)) + if self.to is not None and "to" not in already_processed: + already_processed.add("to") + outfile.write(" to=%s" % (quote_attrib(self.to),)) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ForwardTransition", + fromsubclass_=False, + pretty_print=True, + ): + super(ForwardTransition, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" for obj_ in self.anytypeobjs_: obj_.export(outfile, level, namespaceprefix_, pretty_print=pretty_print) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -14839,78 +27036,125 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('from', node) - if value is not None and 'from' not in already_processed: - already_processed.add('from') + value = find_attr_value_("from", node) + if value is not None and "from" not in already_processed: + already_processed.add("from") self.from_ = value - self.validate_NmlId(self.from_) # validate type NmlId - value = find_attr_value_('to', node) - if value is not None and 'to' not in already_processed: - already_processed.add('to') + self.validate_NmlId(self.from_) # validate type NmlId + value = find_attr_value_("to", node) + if value is not None and "to" not in already_processed: + already_processed.add("to") self.to = value - self.validate_NmlId(self.to) # validate type NmlId + self.validate_NmlId(self.to) # validate type NmlId super(ForwardTransition, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - obj_ = self.gds_build_any(child_, 'ForwardTransition') + obj_ = self.gds_build_any(child_, "ForwardTransition") if obj_ is not None: self.add_anytypeobjs_(obj_) super(ForwardTransition, self).buildChildren(child_, node, nodeName_, True) + + # end class ForwardTransition class OpenState(Base): - member_data_items_ = [ - ] + member_data_items_ = [] subclass = None superclass = Base + def __init__(self, neuro_lex_id=None, id=None, **kwargs_): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(OpenState, self).__init__(neuro_lex_id, id, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(OpenState, self).__init__(neuro_lex_id, id, **kwargs_) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, OpenState) + subclass = getSubclassFromModule_(CurrentSubclassModule_, OpenState) if subclass is not None: return subclass(*args_, **kwargs_) if OpenState.subclass: return OpenState.subclass(*args_, **kwargs_) else: return OpenState(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): - if ( - super(OpenState, self).hasContent_() - ): + if super(OpenState, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='OpenState', pretty_print=True): - imported_ns_def_ = 
GenerateDSNamespaceDefs_.get('OpenState') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="OpenState", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("OpenState") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='OpenState') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="OpenState" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='OpenState', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='OpenState'): - super(OpenState, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='OpenState') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='OpenState', fromsubclass_=False, pretty_print=True): - super(OpenState, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="OpenState", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="OpenState" + ): + super(OpenState, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="OpenState" + ) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="OpenState", + fromsubclass_=False, + pretty_print=True, + ): + super(OpenState, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) pass + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -14918,66 +27162,118 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): super(OpenState, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(OpenState, self).buildChildren(child_, node, nodeName_, True) pass + + # end class OpenState class ClosedState(Base): - member_data_items_ = [ - ] + member_data_items_ = [] subclass = None superclass = Base + def __init__(self, neuro_lex_id=None, id=None, **kwargs_): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(ClosedState, self).__init__(neuro_lex_id, id, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(ClosedState, self).__init__(neuro_lex_id, id, **kwargs_) + def factory(*args_, **kwargs_): if 
CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, ClosedState) + subclass = getSubclassFromModule_(CurrentSubclassModule_, ClosedState) if subclass is not None: return subclass(*args_, **kwargs_) if ClosedState.subclass: return ClosedState.subclass(*args_, **kwargs_) else: return ClosedState(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): - if ( - super(ClosedState, self).hasContent_() - ): + if super(ClosedState, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ClosedState', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('ClosedState') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ClosedState", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ClosedState") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ClosedState') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="ClosedState" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ClosedState', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ClosedState'): - super(ClosedState, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ClosedState') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ClosedState', fromsubclass_=False, pretty_print=True): - super(ClosedState, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ClosedState", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ClosedState", + ): + super(ClosedState, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="ClosedState" + ) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ClosedState", + fromsubclass_=False, + pretty_print=True, + ): + super(ClosedState, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) pass + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -14985,103 +27281,211 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): 
super(ClosedState, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(ClosedState, self).buildChildren(child_, node, nodeName_, True) pass + + # end class ClosedState class IonChannelKS(Standalone): """Kinetic scheme based ion channel.""" + member_data_items_ = [ - MemberSpec_('species', 'NmlId', 0, 1, {'use': u'optional'}), - MemberSpec_('conductance', 'Nml2Quantity_conductance', 0, 1, {'use': u'optional'}), - MemberSpec_('gate_kses', 'GateKS', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'GateKS', u'name': u'gateKS', u'minOccurs': u'0'}, None), + MemberSpec_("species", "NmlId", 0, 1, {"use": u"optional"}), + MemberSpec_( + "conductance", "Nml2Quantity_conductance", 0, 1, {"use": u"optional"} + ), + MemberSpec_( + "gate_kses", + "GateKS", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"GateKS", + u"name": u"gateKS", + u"minOccurs": u"0", + }, + None, + ), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, species=None, conductance=None, gate_kses=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + species=None, + conductance=None, + gate_kses=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(IonChannelKS, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(IonChannelKS, self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_ + ) self.species = _cast(None, species) self.conductance = _cast(None, conductance) if gate_kses is None: self.gate_kses = [] else: self.gate_kses = gate_kses + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, IonChannelKS) + subclass = getSubclassFromModule_(CurrentSubclassModule_, IonChannelKS) if subclass is not None: return subclass(*args_, **kwargs_) if IonChannelKS.subclass: return IonChannelKS.subclass(*args_, **kwargs_) else: return IonChannelKS(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] + self.validate_NmlId_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [[u"^[a-zA-Z_][a-zA-Z0-9_]*$"]] + def validate_Nml2Quantity_conductance(self, value): # Validate type Nml2Quantity_conductance, a restriction on xs:string. 
if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_conductance_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_conductance_patterns_, )) - validate_Nml2Quantity_conductance_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S|mS|uS|nS|pS)$']] + self.validate_Nml2Quantity_conductance_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_conductance_patterns_, + ) + ) + + validate_Nml2Quantity_conductance_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S|mS|uS|nS|pS)$"] + ] + def hasContent_(self): - if ( - self.gate_kses or - super(IonChannelKS, self).hasContent_() - ): + if self.gate_kses or super(IonChannelKS, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IonChannelKS', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('IonChannelKS') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="IonChannelKS", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("IonChannelKS") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IonChannelKS') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="IonChannelKS" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='IonChannelKS', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="IonChannelKS", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='IonChannelKS'): - super(IonChannelKS, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IonChannelKS') - if self.species is not None and 'species' not in already_processed: - already_processed.add('species') - outfile.write(' species=%s' % (quote_attrib(self.species), )) - if self.conductance is not None and 'conductance' not in already_processed: - already_processed.add('conductance') - outfile.write(' conductance=%s' % (quote_attrib(self.conductance), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IonChannelKS', fromsubclass_=False, pretty_print=True): - super(IonChannelKS, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + 
already_processed, + namespaceprefix_="", + name_="IonChannelKS", + ): + super(IonChannelKS, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="IonChannelKS" + ) + if self.species is not None and "species" not in already_processed: + already_processed.add("species") + outfile.write(" species=%s" % (quote_attrib(self.species),)) + if self.conductance is not None and "conductance" not in already_processed: + already_processed.add("conductance") + outfile.write(" conductance=%s" % (quote_attrib(self.conductance),)) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="IonChannelKS", + fromsubclass_=False, + pretty_print=True, + ): + super(IonChannelKS, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" for gateKS_ in self.gate_kses: - gateKS_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='gateKS', pretty_print=pretty_print) + gateKS_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="gateKS", + pretty_print=pretty_print, + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -15089,97 +27493,203 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('species', node) - if value is not None and 'species' not in already_processed: - already_processed.add('species') + value = find_attr_value_("species", node) + if value is not None and "species" not in already_processed: + already_processed.add("species") self.species = value - self.validate_NmlId(self.species) # validate type NmlId - value = find_attr_value_('conductance', node) - if value is not None and 'conductance' not in already_processed: - already_processed.add('conductance') + self.validate_NmlId(self.species) # validate type NmlId + value = find_attr_value_("conductance", node) + if value is not None and "conductance" not in already_processed: + already_processed.add("conductance") self.conductance = value - self.validate_Nml2Quantity_conductance(self.conductance) # validate type Nml2Quantity_conductance + self.validate_Nml2Quantity_conductance( + self.conductance + ) # validate type Nml2Quantity_conductance super(IonChannelKS, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'gateKS': + if nodeName_ == "gateKS": obj_ = GateKS.factory(parent_object_=self) obj_.build(child_) self.gate_kses.append(obj_) - obj_.original_tagname_ = 'gateKS' + obj_.original_tagname_ = "gateKS" super(IonChannelKS, self).buildChildren(child_, node, nodeName_, True) + + # end class IonChannelKS class IonChannelScalable(Standalone): member_data_items_ = [ - MemberSpec_('q10_conductance_scalings', 'Q10ConductanceScaling', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Q10ConductanceScaling', u'name': u'q10ConductanceScaling', u'minOccurs': u'0'}, None), + MemberSpec_( + "q10_conductance_scalings", + "Q10ConductanceScaling", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"Q10ConductanceScaling", + u"name": u"q10ConductanceScaling", + u"minOccurs": u"0", + }, + None, + ), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, 
properties=None, annotation=None, q10_conductance_scalings=None, extensiontype_=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + q10_conductance_scalings=None, + extensiontype_=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(IonChannelScalable, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, extensiontype_, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(IonChannelScalable, self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + extensiontype_, + **kwargs_ + ) if q10_conductance_scalings is None: self.q10_conductance_scalings = [] else: self.q10_conductance_scalings = q10_conductance_scalings self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, IonChannelScalable) + CurrentSubclassModule_, IonChannelScalable + ) if subclass is not None: return subclass(*args_, **kwargs_) if IonChannelScalable.subclass: return IonChannelScalable.subclass(*args_, **kwargs_) else: return IonChannelScalable(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): if ( - self.q10_conductance_scalings or - super(IonChannelScalable, self).hasContent_() + self.q10_conductance_scalings + or super(IonChannelScalable, self).hasContent_() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IonChannelScalable', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('IonChannelScalable') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="IonChannelScalable", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("IonChannelScalable") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IonChannelScalable') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="IonChannelScalable", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='IonChannelScalable', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="IonChannelScalable", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='IonChannelScalable'): - super(IonChannelScalable, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IonChannelScalable') - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - 
already_processed.add('xsi:type') + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="IonChannelScalable", + ): + super(IonChannelScalable, self).exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="IonChannelScalable", + ) + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IonChannelScalable', fromsubclass_=False, pretty_print=True): - super(IonChannelScalable, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="IonChannelScalable", + fromsubclass_=False, + pretty_print=True, + ): + super(IonChannelScalable, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" for q10ConductanceScaling_ in self.q10_conductance_scalings: - q10ConductanceScaling_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='q10ConductanceScaling', pretty_print=pretty_print) + q10ConductanceScaling_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="q10ConductanceScaling", + pretty_print=pretty_print, + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -15187,98 +27697,985 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value super(IonChannelScalable, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'q10ConductanceScaling': + if nodeName_ == "q10ConductanceScaling": obj_ = Q10ConductanceScaling.factory(parent_object_=self) obj_.build(child_) self.q10_conductance_scalings.append(obj_) - obj_.original_tagname_ = 'q10ConductanceScaling' + obj_.original_tagname_ = "q10ConductanceScaling" super(IonChannelScalable, self).buildChildren(child_, node, nodeName_, True) + + # end class IonChannelScalable class NeuroMLDocument(Standalone): member_data_items_ = [ - MemberSpec_('includes', 'IncludeType', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'IncludeType', u'name': u'include', u'minOccurs': u'0'}, None), - MemberSpec_('extracellular_properties', 'ExtracellularProperties', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ExtracellularProperties', u'name': u'extracellularProperties', u'minOccurs': u'0'}, None), - MemberSpec_('intracellular_properties', 'IntracellularProperties', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'IntracellularProperties', u'name': u'intracellularProperties', u'minOccurs': u'0'}, None), - MemberSpec_('morphology', 'Morphology', 1, 
1, {u'maxOccurs': u'unbounded', u'type': u'Morphology', u'name': u'morphology', u'minOccurs': u'0'}, None), - MemberSpec_('ion_channel', 'IonChannel', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'IonChannel', u'name': u'ionChannel', u'minOccurs': u'0'}, None), - MemberSpec_('ion_channel_hhs', 'IonChannelHH', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'IonChannelHH', u'name': u'ionChannelHH', u'minOccurs': u'0'}, None), - MemberSpec_('ion_channel_v_shifts', 'IonChannelVShift', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'IonChannelVShift', u'name': u'ionChannelVShift', u'minOccurs': u'0'}, None), - MemberSpec_('ion_channel_kses', 'IonChannelKS', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'IonChannelKS', u'name': u'ionChannelKS', u'minOccurs': u'0'}, None), - MemberSpec_('decaying_pool_concentration_models', 'DecayingPoolConcentrationModel', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'DecayingPoolConcentrationModel', u'name': u'decayingPoolConcentrationModel', u'minOccurs': u'0'}, None), - MemberSpec_('fixed_factor_concentration_models', 'FixedFactorConcentrationModel', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'FixedFactorConcentrationModel', u'name': u'fixedFactorConcentrationModel', u'minOccurs': u'0'}, None), - MemberSpec_('alpha_current_synapses', 'AlphaCurrentSynapse', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'AlphaCurrentSynapse', u'name': u'alphaCurrentSynapse', u'minOccurs': u'0'}, None), - MemberSpec_('alpha_synapses', 'AlphaSynapse', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'AlphaSynapse', u'name': u'alphaSynapse', u'minOccurs': u'0'}, None), - MemberSpec_('exp_one_synapses', 'ExpOneSynapse', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ExpOneSynapse', u'name': u'expOneSynapse', u'minOccurs': u'0'}, None), - MemberSpec_('exp_two_synapses', 'ExpTwoSynapse', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ExpTwoSynapse', u'name': u'expTwoSynapse', u'minOccurs': u'0'}, None), - MemberSpec_('exp_three_synapses', 'ExpThreeSynapse', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ExpThreeSynapse', u'name': u'expThreeSynapse', u'minOccurs': u'0'}, None), - MemberSpec_('blocking_plastic_synapses', 'BlockingPlasticSynapse', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'BlockingPlasticSynapse', u'name': u'blockingPlasticSynapse', u'minOccurs': u'0'}, None), - MemberSpec_('double_synapses', 'DoubleSynapse', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'DoubleSynapse', u'name': u'doubleSynapse', u'minOccurs': u'0'}, None), - MemberSpec_('gap_junctions', 'GapJunction', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'GapJunction', u'name': u'gapJunction', u'minOccurs': u'0'}, None), - MemberSpec_('silent_synapses', 'SilentSynapse', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'SilentSynapse', u'name': u'silentSynapse', u'minOccurs': u'0'}, None), - MemberSpec_('linear_graded_synapses', 'LinearGradedSynapse', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'LinearGradedSynapse', u'name': u'linearGradedSynapse', u'minOccurs': u'0'}, None), - MemberSpec_('graded_synapses', 'GradedSynapse', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'GradedSynapse', u'name': u'gradedSynapse', u'minOccurs': u'0'}, None), - MemberSpec_('biophysical_properties', 'BiophysicalProperties', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'BiophysicalProperties', u'name': u'biophysicalProperties', u'minOccurs': u'0'}, None), - MemberSpec_('cells', 'Cell', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Cell', u'name': u'cell', u'minOccurs': u'0'}, None), - MemberSpec_('cell2_ca_poolses', 'Cell2CaPools', 1, 1, 
{u'maxOccurs': u'unbounded', u'type': u'Cell2CaPools', u'name': u'cell2CaPools', u'minOccurs': u'0'}, None), - MemberSpec_('base_cells', 'BaseCell', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'BaseCell', u'name': u'baseCell', u'minOccurs': u'0'}, None), - MemberSpec_('iaf_tau_cells', 'IafTauCell', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'IafTauCell', u'name': u'iafTauCell', u'minOccurs': u'0'}, None), - MemberSpec_('iaf_tau_ref_cells', 'IafTauRefCell', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'IafTauRefCell', u'name': u'iafTauRefCell', u'minOccurs': u'0'}, None), - MemberSpec_('iaf_cells', 'IafCell', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'IafCell', u'name': u'iafCell', u'minOccurs': u'0'}, None), - MemberSpec_('iaf_ref_cells', 'IafRefCell', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'IafRefCell', u'name': u'iafRefCell', u'minOccurs': u'0'}, None), - MemberSpec_('izhikevich_cells', 'IzhikevichCell', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'IzhikevichCell', u'name': u'izhikevichCell', u'minOccurs': u'0'}, None), - MemberSpec_('izhikevich2007_cells', 'Izhikevich2007Cell', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Izhikevich2007Cell', u'name': u'izhikevich2007Cell', u'minOccurs': u'0'}, None), - MemberSpec_('ad_ex_ia_f_cells', 'AdExIaFCell', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'AdExIaFCell', u'name': u'adExIaFCell', u'minOccurs': u'0'}, None), - MemberSpec_('fitz_hugh_nagumo_cells', 'FitzHughNagumoCell', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'FitzHughNagumoCell', u'name': u'fitzHughNagumoCell', u'minOccurs': u'0'}, None), - MemberSpec_('fitz_hugh_nagumo1969_cells', 'FitzHughNagumo1969Cell', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'FitzHughNagumo1969Cell', u'name': u'fitzHughNagumo1969Cell', u'minOccurs': u'0'}, None), - MemberSpec_('pinsky_rinzel_ca3_cells', 'PinskyRinzelCA3Cell', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'PinskyRinzelCA3Cell', u'name': u'pinskyRinzelCA3Cell', u'minOccurs': u'0'}, None), - MemberSpec_('pulse_generators', 'PulseGenerator', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'PulseGenerator', u'name': u'pulseGenerator', u'minOccurs': u'0'}, None), - MemberSpec_('pulse_generator_dls', 'PulseGeneratorDL', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'PulseGeneratorDL', u'name': u'pulseGeneratorDL', u'minOccurs': u'0'}, None), - MemberSpec_('sine_generators', 'SineGenerator', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'SineGenerator', u'name': u'sineGenerator', u'minOccurs': u'0'}, None), - MemberSpec_('sine_generator_dls', 'SineGeneratorDL', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'SineGeneratorDL', u'name': u'sineGeneratorDL', u'minOccurs': u'0'}, None), - MemberSpec_('ramp_generators', 'RampGenerator', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'RampGenerator', u'name': u'rampGenerator', u'minOccurs': u'0'}, None), - MemberSpec_('ramp_generator_dls', 'RampGeneratorDL', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'RampGeneratorDL', u'name': u'rampGeneratorDL', u'minOccurs': u'0'}, None), - MemberSpec_('compound_inputs', 'CompoundInput', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'CompoundInput', u'name': u'compoundInput', u'minOccurs': u'0'}, None), - MemberSpec_('compound_input_dls', 'CompoundInputDL', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'CompoundInputDL', u'name': u'compoundInputDL', u'minOccurs': u'0'}, None), - MemberSpec_('voltage_clamps', 'VoltageClamp', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'VoltageClamp', u'name': u'voltageClamp', u'minOccurs': u'0'}, None), - 
MemberSpec_('voltage_clamp_triples', 'VoltageClampTriple', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'VoltageClampTriple', u'name': u'voltageClampTriple', u'minOccurs': u'0'}, None), - MemberSpec_('spike_arrays', 'SpikeArray', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'SpikeArray', u'name': u'spikeArray', u'minOccurs': u'0'}, None), - MemberSpec_('timed_synaptic_inputs', 'TimedSynapticInput', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'TimedSynapticInput', u'name': u'timedSynapticInput', u'minOccurs': u'0'}, None), - MemberSpec_('spike_generators', 'SpikeGenerator', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'SpikeGenerator', u'name': u'spikeGenerator', u'minOccurs': u'0'}, None), - MemberSpec_('spike_generator_randoms', 'SpikeGeneratorRandom', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'SpikeGeneratorRandom', u'name': u'spikeGeneratorRandom', u'minOccurs': u'0'}, None), - MemberSpec_('spike_generator_poissons', 'SpikeGeneratorPoisson', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'SpikeGeneratorPoisson', u'name': u'spikeGeneratorPoisson', u'minOccurs': u'0'}, None), - MemberSpec_('spike_generator_ref_poissons', 'SpikeGeneratorRefPoisson', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'SpikeGeneratorRefPoisson', u'name': u'spikeGeneratorRefPoisson', u'minOccurs': u'0'}, None), - MemberSpec_('poisson_firing_synapses', 'PoissonFiringSynapse', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'PoissonFiringSynapse', u'name': u'poissonFiringSynapse', u'minOccurs': u'0'}, None), - MemberSpec_('transient_poisson_firing_synapses', 'TransientPoissonFiringSynapse', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'TransientPoissonFiringSynapse', u'name': u'transientPoissonFiringSynapse', u'minOccurs': u'0'}, None), - MemberSpec_('IF_curr_alpha', 'IF_curr_alpha', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'IF_curr_alpha', u'name': u'IF_curr_alpha', u'minOccurs': u'0'}, None), - MemberSpec_('IF_curr_exp', 'IF_curr_exp', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'IF_curr_exp', u'name': u'IF_curr_exp', u'minOccurs': u'0'}, None), - MemberSpec_('IF_cond_alpha', 'IF_cond_alpha', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'IF_cond_alpha', u'name': u'IF_cond_alpha', u'minOccurs': u'0'}, None), - MemberSpec_('IF_cond_exp', 'IF_cond_exp', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'IF_cond_exp', u'name': u'IF_cond_exp', u'minOccurs': u'0'}, None), - MemberSpec_('EIF_cond_exp_isfa_ista', 'EIF_cond_exp_isfa_ista', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'EIF_cond_exp_isfa_ista', u'name': u'EIF_cond_exp_isfa_ista', u'minOccurs': u'0'}, None), - MemberSpec_('EIF_cond_alpha_isfa_ista', 'EIF_cond_alpha_isfa_ista', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'EIF_cond_alpha_isfa_ista', u'name': u'EIF_cond_alpha_isfa_ista', u'minOccurs': u'0'}, None), - MemberSpec_('HH_cond_exp', 'HH_cond_exp', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'HH_cond_exp', u'name': u'HH_cond_exp', u'minOccurs': u'0'}, None), - MemberSpec_('exp_cond_synapses', 'ExpCondSynapse', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ExpCondSynapse', u'name': u'expCondSynapse', u'minOccurs': u'0'}, None), - MemberSpec_('alpha_cond_synapses', 'AlphaCondSynapse', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'AlphaCondSynapse', u'name': u'alphaCondSynapse', u'minOccurs': u'0'}, None), - MemberSpec_('exp_curr_synapses', 'ExpCurrSynapse', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ExpCurrSynapse', u'name': u'expCurrSynapse', u'minOccurs': u'0'}, None), - MemberSpec_('alpha_curr_synapses', 'AlphaCurrSynapse', 1, 1, {u'maxOccurs': 
u'unbounded', u'type': u'AlphaCurrSynapse', u'name': u'alphaCurrSynapse', u'minOccurs': u'0'}, None), - MemberSpec_('SpikeSourcePoisson', 'SpikeSourcePoisson', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'SpikeSourcePoisson', u'name': u'SpikeSourcePoisson', u'minOccurs': u'0'}, None), - MemberSpec_('networks', 'Network', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Network', u'name': u'network', u'minOccurs': u'0'}, None), - MemberSpec_('ComponentType', 'ComponentType', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ComponentType', u'name': u'ComponentType', u'minOccurs': u'0'}, None), + MemberSpec_( + "includes", + "IncludeType", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"IncludeType", + u"name": u"include", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "extracellular_properties", + "ExtracellularProperties", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"ExtracellularProperties", + u"name": u"extracellularProperties", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "intracellular_properties", + "IntracellularProperties", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"IntracellularProperties", + u"name": u"intracellularProperties", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "morphology", + "Morphology", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"Morphology", + u"name": u"morphology", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "ion_channel", + "IonChannel", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"IonChannel", + u"name": u"ionChannel", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "ion_channel_hhs", + "IonChannelHH", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"IonChannelHH", + u"name": u"ionChannelHH", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "ion_channel_v_shifts", + "IonChannelVShift", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"IonChannelVShift", + u"name": u"ionChannelVShift", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "ion_channel_kses", + "IonChannelKS", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"IonChannelKS", + u"name": u"ionChannelKS", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "decaying_pool_concentration_models", + "DecayingPoolConcentrationModel", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"DecayingPoolConcentrationModel", + u"name": u"decayingPoolConcentrationModel", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "fixed_factor_concentration_models", + "FixedFactorConcentrationModel", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"FixedFactorConcentrationModel", + u"name": u"fixedFactorConcentrationModel", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "alpha_current_synapses", + "AlphaCurrentSynapse", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"AlphaCurrentSynapse", + u"name": u"alphaCurrentSynapse", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "alpha_synapses", + "AlphaSynapse", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"AlphaSynapse", + u"name": u"alphaSynapse", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "exp_one_synapses", + "ExpOneSynapse", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"ExpOneSynapse", + u"name": u"expOneSynapse", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "exp_two_synapses", + "ExpTwoSynapse", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"ExpTwoSynapse", + u"name": u"expTwoSynapse", + u"minOccurs": u"0", + }, + None, + ), 
+ MemberSpec_( + "exp_three_synapses", + "ExpThreeSynapse", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"ExpThreeSynapse", + u"name": u"expThreeSynapse", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "blocking_plastic_synapses", + "BlockingPlasticSynapse", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"BlockingPlasticSynapse", + u"name": u"blockingPlasticSynapse", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "double_synapses", + "DoubleSynapse", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"DoubleSynapse", + u"name": u"doubleSynapse", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "gap_junctions", + "GapJunction", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"GapJunction", + u"name": u"gapJunction", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "silent_synapses", + "SilentSynapse", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"SilentSynapse", + u"name": u"silentSynapse", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "linear_graded_synapses", + "LinearGradedSynapse", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"LinearGradedSynapse", + u"name": u"linearGradedSynapse", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "graded_synapses", + "GradedSynapse", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"GradedSynapse", + u"name": u"gradedSynapse", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "biophysical_properties", + "BiophysicalProperties", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"BiophysicalProperties", + u"name": u"biophysicalProperties", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "cells", + "Cell", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"Cell", + u"name": u"cell", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "cell2_ca_poolses", + "Cell2CaPools", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"Cell2CaPools", + u"name": u"cell2CaPools", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "base_cells", + "BaseCell", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"BaseCell", + u"name": u"baseCell", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "iaf_tau_cells", + "IafTauCell", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"IafTauCell", + u"name": u"iafTauCell", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "iaf_tau_ref_cells", + "IafTauRefCell", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"IafTauRefCell", + u"name": u"iafTauRefCell", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "iaf_cells", + "IafCell", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"IafCell", + u"name": u"iafCell", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "iaf_ref_cells", + "IafRefCell", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"IafRefCell", + u"name": u"iafRefCell", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "izhikevich_cells", + "IzhikevichCell", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"IzhikevichCell", + u"name": u"izhikevichCell", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "izhikevich2007_cells", + "Izhikevich2007Cell", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"Izhikevich2007Cell", + u"name": u"izhikevich2007Cell", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "ad_ex_ia_f_cells", + "AdExIaFCell", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"AdExIaFCell", + u"name": u"adExIaFCell", + u"minOccurs": u"0", + }, + None, + 
), + MemberSpec_( + "fitz_hugh_nagumo_cells", + "FitzHughNagumoCell", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"FitzHughNagumoCell", + u"name": u"fitzHughNagumoCell", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "fitz_hugh_nagumo1969_cells", + "FitzHughNagumo1969Cell", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"FitzHughNagumo1969Cell", + u"name": u"fitzHughNagumo1969Cell", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "pinsky_rinzel_ca3_cells", + "PinskyRinzelCA3Cell", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"PinskyRinzelCA3Cell", + u"name": u"pinskyRinzelCA3Cell", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "pulse_generators", + "PulseGenerator", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"PulseGenerator", + u"name": u"pulseGenerator", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "pulse_generator_dls", + "PulseGeneratorDL", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"PulseGeneratorDL", + u"name": u"pulseGeneratorDL", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "sine_generators", + "SineGenerator", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"SineGenerator", + u"name": u"sineGenerator", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "sine_generator_dls", + "SineGeneratorDL", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"SineGeneratorDL", + u"name": u"sineGeneratorDL", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "ramp_generators", + "RampGenerator", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"RampGenerator", + u"name": u"rampGenerator", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "ramp_generator_dls", + "RampGeneratorDL", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"RampGeneratorDL", + u"name": u"rampGeneratorDL", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "compound_inputs", + "CompoundInput", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"CompoundInput", + u"name": u"compoundInput", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "compound_input_dls", + "CompoundInputDL", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"CompoundInputDL", + u"name": u"compoundInputDL", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "voltage_clamps", + "VoltageClamp", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"VoltageClamp", + u"name": u"voltageClamp", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "voltage_clamp_triples", + "VoltageClampTriple", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"VoltageClampTriple", + u"name": u"voltageClampTriple", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "spike_arrays", + "SpikeArray", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"SpikeArray", + u"name": u"spikeArray", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "timed_synaptic_inputs", + "TimedSynapticInput", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"TimedSynapticInput", + u"name": u"timedSynapticInput", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "spike_generators", + "SpikeGenerator", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"SpikeGenerator", + u"name": u"spikeGenerator", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "spike_generator_randoms", + "SpikeGeneratorRandom", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"SpikeGeneratorRandom", + u"name": u"spikeGeneratorRandom", + u"minOccurs": u"0", + }, + None, + ), + 
MemberSpec_( + "spike_generator_poissons", + "SpikeGeneratorPoisson", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"SpikeGeneratorPoisson", + u"name": u"spikeGeneratorPoisson", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "spike_generator_ref_poissons", + "SpikeGeneratorRefPoisson", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"SpikeGeneratorRefPoisson", + u"name": u"spikeGeneratorRefPoisson", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "poisson_firing_synapses", + "PoissonFiringSynapse", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"PoissonFiringSynapse", + u"name": u"poissonFiringSynapse", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "transient_poisson_firing_synapses", + "TransientPoissonFiringSynapse", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"TransientPoissonFiringSynapse", + u"name": u"transientPoissonFiringSynapse", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "IF_curr_alpha", + "IF_curr_alpha", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"IF_curr_alpha", + u"name": u"IF_curr_alpha", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "IF_curr_exp", + "IF_curr_exp", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"IF_curr_exp", + u"name": u"IF_curr_exp", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "IF_cond_alpha", + "IF_cond_alpha", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"IF_cond_alpha", + u"name": u"IF_cond_alpha", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "IF_cond_exp", + "IF_cond_exp", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"IF_cond_exp", + u"name": u"IF_cond_exp", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "EIF_cond_exp_isfa_ista", + "EIF_cond_exp_isfa_ista", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"EIF_cond_exp_isfa_ista", + u"name": u"EIF_cond_exp_isfa_ista", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "EIF_cond_alpha_isfa_ista", + "EIF_cond_alpha_isfa_ista", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"EIF_cond_alpha_isfa_ista", + u"name": u"EIF_cond_alpha_isfa_ista", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "HH_cond_exp", + "HH_cond_exp", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"HH_cond_exp", + u"name": u"HH_cond_exp", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "exp_cond_synapses", + "ExpCondSynapse", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"ExpCondSynapse", + u"name": u"expCondSynapse", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "alpha_cond_synapses", + "AlphaCondSynapse", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"AlphaCondSynapse", + u"name": u"alphaCondSynapse", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "exp_curr_synapses", + "ExpCurrSynapse", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"ExpCurrSynapse", + u"name": u"expCurrSynapse", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "alpha_curr_synapses", + "AlphaCurrSynapse", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"AlphaCurrSynapse", + u"name": u"alphaCurrSynapse", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "SpikeSourcePoisson", + "SpikeSourcePoisson", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"SpikeSourcePoisson", + u"name": u"SpikeSourcePoisson", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "networks", + "Network", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"Network", + u"name": u"network", + 
u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "ComponentType", + "ComponentType", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"ComponentType", + u"name": u"ComponentType", + u"minOccurs": u"0", + }, + None, + ), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, includes=None, extracellular_properties=None, intracellular_properties=None, morphology=None, ion_channel=None, ion_channel_hhs=None, ion_channel_v_shifts=None, ion_channel_kses=None, decaying_pool_concentration_models=None, fixed_factor_concentration_models=None, alpha_current_synapses=None, alpha_synapses=None, exp_one_synapses=None, exp_two_synapses=None, exp_three_synapses=None, blocking_plastic_synapses=None, double_synapses=None, gap_junctions=None, silent_synapses=None, linear_graded_synapses=None, graded_synapses=None, biophysical_properties=None, cells=None, cell2_ca_poolses=None, base_cells=None, iaf_tau_cells=None, iaf_tau_ref_cells=None, iaf_cells=None, iaf_ref_cells=None, izhikevich_cells=None, izhikevich2007_cells=None, ad_ex_ia_f_cells=None, fitz_hugh_nagumo_cells=None, fitz_hugh_nagumo1969_cells=None, pinsky_rinzel_ca3_cells=None, pulse_generators=None, pulse_generator_dls=None, sine_generators=None, sine_generator_dls=None, ramp_generators=None, ramp_generator_dls=None, compound_inputs=None, compound_input_dls=None, voltage_clamps=None, voltage_clamp_triples=None, spike_arrays=None, timed_synaptic_inputs=None, spike_generators=None, spike_generator_randoms=None, spike_generator_poissons=None, spike_generator_ref_poissons=None, poisson_firing_synapses=None, transient_poisson_firing_synapses=None, IF_curr_alpha=None, IF_curr_exp=None, IF_cond_alpha=None, IF_cond_exp=None, EIF_cond_exp_isfa_ista=None, EIF_cond_alpha_isfa_ista=None, HH_cond_exp=None, exp_cond_synapses=None, alpha_cond_synapses=None, exp_curr_synapses=None, alpha_curr_synapses=None, SpikeSourcePoisson=None, networks=None, ComponentType=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + includes=None, + extracellular_properties=None, + intracellular_properties=None, + morphology=None, + ion_channel=None, + ion_channel_hhs=None, + ion_channel_v_shifts=None, + ion_channel_kses=None, + decaying_pool_concentration_models=None, + fixed_factor_concentration_models=None, + alpha_current_synapses=None, + alpha_synapses=None, + exp_one_synapses=None, + exp_two_synapses=None, + exp_three_synapses=None, + blocking_plastic_synapses=None, + double_synapses=None, + gap_junctions=None, + silent_synapses=None, + linear_graded_synapses=None, + graded_synapses=None, + biophysical_properties=None, + cells=None, + cell2_ca_poolses=None, + base_cells=None, + iaf_tau_cells=None, + iaf_tau_ref_cells=None, + iaf_cells=None, + iaf_ref_cells=None, + izhikevich_cells=None, + izhikevich2007_cells=None, + ad_ex_ia_f_cells=None, + fitz_hugh_nagumo_cells=None, + fitz_hugh_nagumo1969_cells=None, + pinsky_rinzel_ca3_cells=None, + pulse_generators=None, + pulse_generator_dls=None, + sine_generators=None, + sine_generator_dls=None, + ramp_generators=None, + ramp_generator_dls=None, + compound_inputs=None, + compound_input_dls=None, + voltage_clamps=None, + voltage_clamp_triples=None, + spike_arrays=None, + timed_synaptic_inputs=None, + spike_generators=None, + spike_generator_randoms=None, + spike_generator_poissons=None, + spike_generator_ref_poissons=None, + 
poisson_firing_synapses=None, + transient_poisson_firing_synapses=None, + IF_curr_alpha=None, + IF_curr_exp=None, + IF_cond_alpha=None, + IF_cond_exp=None, + EIF_cond_exp_isfa_ista=None, + EIF_cond_alpha_isfa_ista=None, + HH_cond_exp=None, + exp_cond_synapses=None, + alpha_cond_synapses=None, + exp_curr_synapses=None, + alpha_curr_synapses=None, + SpikeSourcePoisson=None, + networks=None, + ComponentType=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(NeuroMLDocument, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(NeuroMLDocument, self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_ + ) if includes is None: self.includes = [] else: @@ -15547,254 +28944,773 @@ def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properti self.ComponentType = [] else: self.ComponentType = ComponentType + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, NeuroMLDocument) + subclass = getSubclassFromModule_(CurrentSubclassModule_, NeuroMLDocument) if subclass is not None: return subclass(*args_, **kwargs_) if NeuroMLDocument.subclass: return NeuroMLDocument.subclass(*args_, **kwargs_) else: return NeuroMLDocument(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): if ( - self.includes or - self.extracellular_properties or - self.intracellular_properties or - self.morphology or - self.ion_channel or - self.ion_channel_hhs or - self.ion_channel_v_shifts or - self.ion_channel_kses or - self.decaying_pool_concentration_models or - self.fixed_factor_concentration_models or - self.alpha_current_synapses or - self.alpha_synapses or - self.exp_one_synapses or - self.exp_two_synapses or - self.exp_three_synapses or - self.blocking_plastic_synapses or - self.double_synapses or - self.gap_junctions or - self.silent_synapses or - self.linear_graded_synapses or - self.graded_synapses or - self.biophysical_properties or - self.cells or - self.cell2_ca_poolses or - self.base_cells or - self.iaf_tau_cells or - self.iaf_tau_ref_cells or - self.iaf_cells or - self.iaf_ref_cells or - self.izhikevich_cells or - self.izhikevich2007_cells or - self.ad_ex_ia_f_cells or - self.fitz_hugh_nagumo_cells or - self.fitz_hugh_nagumo1969_cells or - self.pinsky_rinzel_ca3_cells or - self.pulse_generators or - self.pulse_generator_dls or - self.sine_generators or - self.sine_generator_dls or - self.ramp_generators or - self.ramp_generator_dls or - self.compound_inputs or - self.compound_input_dls or - self.voltage_clamps or - self.voltage_clamp_triples or - self.spike_arrays or - self.timed_synaptic_inputs or - self.spike_generators or - self.spike_generator_randoms or - self.spike_generator_poissons or - self.spike_generator_ref_poissons or - self.poisson_firing_synapses or - self.transient_poisson_firing_synapses or - self.IF_curr_alpha or - self.IF_curr_exp or - self.IF_cond_alpha or - self.IF_cond_exp or - self.EIF_cond_exp_isfa_ista or - self.EIF_cond_alpha_isfa_ista or - self.HH_cond_exp or - self.exp_cond_synapses or - self.alpha_cond_synapses or - self.exp_curr_synapses or - self.alpha_curr_synapses or - self.SpikeSourcePoisson or - self.networks or - self.ComponentType or - super(NeuroMLDocument, self).hasContent_() + self.includes + or self.extracellular_properties + or self.intracellular_properties + or 
self.morphology + or self.ion_channel + or self.ion_channel_hhs + or self.ion_channel_v_shifts + or self.ion_channel_kses + or self.decaying_pool_concentration_models + or self.fixed_factor_concentration_models + or self.alpha_current_synapses + or self.alpha_synapses + or self.exp_one_synapses + or self.exp_two_synapses + or self.exp_three_synapses + or self.blocking_plastic_synapses + or self.double_synapses + or self.gap_junctions + or self.silent_synapses + or self.linear_graded_synapses + or self.graded_synapses + or self.biophysical_properties + or self.cells + or self.cell2_ca_poolses + or self.base_cells + or self.iaf_tau_cells + or self.iaf_tau_ref_cells + or self.iaf_cells + or self.iaf_ref_cells + or self.izhikevich_cells + or self.izhikevich2007_cells + or self.ad_ex_ia_f_cells + or self.fitz_hugh_nagumo_cells + or self.fitz_hugh_nagumo1969_cells + or self.pinsky_rinzel_ca3_cells + or self.pulse_generators + or self.pulse_generator_dls + or self.sine_generators + or self.sine_generator_dls + or self.ramp_generators + or self.ramp_generator_dls + or self.compound_inputs + or self.compound_input_dls + or self.voltage_clamps + or self.voltage_clamp_triples + or self.spike_arrays + or self.timed_synaptic_inputs + or self.spike_generators + or self.spike_generator_randoms + or self.spike_generator_poissons + or self.spike_generator_ref_poissons + or self.poisson_firing_synapses + or self.transient_poisson_firing_synapses + or self.IF_curr_alpha + or self.IF_curr_exp + or self.IF_cond_alpha + or self.IF_cond_exp + or self.EIF_cond_exp_isfa_ista + or self.EIF_cond_alpha_isfa_ista + or self.HH_cond_exp + or self.exp_cond_synapses + or self.alpha_cond_synapses + or self.exp_curr_synapses + or self.alpha_curr_synapses + or self.SpikeSourcePoisson + or self.networks + or self.ComponentType + or super(NeuroMLDocument, self).hasContent_() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='NeuroMLDocument', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('NeuroMLDocument') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="NeuroMLDocument", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("NeuroMLDocument") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='NeuroMLDocument') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="NeuroMLDocument" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='NeuroMLDocument', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="NeuroMLDocument", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, 
level, already_processed, namespaceprefix_='', name_='NeuroMLDocument'): - super(NeuroMLDocument, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='NeuroMLDocument') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='NeuroMLDocument', fromsubclass_=False, pretty_print=True): - super(NeuroMLDocument, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="NeuroMLDocument", + ): + super(NeuroMLDocument, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="NeuroMLDocument" + ) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="NeuroMLDocument", + fromsubclass_=False, + pretty_print=True, + ): + super(NeuroMLDocument, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" for include_ in self.includes: - include_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='include', pretty_print=pretty_print) + include_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="include", + pretty_print=pretty_print, + ) for extracellularProperties_ in self.extracellular_properties: - extracellularProperties_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='extracellularProperties', pretty_print=pretty_print) + extracellularProperties_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="extracellularProperties", + pretty_print=pretty_print, + ) for intracellularProperties_ in self.intracellular_properties: - intracellularProperties_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='intracellularProperties', pretty_print=pretty_print) + intracellularProperties_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="intracellularProperties", + pretty_print=pretty_print, + ) for morphology_ in self.morphology: - morphology_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='morphology', pretty_print=pretty_print) + morphology_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="morphology", + pretty_print=pretty_print, + ) for ionChannel_ in self.ion_channel: - ionChannel_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ionChannel', pretty_print=pretty_print) + ionChannel_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="ionChannel", + pretty_print=pretty_print, + ) for ionChannelHH_ in self.ion_channel_hhs: - ionChannelHH_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ionChannelHH', pretty_print=pretty_print) + ionChannelHH_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="ionChannelHH", + pretty_print=pretty_print, + ) for ionChannelVShift_ in self.ion_channel_v_shifts: - ionChannelVShift_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ionChannelVShift', pretty_print=pretty_print) + ionChannelVShift_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="ionChannelVShift", + pretty_print=pretty_print, + ) for ionChannelKS_ in self.ion_channel_kses: - ionChannelKS_.export(outfile, level, namespaceprefix_, 
namespacedef_='', name_='ionChannelKS', pretty_print=pretty_print) + ionChannelKS_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="ionChannelKS", + pretty_print=pretty_print, + ) for decayingPoolConcentrationModel_ in self.decaying_pool_concentration_models: - decayingPoolConcentrationModel_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='decayingPoolConcentrationModel', pretty_print=pretty_print) + decayingPoolConcentrationModel_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="decayingPoolConcentrationModel", + pretty_print=pretty_print, + ) for fixedFactorConcentrationModel_ in self.fixed_factor_concentration_models: - fixedFactorConcentrationModel_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='fixedFactorConcentrationModel', pretty_print=pretty_print) + fixedFactorConcentrationModel_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="fixedFactorConcentrationModel", + pretty_print=pretty_print, + ) for alphaCurrentSynapse_ in self.alpha_current_synapses: - alphaCurrentSynapse_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='alphaCurrentSynapse', pretty_print=pretty_print) + alphaCurrentSynapse_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="alphaCurrentSynapse", + pretty_print=pretty_print, + ) for alphaSynapse_ in self.alpha_synapses: - alphaSynapse_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='alphaSynapse', pretty_print=pretty_print) + alphaSynapse_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="alphaSynapse", + pretty_print=pretty_print, + ) for expOneSynapse_ in self.exp_one_synapses: - expOneSynapse_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='expOneSynapse', pretty_print=pretty_print) + expOneSynapse_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="expOneSynapse", + pretty_print=pretty_print, + ) for expTwoSynapse_ in self.exp_two_synapses: - expTwoSynapse_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='expTwoSynapse', pretty_print=pretty_print) + expTwoSynapse_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="expTwoSynapse", + pretty_print=pretty_print, + ) for expThreeSynapse_ in self.exp_three_synapses: - expThreeSynapse_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='expThreeSynapse', pretty_print=pretty_print) + expThreeSynapse_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="expThreeSynapse", + pretty_print=pretty_print, + ) for blockingPlasticSynapse_ in self.blocking_plastic_synapses: - blockingPlasticSynapse_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='blockingPlasticSynapse', pretty_print=pretty_print) + blockingPlasticSynapse_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="blockingPlasticSynapse", + pretty_print=pretty_print, + ) for doubleSynapse_ in self.double_synapses: - doubleSynapse_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='doubleSynapse', pretty_print=pretty_print) + doubleSynapse_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="doubleSynapse", + pretty_print=pretty_print, + ) for gapJunction_ in self.gap_junctions: - gapJunction_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='gapJunction', pretty_print=pretty_print) + gapJunction_.export( + outfile, + level, + namespaceprefix_, + 
namespacedef_="", + name_="gapJunction", + pretty_print=pretty_print, + ) for silentSynapse_ in self.silent_synapses: - silentSynapse_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='silentSynapse', pretty_print=pretty_print) + silentSynapse_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="silentSynapse", + pretty_print=pretty_print, + ) for linearGradedSynapse_ in self.linear_graded_synapses: - linearGradedSynapse_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='linearGradedSynapse', pretty_print=pretty_print) + linearGradedSynapse_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="linearGradedSynapse", + pretty_print=pretty_print, + ) for gradedSynapse_ in self.graded_synapses: - gradedSynapse_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='gradedSynapse', pretty_print=pretty_print) + gradedSynapse_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="gradedSynapse", + pretty_print=pretty_print, + ) for biophysicalProperties_ in self.biophysical_properties: - biophysicalProperties_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='biophysicalProperties', pretty_print=pretty_print) + biophysicalProperties_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="biophysicalProperties", + pretty_print=pretty_print, + ) for cell_ in self.cells: - cell_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='cell', pretty_print=pretty_print) + cell_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="cell", + pretty_print=pretty_print, + ) for cell2CaPools_ in self.cell2_ca_poolses: - cell2CaPools_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='cell2CaPools', pretty_print=pretty_print) + cell2CaPools_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="cell2CaPools", + pretty_print=pretty_print, + ) for baseCell_ in self.base_cells: - baseCell_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='baseCell', pretty_print=pretty_print) + baseCell_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="baseCell", + pretty_print=pretty_print, + ) for iafTauCell_ in self.iaf_tau_cells: - iafTauCell_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='iafTauCell', pretty_print=pretty_print) + iafTauCell_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="iafTauCell", + pretty_print=pretty_print, + ) for iafTauRefCell_ in self.iaf_tau_ref_cells: - iafTauRefCell_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='iafTauRefCell', pretty_print=pretty_print) + iafTauRefCell_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="iafTauRefCell", + pretty_print=pretty_print, + ) for iafCell_ in self.iaf_cells: - iafCell_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='iafCell', pretty_print=pretty_print) + iafCell_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="iafCell", + pretty_print=pretty_print, + ) for iafRefCell_ in self.iaf_ref_cells: - iafRefCell_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='iafRefCell', pretty_print=pretty_print) + iafRefCell_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="iafRefCell", + pretty_print=pretty_print, + ) for izhikevichCell_ in self.izhikevich_cells: - izhikevichCell_.export(outfile, level, namespaceprefix_, 
namespacedef_='', name_='izhikevichCell', pretty_print=pretty_print) + izhikevichCell_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="izhikevichCell", + pretty_print=pretty_print, + ) for izhikevich2007Cell_ in self.izhikevich2007_cells: - izhikevich2007Cell_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='izhikevich2007Cell', pretty_print=pretty_print) + izhikevich2007Cell_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="izhikevich2007Cell", + pretty_print=pretty_print, + ) for adExIaFCell_ in self.ad_ex_ia_f_cells: - adExIaFCell_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='adExIaFCell', pretty_print=pretty_print) + adExIaFCell_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="adExIaFCell", + pretty_print=pretty_print, + ) for fitzHughNagumoCell_ in self.fitz_hugh_nagumo_cells: - fitzHughNagumoCell_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='fitzHughNagumoCell', pretty_print=pretty_print) + fitzHughNagumoCell_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="fitzHughNagumoCell", + pretty_print=pretty_print, + ) for fitzHughNagumo1969Cell_ in self.fitz_hugh_nagumo1969_cells: - fitzHughNagumo1969Cell_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='fitzHughNagumo1969Cell', pretty_print=pretty_print) + fitzHughNagumo1969Cell_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="fitzHughNagumo1969Cell", + pretty_print=pretty_print, + ) for pinskyRinzelCA3Cell_ in self.pinsky_rinzel_ca3_cells: - pinskyRinzelCA3Cell_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='pinskyRinzelCA3Cell', pretty_print=pretty_print) + pinskyRinzelCA3Cell_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="pinskyRinzelCA3Cell", + pretty_print=pretty_print, + ) for pulseGenerator_ in self.pulse_generators: - pulseGenerator_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='pulseGenerator', pretty_print=pretty_print) + pulseGenerator_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="pulseGenerator", + pretty_print=pretty_print, + ) for pulseGeneratorDL_ in self.pulse_generator_dls: - pulseGeneratorDL_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='pulseGeneratorDL', pretty_print=pretty_print) + pulseGeneratorDL_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="pulseGeneratorDL", + pretty_print=pretty_print, + ) for sineGenerator_ in self.sine_generators: - sineGenerator_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='sineGenerator', pretty_print=pretty_print) + sineGenerator_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="sineGenerator", + pretty_print=pretty_print, + ) for sineGeneratorDL_ in self.sine_generator_dls: - sineGeneratorDL_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='sineGeneratorDL', pretty_print=pretty_print) + sineGeneratorDL_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="sineGeneratorDL", + pretty_print=pretty_print, + ) for rampGenerator_ in self.ramp_generators: - rampGenerator_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='rampGenerator', pretty_print=pretty_print) + rampGenerator_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="rampGenerator", + pretty_print=pretty_print, + ) for rampGeneratorDL_ in 
self.ramp_generator_dls: - rampGeneratorDL_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='rampGeneratorDL', pretty_print=pretty_print) + rampGeneratorDL_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="rampGeneratorDL", + pretty_print=pretty_print, + ) for compoundInput_ in self.compound_inputs: - compoundInput_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='compoundInput', pretty_print=pretty_print) + compoundInput_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="compoundInput", + pretty_print=pretty_print, + ) for compoundInputDL_ in self.compound_input_dls: - compoundInputDL_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='compoundInputDL', pretty_print=pretty_print) + compoundInputDL_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="compoundInputDL", + pretty_print=pretty_print, + ) for voltageClamp_ in self.voltage_clamps: - voltageClamp_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='voltageClamp', pretty_print=pretty_print) + voltageClamp_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="voltageClamp", + pretty_print=pretty_print, + ) for voltageClampTriple_ in self.voltage_clamp_triples: - voltageClampTriple_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='voltageClampTriple', pretty_print=pretty_print) + voltageClampTriple_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="voltageClampTriple", + pretty_print=pretty_print, + ) for spikeArray_ in self.spike_arrays: - spikeArray_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='spikeArray', pretty_print=pretty_print) + spikeArray_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="spikeArray", + pretty_print=pretty_print, + ) for timedSynapticInput_ in self.timed_synaptic_inputs: - timedSynapticInput_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='timedSynapticInput', pretty_print=pretty_print) + timedSynapticInput_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="timedSynapticInput", + pretty_print=pretty_print, + ) for spikeGenerator_ in self.spike_generators: - spikeGenerator_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='spikeGenerator', pretty_print=pretty_print) + spikeGenerator_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="spikeGenerator", + pretty_print=pretty_print, + ) for spikeGeneratorRandom_ in self.spike_generator_randoms: - spikeGeneratorRandom_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='spikeGeneratorRandom', pretty_print=pretty_print) + spikeGeneratorRandom_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="spikeGeneratorRandom", + pretty_print=pretty_print, + ) for spikeGeneratorPoisson_ in self.spike_generator_poissons: - spikeGeneratorPoisson_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='spikeGeneratorPoisson', pretty_print=pretty_print) + spikeGeneratorPoisson_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="spikeGeneratorPoisson", + pretty_print=pretty_print, + ) for spikeGeneratorRefPoisson_ in self.spike_generator_ref_poissons: - spikeGeneratorRefPoisson_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='spikeGeneratorRefPoisson', pretty_print=pretty_print) + spikeGeneratorRefPoisson_.export( + outfile, + level, + 
namespaceprefix_, + namespacedef_="", + name_="spikeGeneratorRefPoisson", + pretty_print=pretty_print, + ) for poissonFiringSynapse_ in self.poisson_firing_synapses: - poissonFiringSynapse_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='poissonFiringSynapse', pretty_print=pretty_print) + poissonFiringSynapse_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="poissonFiringSynapse", + pretty_print=pretty_print, + ) for transientPoissonFiringSynapse_ in self.transient_poisson_firing_synapses: - transientPoissonFiringSynapse_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='transientPoissonFiringSynapse', pretty_print=pretty_print) + transientPoissonFiringSynapse_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="transientPoissonFiringSynapse", + pretty_print=pretty_print, + ) for IF_curr_alpha_ in self.IF_curr_alpha: - IF_curr_alpha_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='IF_curr_alpha', pretty_print=pretty_print) + IF_curr_alpha_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="IF_curr_alpha", + pretty_print=pretty_print, + ) for IF_curr_exp_ in self.IF_curr_exp: - IF_curr_exp_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='IF_curr_exp', pretty_print=pretty_print) + IF_curr_exp_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="IF_curr_exp", + pretty_print=pretty_print, + ) for IF_cond_alpha_ in self.IF_cond_alpha: - IF_cond_alpha_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='IF_cond_alpha', pretty_print=pretty_print) + IF_cond_alpha_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="IF_cond_alpha", + pretty_print=pretty_print, + ) for IF_cond_exp_ in self.IF_cond_exp: - IF_cond_exp_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='IF_cond_exp', pretty_print=pretty_print) + IF_cond_exp_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="IF_cond_exp", + pretty_print=pretty_print, + ) for EIF_cond_exp_isfa_ista_ in self.EIF_cond_exp_isfa_ista: - EIF_cond_exp_isfa_ista_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='EIF_cond_exp_isfa_ista', pretty_print=pretty_print) + EIF_cond_exp_isfa_ista_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="EIF_cond_exp_isfa_ista", + pretty_print=pretty_print, + ) for EIF_cond_alpha_isfa_ista_ in self.EIF_cond_alpha_isfa_ista: - EIF_cond_alpha_isfa_ista_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='EIF_cond_alpha_isfa_ista', pretty_print=pretty_print) + EIF_cond_alpha_isfa_ista_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="EIF_cond_alpha_isfa_ista", + pretty_print=pretty_print, + ) for HH_cond_exp_ in self.HH_cond_exp: - HH_cond_exp_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='HH_cond_exp', pretty_print=pretty_print) + HH_cond_exp_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="HH_cond_exp", + pretty_print=pretty_print, + ) for expCondSynapse_ in self.exp_cond_synapses: - expCondSynapse_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='expCondSynapse', pretty_print=pretty_print) + expCondSynapse_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="expCondSynapse", + pretty_print=pretty_print, + ) for alphaCondSynapse_ in self.alpha_cond_synapses: - alphaCondSynapse_.export(outfile, level, 
namespaceprefix_, namespacedef_='', name_='alphaCondSynapse', pretty_print=pretty_print) + alphaCondSynapse_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="alphaCondSynapse", + pretty_print=pretty_print, + ) for expCurrSynapse_ in self.exp_curr_synapses: - expCurrSynapse_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='expCurrSynapse', pretty_print=pretty_print) + expCurrSynapse_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="expCurrSynapse", + pretty_print=pretty_print, + ) for alphaCurrSynapse_ in self.alpha_curr_synapses: - alphaCurrSynapse_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='alphaCurrSynapse', pretty_print=pretty_print) + alphaCurrSynapse_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="alphaCurrSynapse", + pretty_print=pretty_print, + ) for SpikeSourcePoisson_ in self.SpikeSourcePoisson: - SpikeSourcePoisson_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='SpikeSourcePoisson', pretty_print=pretty_print) + SpikeSourcePoisson_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="SpikeSourcePoisson", + pretty_print=pretty_print, + ) for network_ in self.networks: - network_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='network', pretty_print=pretty_print) + network_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="network", + pretty_print=pretty_print, + ) for ComponentType_ in self.ComponentType: - ComponentType_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ComponentType', pretty_print=pretty_print) + ComponentType_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="ComponentType", + pretty_print=pretty_print, + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -15802,357 +29718,358 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): super(NeuroMLDocument, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'include': + if nodeName_ == "include": obj_ = IncludeType.factory(parent_object_=self) obj_.build(child_) self.includes.append(obj_) - obj_.original_tagname_ = 'include' - elif nodeName_ == 'extracellularProperties': + obj_.original_tagname_ = "include" + elif nodeName_ == "extracellularProperties": obj_ = ExtracellularProperties.factory(parent_object_=self) obj_.build(child_) self.extracellular_properties.append(obj_) - obj_.original_tagname_ = 'extracellularProperties' - elif nodeName_ == 'intracellularProperties': + obj_.original_tagname_ = "extracellularProperties" + elif nodeName_ == "intracellularProperties": class_obj_ = self.get_class_obj_(child_, IntracellularProperties) obj_ = class_obj_.factory(parent_object_=self) obj_.build(child_) self.intracellular_properties.append(obj_) - obj_.original_tagname_ = 'intracellularProperties' - elif nodeName_ == 'morphology': + obj_.original_tagname_ = "intracellularProperties" + elif nodeName_ == "morphology": obj_ = Morphology.factory(parent_object_=self) obj_.build(child_) self.morphology.append(obj_) - obj_.original_tagname_ = 'morphology' - elif nodeName_ == 'ionChannel': + obj_.original_tagname_ = "morphology" + elif nodeName_ == "ionChannel": class_obj_ = 
self.get_class_obj_(child_, IonChannel) obj_ = class_obj_.factory(parent_object_=self) obj_.build(child_) self.ion_channel.append(obj_) - obj_.original_tagname_ = 'ionChannel' - elif nodeName_ == 'ionChannelHH': + obj_.original_tagname_ = "ionChannel" + elif nodeName_ == "ionChannelHH": obj_ = IonChannelHH.factory(parent_object_=self) obj_.build(child_) self.ion_channel_hhs.append(obj_) - obj_.original_tagname_ = 'ionChannelHH' - elif nodeName_ == 'ionChannelVShift': + obj_.original_tagname_ = "ionChannelHH" + elif nodeName_ == "ionChannelVShift": obj_ = IonChannelVShift.factory(parent_object_=self) obj_.build(child_) self.ion_channel_v_shifts.append(obj_) - obj_.original_tagname_ = 'ionChannelVShift' - elif nodeName_ == 'ionChannelKS': + obj_.original_tagname_ = "ionChannelVShift" + elif nodeName_ == "ionChannelKS": obj_ = IonChannelKS.factory(parent_object_=self) obj_.build(child_) self.ion_channel_kses.append(obj_) - obj_.original_tagname_ = 'ionChannelKS' - elif nodeName_ == 'decayingPoolConcentrationModel': + obj_.original_tagname_ = "ionChannelKS" + elif nodeName_ == "decayingPoolConcentrationModel": class_obj_ = self.get_class_obj_(child_, DecayingPoolConcentrationModel) obj_ = class_obj_.factory(parent_object_=self) obj_.build(child_) self.decaying_pool_concentration_models.append(obj_) - obj_.original_tagname_ = 'decayingPoolConcentrationModel' - elif nodeName_ == 'fixedFactorConcentrationModel': + obj_.original_tagname_ = "decayingPoolConcentrationModel" + elif nodeName_ == "fixedFactorConcentrationModel": obj_ = FixedFactorConcentrationModel.factory(parent_object_=self) obj_.build(child_) self.fixed_factor_concentration_models.append(obj_) - obj_.original_tagname_ = 'fixedFactorConcentrationModel' - elif nodeName_ == 'alphaCurrentSynapse': + obj_.original_tagname_ = "fixedFactorConcentrationModel" + elif nodeName_ == "alphaCurrentSynapse": obj_ = AlphaCurrentSynapse.factory(parent_object_=self) obj_.build(child_) self.alpha_current_synapses.append(obj_) - obj_.original_tagname_ = 'alphaCurrentSynapse' - elif nodeName_ == 'alphaSynapse': + obj_.original_tagname_ = "alphaCurrentSynapse" + elif nodeName_ == "alphaSynapse": obj_ = AlphaSynapse.factory(parent_object_=self) obj_.build(child_) self.alpha_synapses.append(obj_) - obj_.original_tagname_ = 'alphaSynapse' - elif nodeName_ == 'expOneSynapse': + obj_.original_tagname_ = "alphaSynapse" + elif nodeName_ == "expOneSynapse": obj_ = ExpOneSynapse.factory(parent_object_=self) obj_.build(child_) self.exp_one_synapses.append(obj_) - obj_.original_tagname_ = 'expOneSynapse' - elif nodeName_ == 'expTwoSynapse': + obj_.original_tagname_ = "expOneSynapse" + elif nodeName_ == "expTwoSynapse": class_obj_ = self.get_class_obj_(child_, ExpTwoSynapse) obj_ = class_obj_.factory(parent_object_=self) obj_.build(child_) self.exp_two_synapses.append(obj_) - obj_.original_tagname_ = 'expTwoSynapse' - elif nodeName_ == 'expThreeSynapse': + obj_.original_tagname_ = "expTwoSynapse" + elif nodeName_ == "expThreeSynapse": obj_ = ExpThreeSynapse.factory(parent_object_=self) obj_.build(child_) self.exp_three_synapses.append(obj_) - obj_.original_tagname_ = 'expThreeSynapse' - elif nodeName_ == 'blockingPlasticSynapse': + obj_.original_tagname_ = "expThreeSynapse" + elif nodeName_ == "blockingPlasticSynapse": obj_ = BlockingPlasticSynapse.factory(parent_object_=self) obj_.build(child_) self.blocking_plastic_synapses.append(obj_) - obj_.original_tagname_ = 'blockingPlasticSynapse' - elif nodeName_ == 'doubleSynapse': + obj_.original_tagname_ = 
"blockingPlasticSynapse" + elif nodeName_ == "doubleSynapse": obj_ = DoubleSynapse.factory(parent_object_=self) obj_.build(child_) self.double_synapses.append(obj_) - obj_.original_tagname_ = 'doubleSynapse' - elif nodeName_ == 'gapJunction': + obj_.original_tagname_ = "doubleSynapse" + elif nodeName_ == "gapJunction": obj_ = GapJunction.factory(parent_object_=self) obj_.build(child_) self.gap_junctions.append(obj_) - obj_.original_tagname_ = 'gapJunction' - elif nodeName_ == 'silentSynapse': + obj_.original_tagname_ = "gapJunction" + elif nodeName_ == "silentSynapse": obj_ = SilentSynapse.factory(parent_object_=self) obj_.build(child_) self.silent_synapses.append(obj_) - obj_.original_tagname_ = 'silentSynapse' - elif nodeName_ == 'linearGradedSynapse': + obj_.original_tagname_ = "silentSynapse" + elif nodeName_ == "linearGradedSynapse": obj_ = LinearGradedSynapse.factory(parent_object_=self) obj_.build(child_) self.linear_graded_synapses.append(obj_) - obj_.original_tagname_ = 'linearGradedSynapse' - elif nodeName_ == 'gradedSynapse': + obj_.original_tagname_ = "linearGradedSynapse" + elif nodeName_ == "gradedSynapse": obj_ = GradedSynapse.factory(parent_object_=self) obj_.build(child_) self.graded_synapses.append(obj_) - obj_.original_tagname_ = 'gradedSynapse' - elif nodeName_ == 'biophysicalProperties': + obj_.original_tagname_ = "gradedSynapse" + elif nodeName_ == "biophysicalProperties": obj_ = BiophysicalProperties.factory(parent_object_=self) obj_.build(child_) self.biophysical_properties.append(obj_) - obj_.original_tagname_ = 'biophysicalProperties' - elif nodeName_ == 'cell': + obj_.original_tagname_ = "biophysicalProperties" + elif nodeName_ == "cell": class_obj_ = self.get_class_obj_(child_, Cell) obj_ = class_obj_.factory(parent_object_=self) obj_.build(child_) self.cells.append(obj_) - obj_.original_tagname_ = 'cell' - elif nodeName_ == 'cell2CaPools': + obj_.original_tagname_ = "cell" + elif nodeName_ == "cell2CaPools": obj_ = Cell2CaPools.factory(parent_object_=self) obj_.build(child_) self.cell2_ca_poolses.append(obj_) - obj_.original_tagname_ = 'cell2CaPools' - elif nodeName_ == 'baseCell': + obj_.original_tagname_ = "cell2CaPools" + elif nodeName_ == "baseCell": class_obj_ = self.get_class_obj_(child_, BaseCell) obj_ = class_obj_.factory(parent_object_=self) obj_.build(child_) self.base_cells.append(obj_) - obj_.original_tagname_ = 'baseCell' - elif nodeName_ == 'iafTauCell': + obj_.original_tagname_ = "baseCell" + elif nodeName_ == "iafTauCell": class_obj_ = self.get_class_obj_(child_, IafTauCell) obj_ = class_obj_.factory(parent_object_=self) obj_.build(child_) self.iaf_tau_cells.append(obj_) - obj_.original_tagname_ = 'iafTauCell' - elif nodeName_ == 'iafTauRefCell': + obj_.original_tagname_ = "iafTauCell" + elif nodeName_ == "iafTauRefCell": obj_ = IafTauRefCell.factory(parent_object_=self) obj_.build(child_) self.iaf_tau_ref_cells.append(obj_) - obj_.original_tagname_ = 'iafTauRefCell' - elif nodeName_ == 'iafCell': + obj_.original_tagname_ = "iafTauRefCell" + elif nodeName_ == "iafCell": class_obj_ = self.get_class_obj_(child_, IafCell) obj_ = class_obj_.factory(parent_object_=self) obj_.build(child_) self.iaf_cells.append(obj_) - obj_.original_tagname_ = 'iafCell' - elif nodeName_ == 'iafRefCell': + obj_.original_tagname_ = "iafCell" + elif nodeName_ == "iafRefCell": obj_ = IafRefCell.factory(parent_object_=self) obj_.build(child_) self.iaf_ref_cells.append(obj_) - obj_.original_tagname_ = 'iafRefCell' - elif nodeName_ == 'izhikevichCell': + 
obj_.original_tagname_ = "iafRefCell" + elif nodeName_ == "izhikevichCell": obj_ = IzhikevichCell.factory(parent_object_=self) obj_.build(child_) self.izhikevich_cells.append(obj_) - obj_.original_tagname_ = 'izhikevichCell' - elif nodeName_ == 'izhikevich2007Cell': + obj_.original_tagname_ = "izhikevichCell" + elif nodeName_ == "izhikevich2007Cell": obj_ = Izhikevich2007Cell.factory(parent_object_=self) obj_.build(child_) self.izhikevich2007_cells.append(obj_) - obj_.original_tagname_ = 'izhikevich2007Cell' - elif nodeName_ == 'adExIaFCell': + obj_.original_tagname_ = "izhikevich2007Cell" + elif nodeName_ == "adExIaFCell": obj_ = AdExIaFCell.factory(parent_object_=self) obj_.build(child_) self.ad_ex_ia_f_cells.append(obj_) - obj_.original_tagname_ = 'adExIaFCell' - elif nodeName_ == 'fitzHughNagumoCell': + obj_.original_tagname_ = "adExIaFCell" + elif nodeName_ == "fitzHughNagumoCell": obj_ = FitzHughNagumoCell.factory(parent_object_=self) obj_.build(child_) self.fitz_hugh_nagumo_cells.append(obj_) - obj_.original_tagname_ = 'fitzHughNagumoCell' - elif nodeName_ == 'fitzHughNagumo1969Cell': + obj_.original_tagname_ = "fitzHughNagumoCell" + elif nodeName_ == "fitzHughNagumo1969Cell": obj_ = FitzHughNagumo1969Cell.factory(parent_object_=self) obj_.build(child_) self.fitz_hugh_nagumo1969_cells.append(obj_) - obj_.original_tagname_ = 'fitzHughNagumo1969Cell' - elif nodeName_ == 'pinskyRinzelCA3Cell': + obj_.original_tagname_ = "fitzHughNagumo1969Cell" + elif nodeName_ == "pinskyRinzelCA3Cell": obj_ = PinskyRinzelCA3Cell.factory(parent_object_=self) obj_.build(child_) self.pinsky_rinzel_ca3_cells.append(obj_) - obj_.original_tagname_ = 'pinskyRinzelCA3Cell' - elif nodeName_ == 'pulseGenerator': + obj_.original_tagname_ = "pinskyRinzelCA3Cell" + elif nodeName_ == "pulseGenerator": obj_ = PulseGenerator.factory(parent_object_=self) obj_.build(child_) self.pulse_generators.append(obj_) - obj_.original_tagname_ = 'pulseGenerator' - elif nodeName_ == 'pulseGeneratorDL': + obj_.original_tagname_ = "pulseGenerator" + elif nodeName_ == "pulseGeneratorDL": obj_ = PulseGeneratorDL.factory(parent_object_=self) obj_.build(child_) self.pulse_generator_dls.append(obj_) - obj_.original_tagname_ = 'pulseGeneratorDL' - elif nodeName_ == 'sineGenerator': + obj_.original_tagname_ = "pulseGeneratorDL" + elif nodeName_ == "sineGenerator": obj_ = SineGenerator.factory(parent_object_=self) obj_.build(child_) self.sine_generators.append(obj_) - obj_.original_tagname_ = 'sineGenerator' - elif nodeName_ == 'sineGeneratorDL': + obj_.original_tagname_ = "sineGenerator" + elif nodeName_ == "sineGeneratorDL": obj_ = SineGeneratorDL.factory(parent_object_=self) obj_.build(child_) self.sine_generator_dls.append(obj_) - obj_.original_tagname_ = 'sineGeneratorDL' - elif nodeName_ == 'rampGenerator': + obj_.original_tagname_ = "sineGeneratorDL" + elif nodeName_ == "rampGenerator": obj_ = RampGenerator.factory(parent_object_=self) obj_.build(child_) self.ramp_generators.append(obj_) - obj_.original_tagname_ = 'rampGenerator' - elif nodeName_ == 'rampGeneratorDL': + obj_.original_tagname_ = "rampGenerator" + elif nodeName_ == "rampGeneratorDL": obj_ = RampGeneratorDL.factory(parent_object_=self) obj_.build(child_) self.ramp_generator_dls.append(obj_) - obj_.original_tagname_ = 'rampGeneratorDL' - elif nodeName_ == 'compoundInput': + obj_.original_tagname_ = "rampGeneratorDL" + elif nodeName_ == "compoundInput": obj_ = CompoundInput.factory(parent_object_=self) obj_.build(child_) self.compound_inputs.append(obj_) - 
obj_.original_tagname_ = 'compoundInput' - elif nodeName_ == 'compoundInputDL': + obj_.original_tagname_ = "compoundInput" + elif nodeName_ == "compoundInputDL": obj_ = CompoundInputDL.factory(parent_object_=self) obj_.build(child_) self.compound_input_dls.append(obj_) - obj_.original_tagname_ = 'compoundInputDL' - elif nodeName_ == 'voltageClamp': + obj_.original_tagname_ = "compoundInputDL" + elif nodeName_ == "voltageClamp": obj_ = VoltageClamp.factory(parent_object_=self) obj_.build(child_) self.voltage_clamps.append(obj_) - obj_.original_tagname_ = 'voltageClamp' - elif nodeName_ == 'voltageClampTriple': + obj_.original_tagname_ = "voltageClamp" + elif nodeName_ == "voltageClampTriple": obj_ = VoltageClampTriple.factory(parent_object_=self) obj_.build(child_) self.voltage_clamp_triples.append(obj_) - obj_.original_tagname_ = 'voltageClampTriple' - elif nodeName_ == 'spikeArray': + obj_.original_tagname_ = "voltageClampTriple" + elif nodeName_ == "spikeArray": obj_ = SpikeArray.factory(parent_object_=self) obj_.build(child_) self.spike_arrays.append(obj_) - obj_.original_tagname_ = 'spikeArray' - elif nodeName_ == 'timedSynapticInput': + obj_.original_tagname_ = "spikeArray" + elif nodeName_ == "timedSynapticInput": obj_ = TimedSynapticInput.factory(parent_object_=self) obj_.build(child_) self.timed_synaptic_inputs.append(obj_) - obj_.original_tagname_ = 'timedSynapticInput' - elif nodeName_ == 'spikeGenerator': + obj_.original_tagname_ = "timedSynapticInput" + elif nodeName_ == "spikeGenerator": obj_ = SpikeGenerator.factory(parent_object_=self) obj_.build(child_) self.spike_generators.append(obj_) - obj_.original_tagname_ = 'spikeGenerator' - elif nodeName_ == 'spikeGeneratorRandom': + obj_.original_tagname_ = "spikeGenerator" + elif nodeName_ == "spikeGeneratorRandom": obj_ = SpikeGeneratorRandom.factory(parent_object_=self) obj_.build(child_) self.spike_generator_randoms.append(obj_) - obj_.original_tagname_ = 'spikeGeneratorRandom' - elif nodeName_ == 'spikeGeneratorPoisson': + obj_.original_tagname_ = "spikeGeneratorRandom" + elif nodeName_ == "spikeGeneratorPoisson": class_obj_ = self.get_class_obj_(child_, SpikeGeneratorPoisson) obj_ = class_obj_.factory(parent_object_=self) obj_.build(child_) self.spike_generator_poissons.append(obj_) - obj_.original_tagname_ = 'spikeGeneratorPoisson' - elif nodeName_ == 'spikeGeneratorRefPoisson': + obj_.original_tagname_ = "spikeGeneratorPoisson" + elif nodeName_ == "spikeGeneratorRefPoisson": obj_ = SpikeGeneratorRefPoisson.factory(parent_object_=self) obj_.build(child_) self.spike_generator_ref_poissons.append(obj_) - obj_.original_tagname_ = 'spikeGeneratorRefPoisson' - elif nodeName_ == 'poissonFiringSynapse': + obj_.original_tagname_ = "spikeGeneratorRefPoisson" + elif nodeName_ == "poissonFiringSynapse": obj_ = PoissonFiringSynapse.factory(parent_object_=self) obj_.build(child_) self.poisson_firing_synapses.append(obj_) - obj_.original_tagname_ = 'poissonFiringSynapse' - elif nodeName_ == 'transientPoissonFiringSynapse': + obj_.original_tagname_ = "poissonFiringSynapse" + elif nodeName_ == "transientPoissonFiringSynapse": obj_ = TransientPoissonFiringSynapse.factory(parent_object_=self) obj_.build(child_) self.transient_poisson_firing_synapses.append(obj_) - obj_.original_tagname_ = 'transientPoissonFiringSynapse' - elif nodeName_ == 'IF_curr_alpha': + obj_.original_tagname_ = "transientPoissonFiringSynapse" + elif nodeName_ == "IF_curr_alpha": obj_ = IF_curr_alpha.factory(parent_object_=self) obj_.build(child_) 
self.IF_curr_alpha.append(obj_) - obj_.original_tagname_ = 'IF_curr_alpha' - elif nodeName_ == 'IF_curr_exp': + obj_.original_tagname_ = "IF_curr_alpha" + elif nodeName_ == "IF_curr_exp": obj_ = IF_curr_exp.factory(parent_object_=self) obj_.build(child_) self.IF_curr_exp.append(obj_) - obj_.original_tagname_ = 'IF_curr_exp' - elif nodeName_ == 'IF_cond_alpha': + obj_.original_tagname_ = "IF_curr_exp" + elif nodeName_ == "IF_cond_alpha": obj_ = IF_cond_alpha.factory(parent_object_=self) obj_.build(child_) self.IF_cond_alpha.append(obj_) - obj_.original_tagname_ = 'IF_cond_alpha' - elif nodeName_ == 'IF_cond_exp': + obj_.original_tagname_ = "IF_cond_alpha" + elif nodeName_ == "IF_cond_exp": obj_ = IF_cond_exp.factory(parent_object_=self) obj_.build(child_) self.IF_cond_exp.append(obj_) - obj_.original_tagname_ = 'IF_cond_exp' - elif nodeName_ == 'EIF_cond_exp_isfa_ista': + obj_.original_tagname_ = "IF_cond_exp" + elif nodeName_ == "EIF_cond_exp_isfa_ista": class_obj_ = self.get_class_obj_(child_, EIF_cond_exp_isfa_ista) obj_ = class_obj_.factory(parent_object_=self) obj_.build(child_) self.EIF_cond_exp_isfa_ista.append(obj_) - obj_.original_tagname_ = 'EIF_cond_exp_isfa_ista' - elif nodeName_ == 'EIF_cond_alpha_isfa_ista': + obj_.original_tagname_ = "EIF_cond_exp_isfa_ista" + elif nodeName_ == "EIF_cond_alpha_isfa_ista": obj_ = EIF_cond_alpha_isfa_ista.factory(parent_object_=self) obj_.build(child_) self.EIF_cond_alpha_isfa_ista.append(obj_) - obj_.original_tagname_ = 'EIF_cond_alpha_isfa_ista' - elif nodeName_ == 'HH_cond_exp': + obj_.original_tagname_ = "EIF_cond_alpha_isfa_ista" + elif nodeName_ == "HH_cond_exp": obj_ = HH_cond_exp.factory(parent_object_=self) obj_.build(child_) self.HH_cond_exp.append(obj_) - obj_.original_tagname_ = 'HH_cond_exp' - elif nodeName_ == 'expCondSynapse': + obj_.original_tagname_ = "HH_cond_exp" + elif nodeName_ == "expCondSynapse": obj_ = ExpCondSynapse.factory(parent_object_=self) obj_.build(child_) self.exp_cond_synapses.append(obj_) - obj_.original_tagname_ = 'expCondSynapse' - elif nodeName_ == 'alphaCondSynapse': + obj_.original_tagname_ = "expCondSynapse" + elif nodeName_ == "alphaCondSynapse": obj_ = AlphaCondSynapse.factory(parent_object_=self) obj_.build(child_) self.alpha_cond_synapses.append(obj_) - obj_.original_tagname_ = 'alphaCondSynapse' - elif nodeName_ == 'expCurrSynapse': + obj_.original_tagname_ = "alphaCondSynapse" + elif nodeName_ == "expCurrSynapse": obj_ = ExpCurrSynapse.factory(parent_object_=self) obj_.build(child_) self.exp_curr_synapses.append(obj_) - obj_.original_tagname_ = 'expCurrSynapse' - elif nodeName_ == 'alphaCurrSynapse': + obj_.original_tagname_ = "expCurrSynapse" + elif nodeName_ == "alphaCurrSynapse": obj_ = AlphaCurrSynapse.factory(parent_object_=self) obj_.build(child_) self.alpha_curr_synapses.append(obj_) - obj_.original_tagname_ = 'alphaCurrSynapse' - elif nodeName_ == 'SpikeSourcePoisson': + obj_.original_tagname_ = "alphaCurrSynapse" + elif nodeName_ == "SpikeSourcePoisson": obj_ = SpikeSourcePoisson.factory(parent_object_=self) obj_.build(child_) self.SpikeSourcePoisson.append(obj_) - obj_.original_tagname_ = 'SpikeSourcePoisson' - elif nodeName_ == 'network': + obj_.original_tagname_ = "SpikeSourcePoisson" + elif nodeName_ == "network": obj_ = Network.factory(parent_object_=self) obj_.build(child_) self.networks.append(obj_) - obj_.original_tagname_ = 'network' - elif nodeName_ == 'ComponentType': + obj_.original_tagname_ = "network" + elif nodeName_ == "ComponentType": obj_ = 
ComponentType.factory(parent_object_=self) obj_.build(child_) self.ComponentType.append(obj_) - obj_.original_tagname_ = 'ComponentType' + obj_.original_tagname_ = "ComponentType" super(NeuroMLDocument, self).buildChildren(child_, node, nodeName_, True) - def summary(self, show_includes=True, show_non_network=True): """Get a pretty-printed summary of the complete NeuroMLDocument. @@ -16163,231 +30080,426 @@ def summary(self, show_includes=True, show_non_network=True): import inspect info = "*******************************************************\n" - info+="* NeuroMLDocument: "+self.id+"\n*\n" + info += "* NeuroMLDocument: " + self.id + "\n*\n" post = "" membs = inspect.getmembers(self) for memb in membs: - if isinstance(memb[1], list) and len(memb[1])>0 and not memb[0].endswith('_') and not memb[0] == 'networks': - if (memb[0] == 'includes' and show_includes) or (not memb[0] == 'includes' and show_non_network): + if ( + isinstance(memb[1], list) + and len(memb[1]) > 0 + and not memb[0].endswith("_") + and not memb[0] == "networks" + ): + if (memb[0] == "includes" and show_includes) or ( + not memb[0] == "includes" and show_non_network + ): post = "*\n" - info+="* "+str(memb[1][0].__class__.__name__)+": " + info += "* " + str(memb[1][0].__class__.__name__) + ": " listed = [] for entry in memb[1]: - if hasattr(entry,'id'): + if hasattr(entry, "id"): listed.append(str(entry.id)) - elif hasattr(entry,'name'): + elif hasattr(entry, "name"): listed.append(str(entry.name)) - elif hasattr(entry,'href'): + elif hasattr(entry, "href"): listed.append(str(entry.href)) - elif hasattr(entry,'tag'): - listed.append(str(entry.tag)+" = "+str(entry.value)) - info+= str(sorted(listed))+"\n" - info+= post + elif hasattr(entry, "tag"): + listed.append(str(entry.tag) + " = " + str(entry.value)) + info += str(sorted(listed)) + "\n" + info += post for network in self.networks: - info+="* Network: "+network.id + info += "* Network: " + network.id if network.temperature: - info+=" (temperature: "+network.temperature+")" - info+="\n*\n" - tot_pop =0 + info += " (temperature: " + network.temperature + ")" + info += "\n*\n" + tot_pop = 0 tot_cells = 0 pop_info = "" for pop in sorted(network.populations, key=lambda x: x.id): - pop_info+="* "+str(pop)+"\n" - tot_pop+=1 - tot_cells+=pop.get_size() - if len(pop.instances)>0: + pop_info += "* " + str(pop) + "\n" + tot_pop += 1 + tot_cells += pop.get_size() + if len(pop.instances) > 0: loc = pop.instances[0].location - pop_info+="* Locations: ["+str(loc)+", ...]\n" - if len(pop.properties)>0: - pop_info+="* Properties: " + pop_info += "* Locations: [" + str(loc) + ", ...]\n" + if len(pop.properties) > 0: + pop_info += "* Properties: " for p in pop.properties: - pop_info+=(str(p.tag)+'='+str(p.value)+'; ') - pop_info+="\n" - - info+="* "+str(tot_cells)+" cells in "+str(tot_pop)+" populations \n"+pop_info+"*\n" - + pop_info += str(p.tag) + "=" + str(p.value) + "; " + pop_info += "\n" + + info += ( + "* " + + str(tot_cells) + + " cells in " + + str(tot_pop) + + " populations \n" + + pop_info + + "*\n" + ) - tot_proj =0 + tot_proj = 0 tot_conns = 0 proj_info = "" for proj in sorted(network.projections, key=lambda x: x.id): - proj_info+="* "+str(proj)+"\n" - tot_proj+=1 - tot_conns+=len(proj.connections) - tot_conns+=len(proj.connection_wds) - if len(proj.connections)>0: - proj_info+="* "+str(len(proj.connections))+" connections: [("+str(proj.connections[0])+"), ...]\n" - if len(proj.connection_wds)>0: - proj_info+="* "+str(len(proj.connection_wds))+" connections (wd): 
[("+str(proj.connection_wds[0])+"), ...]\n" + proj_info += "* " + str(proj) + "\n" + tot_proj += 1 + tot_conns += len(proj.connections) + tot_conns += len(proj.connection_wds) + if len(proj.connections) > 0: + proj_info += ( + "* " + + str(len(proj.connections)) + + " connections: [(" + + str(proj.connections[0]) + + "), ...]\n" + ) + if len(proj.connection_wds) > 0: + proj_info += ( + "* " + + str(len(proj.connection_wds)) + + " connections (wd): [(" + + str(proj.connection_wds[0]) + + "), ...]\n" + ) for proj in sorted(network.electrical_projections, key=lambda x: x.id): - proj_info+="* Electrical projection: "+proj.id+" from "+proj.presynaptic_population+" to "+proj.postsynaptic_population+"\n" - tot_proj+=1 - tot_conns+=len(proj.electrical_connections) - tot_conns+=len(proj.electrical_connection_instances) - tot_conns+=len(proj.electrical_connection_instance_ws) - if len(proj.electrical_connections)>0: - proj_info+="* "+str(len(proj.electrical_connections))+" connections: [("+str(proj.electrical_connections[0])+"), ...]\n" - if len(proj.electrical_connection_instances)>0: - proj_info+="* "+str(len(proj.electrical_connection_instances))+" connections: [("+str(proj.electrical_connection_instances[0])+"), ...]\n" - if len(proj.electrical_connection_instance_ws)>0: - proj_info+="* "+str(len(proj.electrical_connection_instance_ws))+" connections: [("+str(proj.electrical_connection_instance_ws[0])+"), ...]\n" + proj_info += ( + "* Electrical projection: " + + proj.id + + " from " + + proj.presynaptic_population + + " to " + + proj.postsynaptic_population + + "\n" + ) + tot_proj += 1 + tot_conns += len(proj.electrical_connections) + tot_conns += len(proj.electrical_connection_instances) + tot_conns += len(proj.electrical_connection_instance_ws) + if len(proj.electrical_connections) > 0: + proj_info += ( + "* " + + str(len(proj.electrical_connections)) + + " connections: [(" + + str(proj.electrical_connections[0]) + + "), ...]\n" + ) + if len(proj.electrical_connection_instances) > 0: + proj_info += ( + "* " + + str(len(proj.electrical_connection_instances)) + + " connections: [(" + + str(proj.electrical_connection_instances[0]) + + "), ...]\n" + ) + if len(proj.electrical_connection_instance_ws) > 0: + proj_info += ( + "* " + + str(len(proj.electrical_connection_instance_ws)) + + " connections: [(" + + str(proj.electrical_connection_instance_ws[0]) + + "), ...]\n" + ) for proj in sorted(network.continuous_projections, key=lambda x: x.id): - proj_info+="* Continuous projection: "+proj.id+" from "+proj.presynaptic_population+" to "+proj.postsynaptic_population+"\n" - tot_proj+=1 - tot_conns+=len(proj.continuous_connections) - tot_conns+=len(proj.continuous_connection_instances) - tot_conns+=len(proj.continuous_connection_instance_ws) - if len(proj.continuous_connections)>0: - proj_info+="* "+str(len(proj.continuous_connections))+" connections: [("+str(proj.continuous_connections[0])+"), ...]\n" - if len(proj.continuous_connection_instances)>0: - proj_info+="* "+str(len(proj.continuous_connection_instances))+" connections: [("+str(proj.continuous_connection_instances[0])+"), ...]\n" - if len(proj.continuous_connection_instance_ws)>0: - proj_info+="* "+str(len(proj.continuous_connection_instance_ws))+" connections (w): [("+str(proj.continuous_connection_instance_ws[0])+"), ...]\n" - - info+="* "+str(tot_conns)+" connections in "+str(tot_proj)+" projections \n"+proj_info+"*\n" - - if len(network.synaptic_connections)>0: - info+="* "+str(len(network.synaptic_connections))+" explicit synaptic 
connections (outside of projections)\n" + proj_info += ( + "* Continuous projection: " + + proj.id + + " from " + + proj.presynaptic_population + + " to " + + proj.postsynaptic_population + + "\n" + ) + tot_proj += 1 + tot_conns += len(proj.continuous_connections) + tot_conns += len(proj.continuous_connection_instances) + tot_conns += len(proj.continuous_connection_instance_ws) + if len(proj.continuous_connections) > 0: + proj_info += ( + "* " + + str(len(proj.continuous_connections)) + + " connections: [(" + + str(proj.continuous_connections[0]) + + "), ...]\n" + ) + if len(proj.continuous_connection_instances) > 0: + proj_info += ( + "* " + + str(len(proj.continuous_connection_instances)) + + " connections: [(" + + str(proj.continuous_connection_instances[0]) + + "), ...]\n" + ) + if len(proj.continuous_connection_instance_ws) > 0: + proj_info += ( + "* " + + str(len(proj.continuous_connection_instance_ws)) + + " connections (w): [(" + + str(proj.continuous_connection_instance_ws[0]) + + "), ...]\n" + ) + + info += ( + "* " + + str(tot_conns) + + " connections in " + + str(tot_proj) + + " projections \n" + + proj_info + + "*\n" + ) + + if len(network.synaptic_connections) > 0: + info += ( + "* " + + str(len(network.synaptic_connections)) + + " explicit synaptic connections (outside of projections)\n" + ) for sc in network.synaptic_connections: - info+="* "+str(sc)+"\n" - info+="*\n" + info += "* " + str(sc) + "\n" + info += "*\n" tot_input_lists = 0 tot_inputs = 0 input_info = "" for il in sorted(network.input_lists, key=lambda x: x.id): - input_info+="* "+str(il)+"\n" + input_info += "* " + str(il) + "\n" tot_input_lists += 1 - if len(il.input)>0: - input_info+="* "+str(len(il.input))+" inputs: [("+str(il.input[0])+"), ...]\n" - tot_inputs+=len(il.input) - if len(il.input_ws)>0: - input_info+="* "+str(len(il.input_ws))+" inputs: [("+str(il.input_ws[0])+"), ...]\n" - tot_inputs+=len(il.input_ws) - - info+="* "+str(tot_inputs)+" inputs in "+str(tot_input_lists)+" input lists \n"+input_info+"*\n" + if len(il.input) > 0: + input_info += ( + "* " + + str(len(il.input)) + + " inputs: [(" + + str(il.input[0]) + + "), ...]\n" + ) + tot_inputs += len(il.input) + if len(il.input_ws) > 0: + input_info += ( + "* " + + str(len(il.input_ws)) + + " inputs: [(" + + str(il.input_ws[0]) + + "), ...]\n" + ) + tot_inputs += len(il.input_ws) + + info += ( + "* " + + str(tot_inputs) + + " inputs in " + + str(tot_input_lists) + + " input lists \n" + + input_info + + "*\n" + ) - if len(network.explicit_inputs)>0: - info+="* "+str(len(network.explicit_inputs))+" explicit inputs (outside of input lists)\n" + if len(network.explicit_inputs) > 0: + info += ( + "* " + + str(len(network.explicit_inputs)) + + " explicit inputs (outside of input lists)\n" + ) for el in network.explicit_inputs: - info+="* "+str(el)+"\n" - info+="*\n" - + info += "* " + str(el) + "\n" + info += "*\n" - info+="*******************************************************" + info += "*******************************************************" return info warn_count = 0 - def get_by_id(self,id): + def get_by_id(self, id): """Get a component by specifying its ID. 
:param id: id of Component to get :type id: str :returns: Component with given ID or None if no Component with provided ID was found """ - if len(id)==0: + if len(id) == 0: import inspect + callframe = inspect.getouterframes(inspect.currentframe(), 2) - print('Method: '+ callframe[1][3] + ' is asking for an element with no id...') + print( + "Method: " + callframe[1][3] + " is asking for an element with no id..." + ) return None all_ids = [] for ms in self.member_data_items_: mlist = self.__getattribute__(ms.name) for m in mlist: - if hasattr(m,"id"): + if hasattr(m, "id"): if m.id == id: return m else: all_ids.append(m.id) from neuroml.loaders import print_ - if self.warn_count<10: - print_("Id "+id+" not found in element. All ids: "+str(sorted(all_ids))) - self.warn_count+=1 - elif self.warn_count==10: + + if self.warn_count < 10: + print_( + "Id " + + id + + " not found in element. All ids: " + + str(sorted(all_ids)) + ) + self.warn_count += 1 + elif self.warn_count == 10: print_(" - Suppressing further warnings about id not found...") return None - def append(self,element): + def append(self, element): """Append an element :param element: element to append :type element: Object """ from neuroml.utils import append_to_element - append_to_element(self,element) + + append_to_element(self, element) # end class NeuroMLDocument class BasePynnSynapse(BaseSynapse): member_data_items_ = [ - MemberSpec_('tau_syn', 'xs:float', 0, 0, {'use': u'required'}), + MemberSpec_("tau_syn", "xs:float", 0, 0, {"use": u"required"}), ] subclass = None superclass = BaseSynapse - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, tau_syn=None, extensiontype_=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + tau_syn=None, + extensiontype_=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(BasePynnSynapse, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, extensiontype_, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(BasePynnSynapse, self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + extensiontype_, + **kwargs_ + ) self.tau_syn = _cast(float, tau_syn) self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, BasePynnSynapse) + subclass = getSubclassFromModule_(CurrentSubclassModule_, BasePynnSynapse) if subclass is not None: return subclass(*args_, **kwargs_) if BasePynnSynapse.subclass: return BasePynnSynapse.subclass(*args_, **kwargs_) else: return BasePynnSynapse(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): - if ( - super(BasePynnSynapse, self).hasContent_() - ): + if super(BasePynnSynapse, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BasePynnSynapse', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('BasePynnSynapse') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="BasePynnSynapse", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("BasePynnSynapse") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if 
self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BasePynnSynapse') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="BasePynnSynapse" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BasePynnSynapse', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="BasePynnSynapse", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BasePynnSynapse'): - super(BasePynnSynapse, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BasePynnSynapse') - if self.tau_syn is not None and 'tau_syn' not in already_processed: - already_processed.add('tau_syn') - outfile.write(' tau_syn="%s"' % self.gds_format_float(self.tau_syn, input_name='tau_syn')) - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="BasePynnSynapse", + ): + super(BasePynnSynapse, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="BasePynnSynapse" + ) + if self.tau_syn is not None and "tau_syn" not in already_processed: + already_processed.add("tau_syn") + outfile.write( + ' tau_syn="%s"' + % self.gds_format_float(self.tau_syn, input_name="tau_syn") + ) + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BasePynnSynapse', fromsubclass_=False, pretty_print=True): - super(BasePynnSynapse, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="BasePynnSynapse", + fromsubclass_=False, + pretty_print=True, + ): + super(BasePynnSynapse, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -16395,107 +30507,195 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('tau_syn', node) - if value is not None and 'tau_syn' not in already_processed: - already_processed.add('tau_syn') + value = find_attr_value_("tau_syn", node) + if value is not None and "tau_syn" not in 
already_processed: + already_processed.add("tau_syn") try: self.tau_syn = float(value) except ValueError as exp: - raise ValueError('Bad float/double attribute (tau_syn): %s' % exp) - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + raise ValueError("Bad float/double attribute (tau_syn): %s" % exp) + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value super(BasePynnSynapse, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(BasePynnSynapse, self).buildChildren(child_, node, nodeName_, True) pass + + # end class BasePynnSynapse class basePyNNCell(BaseCell): member_data_items_ = [ - MemberSpec_('cm', 'xs:float', 0, 0, {'use': u'required'}), - MemberSpec_('i_offset', 'xs:float', 0, 0, {'use': u'required'}), - MemberSpec_('tau_syn_E', 'xs:float', 0, 0, {'use': u'required'}), - MemberSpec_('tau_syn_I', 'xs:float', 0, 0, {'use': u'required'}), - MemberSpec_('v_init', 'xs:float', 0, 0, {'use': u'required'}), + MemberSpec_("cm", "xs:float", 0, 0, {"use": u"required"}), + MemberSpec_("i_offset", "xs:float", 0, 0, {"use": u"required"}), + MemberSpec_("tau_syn_E", "xs:float", 0, 0, {"use": u"required"}), + MemberSpec_("tau_syn_I", "xs:float", 0, 0, {"use": u"required"}), + MemberSpec_("v_init", "xs:float", 0, 0, {"use": u"required"}), ] subclass = None superclass = BaseCell - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, cm=None, i_offset=None, tau_syn_E=None, tau_syn_I=None, v_init=None, extensiontype_=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + cm=None, + i_offset=None, + tau_syn_E=None, + tau_syn_I=None, + v_init=None, + extensiontype_=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(basePyNNCell, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, extensiontype_, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(basePyNNCell, self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + extensiontype_, + **kwargs_ + ) self.cm = _cast(float, cm) self.i_offset = _cast(float, i_offset) self.tau_syn_E = _cast(float, tau_syn_E) self.tau_syn_I = _cast(float, tau_syn_I) self.v_init = _cast(float, v_init) self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, basePyNNCell) + subclass = getSubclassFromModule_(CurrentSubclassModule_, basePyNNCell) if subclass is not None: return subclass(*args_, **kwargs_) if basePyNNCell.subclass: return basePyNNCell.subclass(*args_, **kwargs_) else: return basePyNNCell(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): - if ( - super(basePyNNCell, self).hasContent_() - ): + if super(basePyNNCell, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='basePyNNCell', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('basePyNNCell') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="basePyNNCell", + 
pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("basePyNNCell") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='basePyNNCell') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="basePyNNCell" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='basePyNNCell', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="basePyNNCell", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='basePyNNCell'): - super(basePyNNCell, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='basePyNNCell') - if self.cm is not None and 'cm' not in already_processed: - already_processed.add('cm') - outfile.write(' cm="%s"' % self.gds_format_float(self.cm, input_name='cm')) - if self.i_offset is not None and 'i_offset' not in already_processed: - already_processed.add('i_offset') - outfile.write(' i_offset="%s"' % self.gds_format_float(self.i_offset, input_name='i_offset')) - if self.tau_syn_E is not None and 'tau_syn_E' not in already_processed: - already_processed.add('tau_syn_E') - outfile.write(' tau_syn_E="%s"' % self.gds_format_float(self.tau_syn_E, input_name='tau_syn_E')) - if self.tau_syn_I is not None and 'tau_syn_I' not in already_processed: - already_processed.add('tau_syn_I') - outfile.write(' tau_syn_I="%s"' % self.gds_format_float(self.tau_syn_I, input_name='tau_syn_I')) - if self.v_init is not None and 'v_init' not in already_processed: - already_processed.add('v_init') - outfile.write(' v_init="%s"' % self.gds_format_float(self.v_init, input_name='v_init')) - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="basePyNNCell", + ): + super(basePyNNCell, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="basePyNNCell" + ) + if self.cm is not None and "cm" not in already_processed: + already_processed.add("cm") + outfile.write(' cm="%s"' % self.gds_format_float(self.cm, input_name="cm")) + if self.i_offset is not None and "i_offset" not in already_processed: + already_processed.add("i_offset") + outfile.write( + ' i_offset="%s"' + % self.gds_format_float(self.i_offset, input_name="i_offset") + ) + if self.tau_syn_E is not None and "tau_syn_E" not in already_processed: + already_processed.add("tau_syn_E") + outfile.write( + ' tau_syn_E="%s"' + % self.gds_format_float(self.tau_syn_E, 
input_name="tau_syn_E") + ) + if self.tau_syn_I is not None and "tau_syn_I" not in already_processed: + already_processed.add("tau_syn_I") + outfile.write( + ' tau_syn_I="%s"' + % self.gds_format_float(self.tau_syn_I, input_name="tau_syn_I") + ) + if self.v_init is not None and "v_init" not in already_processed: + already_processed.add("v_init") + outfile.write( + ' v_init="%s"' % self.gds_format_float(self.v_init, input_name="v_init") + ) + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='basePyNNCell', fromsubclass_=False, pretty_print=True): - super(basePyNNCell, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="basePyNNCell", + fromsubclass_=False, + pretty_print=True, + ): + super(basePyNNCell, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -16503,67 +30703,121 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('cm', node) - if value is not None and 'cm' not in already_processed: - already_processed.add('cm') + value = find_attr_value_("cm", node) + if value is not None and "cm" not in already_processed: + already_processed.add("cm") try: self.cm = float(value) except ValueError as exp: - raise ValueError('Bad float/double attribute (cm): %s' % exp) - value = find_attr_value_('i_offset', node) - if value is not None and 'i_offset' not in already_processed: - already_processed.add('i_offset') + raise ValueError("Bad float/double attribute (cm): %s" % exp) + value = find_attr_value_("i_offset", node) + if value is not None and "i_offset" not in already_processed: + already_processed.add("i_offset") try: self.i_offset = float(value) except ValueError as exp: - raise ValueError('Bad float/double attribute (i_offset): %s' % exp) - value = find_attr_value_('tau_syn_E', node) - if value is not None and 'tau_syn_E' not in already_processed: - already_processed.add('tau_syn_E') + raise ValueError("Bad float/double attribute (i_offset): %s" % exp) + value = find_attr_value_("tau_syn_E", node) + if value is not None and "tau_syn_E" not in already_processed: + already_processed.add("tau_syn_E") try: self.tau_syn_E = float(value) except ValueError as exp: - raise ValueError('Bad float/double attribute (tau_syn_E): %s' % exp) - value = find_attr_value_('tau_syn_I', node) - if value is not None and 'tau_syn_I' not in already_processed: - already_processed.add('tau_syn_I') + raise ValueError("Bad float/double attribute (tau_syn_E): %s" % exp) + value = find_attr_value_("tau_syn_I", node) + if value is not None and "tau_syn_I" not in already_processed: + already_processed.add("tau_syn_I") try: self.tau_syn_I = float(value) except ValueError as exp: - raise ValueError('Bad float/double attribute (tau_syn_I): %s' % exp) - value = find_attr_value_('v_init', node) - if value is not None and 'v_init' not in already_processed: - 
already_processed.add('v_init') + raise ValueError("Bad float/double attribute (tau_syn_I): %s" % exp) + value = find_attr_value_("v_init", node) + if value is not None and "v_init" not in already_processed: + already_processed.add("v_init") try: self.v_init = float(value) except ValueError as exp: - raise ValueError('Bad float/double attribute (v_init): %s' % exp) - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + raise ValueError("Bad float/double attribute (v_init): %s" % exp) + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value super(basePyNNCell, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(basePyNNCell, self).buildChildren(child_, node, nodeName_, True) pass + + # end class basePyNNCell class ContinuousProjection(BaseProjection): """Projection between two populations consisting of analog connections (e.g. graded synapses)""" + member_data_items_ = [ - MemberSpec_('continuous_connections', 'ContinuousConnection', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ContinuousConnection', u'name': u'continuousConnection', u'minOccurs': u'0'}, None), - MemberSpec_('continuous_connection_instances', 'ContinuousConnectionInstance', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ContinuousConnectionInstance', u'name': u'continuousConnectionInstance', u'minOccurs': u'0'}, None), - MemberSpec_('continuous_connection_instance_ws', 'ContinuousConnectionInstanceW', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ContinuousConnectionInstanceW', u'name': u'continuousConnectionInstanceW', u'minOccurs': u'0'}, None), + MemberSpec_( + "continuous_connections", + "ContinuousConnection", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"ContinuousConnection", + u"name": u"continuousConnection", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "continuous_connection_instances", + "ContinuousConnectionInstance", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"ContinuousConnectionInstance", + u"name": u"continuousConnectionInstance", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "continuous_connection_instance_ws", + "ContinuousConnectionInstanceW", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"ContinuousConnectionInstanceW", + u"name": u"continuousConnectionInstanceW", + u"minOccurs": u"0", + }, + None, + ), ] subclass = None superclass = BaseProjection - def __init__(self, neuro_lex_id=None, id=None, presynaptic_population=None, postsynaptic_population=None, continuous_connections=None, continuous_connection_instances=None, continuous_connection_instance_ws=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + presynaptic_population=None, + postsynaptic_population=None, + continuous_connections=None, + continuous_connection_instances=None, + continuous_connection_instance_ws=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(ContinuousProjection, self).__init__(neuro_lex_id, id, presynaptic_population, postsynaptic_population, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(ContinuousProjection, self).__init__( + neuro_lex_id, id, presynaptic_population, postsynaptic_population, **kwargs_ + ) if continuous_connections is None: self.continuous_connections = 
[] else: @@ -16576,62 +30830,143 @@ def __init__(self, neuro_lex_id=None, id=None, presynaptic_population=None, post self.continuous_connection_instance_ws = [] else: self.continuous_connection_instance_ws = continuous_connection_instance_ws + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, ContinuousProjection) + CurrentSubclassModule_, ContinuousProjection + ) if subclass is not None: return subclass(*args_, **kwargs_) if ContinuousProjection.subclass: return ContinuousProjection.subclass(*args_, **kwargs_) else: return ContinuousProjection(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): if ( - self.continuous_connections or - self.continuous_connection_instances or - self.continuous_connection_instance_ws or - super(ContinuousProjection, self).hasContent_() + self.continuous_connections + or self.continuous_connection_instances + or self.continuous_connection_instance_ws + or super(ContinuousProjection, self).hasContent_() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ContinuousProjection', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('ContinuousProjection') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ContinuousProjection", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ContinuousProjection") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ContinuousProjection') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ContinuousProjection", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ContinuousProjection', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ContinuousProjection", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ContinuousProjection'): - super(ContinuousProjection, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ContinuousProjection') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ContinuousProjection', fromsubclass_=False, pretty_print=True): - super(ContinuousProjection, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ContinuousProjection", + ): + 
super(ContinuousProjection, self).exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ContinuousProjection", + ) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ContinuousProjection", + fromsubclass_=False, + pretty_print=True, + ): + super(ContinuousProjection, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" for continuousConnection_ in self.continuous_connections: - continuousConnection_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='continuousConnection', pretty_print=pretty_print) + continuousConnection_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="continuousConnection", + pretty_print=pretty_print, + ) for continuousConnectionInstance_ in self.continuous_connection_instances: - continuousConnectionInstance_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='continuousConnectionInstance', pretty_print=pretty_print) + continuousConnectionInstance_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="continuousConnectionInstance", + pretty_print=pretty_print, + ) for continuousConnectionInstanceW_ in self.continuous_connection_instance_ws: - continuousConnectionInstanceW_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='continuousConnectionInstanceW', pretty_print=pretty_print) + continuousConnectionInstanceW_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="continuousConnectionInstanceW", + pretty_print=pretty_print, + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -16639,95 +30974,114 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - super(ContinuousProjection, self).buildAttributes(node, attrs, already_processed) + super(ContinuousProjection, self).buildAttributes( + node, attrs, already_processed + ) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'continuousConnection': + if nodeName_ == "continuousConnection": class_obj_ = self.get_class_obj_(child_, ContinuousConnection) obj_ = class_obj_.factory(parent_object_=self) obj_.build(child_) self.continuous_connections.append(obj_) - obj_.original_tagname_ = 'continuousConnection' - elif nodeName_ == 'continuousConnectionInstance': + obj_.original_tagname_ = "continuousConnection" + elif nodeName_ == "continuousConnectionInstance": class_obj_ = self.get_class_obj_(child_, ContinuousConnectionInstance) obj_ = class_obj_.factory(parent_object_=self) obj_.build(child_) self.continuous_connection_instances.append(obj_) - obj_.original_tagname_ = 'continuousConnectionInstance' - elif nodeName_ == 'continuousConnectionInstanceW': + obj_.original_tagname_ = "continuousConnectionInstance" + elif nodeName_ == "continuousConnectionInstanceW": obj_ = ContinuousConnectionInstanceW.factory(parent_object_=self) obj_.build(child_) self.continuous_connection_instance_ws.append(obj_) - obj_.original_tagname_ = 'continuousConnectionInstanceW' + obj_.original_tagname_ = "continuousConnectionInstanceW" super(ContinuousProjection, self).buildChildren(child_, node, nodeName_, True) def exportHdf5(self, h5file, h5Group): - """Export to HDF5 file. 
""" - #print("Exporting ContinuousProjection: "+str(self.id)+" as HDF5") - + """Export to HDF5 file.""" + # print("Exporting ContinuousProjection: "+str(self.id)+" as HDF5") import numpy - projGroup = h5file.create_group(h5Group, 'projection_'+self.id) + projGroup = h5file.create_group(h5Group, "projection_" + self.id) projGroup._f_setattr("id", self.id) projGroup._f_setattr("type", "continuousProjection") projGroup._f_setattr("presynapticPopulation", self.presynaptic_population) projGroup._f_setattr("postsynapticPopulation", self.postsynaptic_population) - pre_comp = self.continuous_connections[0].pre_component if len(self.continuous_connections)>0 else self.continuous_connection_instances[0].pre_component if len(self.continuous_connection_instances)>0 else self.continuous_connection_instance_ws[0].pre_component - projGroup._f_setattr("preComponent", pre_comp ) - post_comp = self.continuous_connections[0].post_component if len(self.continuous_connections)>0 else self.continuous_connection_instances[0].post_component if len(self.continuous_connection_instances)>0 else self.continuous_connection_instance_ws[0].post_component - projGroup._f_setattr("postComponent", post_comp ) + pre_comp = ( + self.continuous_connections[0].pre_component + if len(self.continuous_connections) > 0 + else self.continuous_connection_instances[0].pre_component + if len(self.continuous_connection_instances) > 0 + else self.continuous_connection_instance_ws[0].pre_component + ) + projGroup._f_setattr("preComponent", pre_comp) + post_comp = ( + self.continuous_connections[0].post_component + if len(self.continuous_connections) > 0 + else self.continuous_connection_instances[0].post_component + if len(self.continuous_connection_instances) > 0 + else self.continuous_connection_instance_ws[0].post_component + ) + projGroup._f_setattr("postComponent", post_comp) cols = 7 extra_cols = {} - num_tot = len(self.continuous_connections)+len(self.continuous_connection_instances)+len(self.continuous_connection_instance_ws) + num_tot = ( + len(self.continuous_connections) + + len(self.continuous_connection_instances) + + len(self.continuous_connection_instance_ws) + ) - if len(self.continuous_connection_instance_ws)>0: - extra_cols["column_"+str(cols)] = 'weight' - cols+=1 + if len(self.continuous_connection_instance_ws) > 0: + extra_cols["column_" + str(cols)] = "weight" + cols += 1 - #print("Exporting "+str(num_tot)+" continuous connections") + # print("Exporting "+str(num_tot)+" continuous connections") a = numpy.zeros([num_tot, cols], numpy.float32) - count=0 + count = 0 # TODO: optimise for single compartment cells, i.e. where no pre_segment/post_fraction_along etc. 
for connection in self.continuous_connections: - a[count,0] = connection.id - a[count,1] = connection.get_pre_cell_id() - a[count,2] = connection.get_post_cell_id() - a[count,3] = connection.pre_segment - a[count,4] = connection.post_segment - a[count,5] = connection.pre_fraction_along - a[count,6] = connection.post_fraction_along - count=count+1 + a[count, 0] = connection.id + a[count, 1] = connection.get_pre_cell_id() + a[count, 2] = connection.get_post_cell_id() + a[count, 3] = connection.pre_segment + a[count, 4] = connection.post_segment + a[count, 5] = connection.pre_fraction_along + a[count, 6] = connection.post_fraction_along + count = count + 1 for connection in self.continuous_connection_instances: - a[count,0] = connection.id - a[count,1] = connection.get_pre_cell_id() - a[count,2] = connection.get_post_cell_id() - a[count,3] = connection.pre_segment - a[count,4] = connection.post_segment - a[count,5] = connection.pre_fraction_along - a[count,6] = connection.post_fraction_along - count=count+1 - + a[count, 0] = connection.id + a[count, 1] = connection.get_pre_cell_id() + a[count, 2] = connection.get_post_cell_id() + a[count, 3] = connection.pre_segment + a[count, 4] = connection.post_segment + a[count, 5] = connection.pre_fraction_along + a[count, 6] = connection.post_fraction_along + count = count + 1 for connection in self.continuous_connection_instance_ws: - a[count,0] = connection.id - a[count,1] = connection.get_pre_cell_id() - a[count,2] = connection.get_post_cell_id() - a[count,3] = connection.pre_segment - a[count,4] = connection.post_segment - a[count,5] = connection.pre_fraction_along - a[count,6] = connection.post_fraction_along - a[count,7] = connection.weight - count=count+1 - - - array = h5file.create_carray(projGroup, self.id, obj=a, title="Connections of cells in "+ self.id) + a[count, 0] = connection.id + a[count, 1] = connection.get_pre_cell_id() + a[count, 2] = connection.get_post_cell_id() + a[count, 3] = connection.pre_segment + a[count, 4] = connection.post_segment + a[count, 5] = connection.pre_fraction_along + a[count, 6] = connection.post_fraction_along + a[count, 7] = connection.weight + count = count + 1 + + array = h5file.create_carray( + projGroup, self.id, obj=a, title="Connections of cells in " + self.id + ) array._f_setattr("column_0", "id") array._f_setattr("column_1", "pre_cell_id") @@ -16739,25 +31093,73 @@ def exportHdf5(self, h5file, h5Group): for k in extra_cols: array._f_setattr(k, extra_cols[k]) - - # end class ContinuousProjection class ElectricalProjection(BaseProjection): """Projection between two populations consisting of electrical connections (gap junctions)""" + member_data_items_ = [ - MemberSpec_('electrical_connections', 'ElectricalConnection', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ElectricalConnection', u'name': u'electricalConnection', u'minOccurs': u'0'}, None), - MemberSpec_('electrical_connection_instances', 'ElectricalConnectionInstance', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ElectricalConnectionInstance', u'name': u'electricalConnectionInstance', u'minOccurs': u'0'}, None), - MemberSpec_('electrical_connection_instance_ws', 'ElectricalConnectionInstanceW', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ElectricalConnectionInstanceW', u'name': u'electricalConnectionInstanceW', u'minOccurs': u'0'}, None), + MemberSpec_( + "electrical_connections", + "ElectricalConnection", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"ElectricalConnection", + u"name": u"electricalConnection", + u"minOccurs": u"0", + 
}, + None, + ), + MemberSpec_( + "electrical_connection_instances", + "ElectricalConnectionInstance", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"ElectricalConnectionInstance", + u"name": u"electricalConnectionInstance", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "electrical_connection_instance_ws", + "ElectricalConnectionInstanceW", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"ElectricalConnectionInstanceW", + u"name": u"electricalConnectionInstanceW", + u"minOccurs": u"0", + }, + None, + ), ] subclass = None superclass = BaseProjection - def __init__(self, neuro_lex_id=None, id=None, presynaptic_population=None, postsynaptic_population=None, electrical_connections=None, electrical_connection_instances=None, electrical_connection_instance_ws=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + presynaptic_population=None, + postsynaptic_population=None, + electrical_connections=None, + electrical_connection_instances=None, + electrical_connection_instance_ws=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(ElectricalProjection, self).__init__(neuro_lex_id, id, presynaptic_population, postsynaptic_population, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(ElectricalProjection, self).__init__( + neuro_lex_id, id, presynaptic_population, postsynaptic_population, **kwargs_ + ) if electrical_connections is None: self.electrical_connections = [] else: @@ -16770,62 +31172,143 @@ def __init__(self, neuro_lex_id=None, id=None, presynaptic_population=None, post self.electrical_connection_instance_ws = [] else: self.electrical_connection_instance_ws = electrical_connection_instance_ws + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, ElectricalProjection) + CurrentSubclassModule_, ElectricalProjection + ) if subclass is not None: return subclass(*args_, **kwargs_) if ElectricalProjection.subclass: return ElectricalProjection.subclass(*args_, **kwargs_) else: return ElectricalProjection(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): if ( - self.electrical_connections or - self.electrical_connection_instances or - self.electrical_connection_instance_ws or - super(ElectricalProjection, self).hasContent_() + self.electrical_connections + or self.electrical_connection_instances + or self.electrical_connection_instance_ws + or super(ElectricalProjection, self).hasContent_() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ElectricalProjection', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('ElectricalProjection') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ElectricalProjection", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ElectricalProjection") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ElectricalProjection') + outfile.write( + "<%s%s%s" + % ( + 
namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ElectricalProjection", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ElectricalProjection', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ElectricalProjection", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ElectricalProjection'): - super(ElectricalProjection, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ElectricalProjection') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ElectricalProjection', fromsubclass_=False, pretty_print=True): - super(ElectricalProjection, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ElectricalProjection", + ): + super(ElectricalProjection, self).exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ElectricalProjection", + ) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ElectricalProjection", + fromsubclass_=False, + pretty_print=True, + ): + super(ElectricalProjection, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" for electricalConnection_ in self.electrical_connections: - electricalConnection_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='electricalConnection', pretty_print=pretty_print) + electricalConnection_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="electricalConnection", + pretty_print=pretty_print, + ) for electricalConnectionInstance_ in self.electrical_connection_instances: - electricalConnectionInstance_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='electricalConnectionInstance', pretty_print=pretty_print) + electricalConnectionInstance_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="electricalConnectionInstance", + pretty_print=pretty_print, + ) for electricalConnectionInstanceW_ in self.electrical_connection_instance_ws: - electricalConnectionInstanceW_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='electricalConnectionInstanceW', pretty_print=pretty_print) + electricalConnectionInstanceW_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="electricalConnectionInstanceW", + pretty_print=pretty_print, + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -16833,90 +31316,105 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - 
super(ElectricalProjection, self).buildAttributes(node, attrs, already_processed) + super(ElectricalProjection, self).buildAttributes( + node, attrs, already_processed + ) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'electricalConnection': + if nodeName_ == "electricalConnection": class_obj_ = self.get_class_obj_(child_, ElectricalConnection) obj_ = class_obj_.factory(parent_object_=self) obj_.build(child_) self.electrical_connections.append(obj_) - obj_.original_tagname_ = 'electricalConnection' - elif nodeName_ == 'electricalConnectionInstance': + obj_.original_tagname_ = "electricalConnection" + elif nodeName_ == "electricalConnectionInstance": class_obj_ = self.get_class_obj_(child_, ElectricalConnectionInstance) obj_ = class_obj_.factory(parent_object_=self) obj_.build(child_) self.electrical_connection_instances.append(obj_) - obj_.original_tagname_ = 'electricalConnectionInstance' - elif nodeName_ == 'electricalConnectionInstanceW': + obj_.original_tagname_ = "electricalConnectionInstance" + elif nodeName_ == "electricalConnectionInstanceW": obj_ = ElectricalConnectionInstanceW.factory(parent_object_=self) obj_.build(child_) self.electrical_connection_instance_ws.append(obj_) - obj_.original_tagname_ = 'electricalConnectionInstanceW' + obj_.original_tagname_ = "electricalConnectionInstanceW" super(ElectricalProjection, self).buildChildren(child_, node, nodeName_, True) def exportHdf5(self, h5file, h5Group): - """Export to HDF5 file. """ - #print("Exporting ElectricalProjection: "+str(self.id)+" as HDF5") - + """Export to HDF5 file.""" + # print("Exporting ElectricalProjection: "+str(self.id)+" as HDF5") import numpy - projGroup = h5file.create_group(h5Group, 'projection_'+self.id) + projGroup = h5file.create_group(h5Group, "projection_" + self.id) projGroup._f_setattr("id", self.id) projGroup._f_setattr("type", "electricalProjection") projGroup._f_setattr("presynapticPopulation", self.presynaptic_population) projGroup._f_setattr("postsynapticPopulation", self.postsynaptic_population) - syn = self.electrical_connections[0].synapse if len(self.electrical_connections)>0 else self.electrical_connection_instances[0].synapse if len(self.electrical_connection_instances)>0 else self.electrical_connection_instance_ws[0].synapse - projGroup._f_setattr("synapse", syn ) + syn = ( + self.electrical_connections[0].synapse + if len(self.electrical_connections) > 0 + else self.electrical_connection_instances[0].synapse + if len(self.electrical_connection_instances) > 0 + else self.electrical_connection_instance_ws[0].synapse + ) + projGroup._f_setattr("synapse", syn) cols = 7 extra_cols = {} - num_tot = len(self.electrical_connections)+len(self.electrical_connection_instances)+len(self.electrical_connection_instance_ws) - if len(self.electrical_connection_instance_ws)>0: - extra_cols["column_"+str(cols)] = "weight" - cols+=1 + num_tot = ( + len(self.electrical_connections) + + len(self.electrical_connection_instances) + + len(self.electrical_connection_instance_ws) + ) + if len(self.electrical_connection_instance_ws) > 0: + extra_cols["column_" + str(cols)] = "weight" + cols += 1 - #print("Exporting "+str(num_tot)+" electrical connections") + # print("Exporting "+str(num_tot)+" electrical connections") a = numpy.zeros([num_tot, cols], numpy.float32) - count=0 + count = 0 # TODO: optimise for single compartment cells, i.e. where no pre_segment/post_fraction_along etc. 
for connection in self.electrical_connections: - a[count,0] = connection.id - a[count,1] = connection.get_pre_cell_id() - a[count,2] = connection.get_post_cell_id() - a[count,3] = connection.pre_segment - a[count,4] = connection.post_segment - a[count,5] = connection.pre_fraction_along - a[count,6] = connection.post_fraction_along - count=count+1 + a[count, 0] = connection.id + a[count, 1] = connection.get_pre_cell_id() + a[count, 2] = connection.get_post_cell_id() + a[count, 3] = connection.pre_segment + a[count, 4] = connection.post_segment + a[count, 5] = connection.pre_fraction_along + a[count, 6] = connection.post_fraction_along + count = count + 1 for connection in self.electrical_connection_instances: - a[count,0] = connection.id - a[count,1] = connection.get_pre_cell_id() - a[count,2] = connection.get_post_cell_id() - a[count,3] = connection.pre_segment - a[count,4] = connection.post_segment - a[count,5] = connection.pre_fraction_along - a[count,6] = connection.post_fraction_along - count=count+1 + a[count, 0] = connection.id + a[count, 1] = connection.get_pre_cell_id() + a[count, 2] = connection.get_post_cell_id() + a[count, 3] = connection.pre_segment + a[count, 4] = connection.post_segment + a[count, 5] = connection.pre_fraction_along + a[count, 6] = connection.post_fraction_along + count = count + 1 for connection in self.electrical_connection_instance_ws: - a[count,0] = connection.id - a[count,1] = connection.get_pre_cell_id() - a[count,2] = connection.get_post_cell_id() - a[count,3] = connection.pre_segment - a[count,4] = connection.post_segment - a[count,5] = connection.pre_fraction_along - a[count,6] = connection.post_fraction_along - a[count,7] = connection.get_weight() - count=count+1 - - array = h5file.create_carray(projGroup, self.id, obj=a, title="Connections of cells in "+ self.id) + a[count, 0] = connection.id + a[count, 1] = connection.get_pre_cell_id() + a[count, 2] = connection.get_post_cell_id() + a[count, 3] = connection.pre_segment + a[count, 4] = connection.post_segment + a[count, 5] = connection.pre_fraction_along + a[count, 6] = connection.post_fraction_along + a[count, 7] = connection.get_weight() + count = count + 1 + + array = h5file.create_carray( + projGroup, self.id, obj=a, title="Connections of cells in " + self.id + ) array._f_setattr("column_0", "id") array._f_setattr("column_1", "pre_cell_id") @@ -16927,8 +31425,7 @@ def exportHdf5(self, h5file, h5Group): array._f_setattr("column_6", "post_fraction_along") for col in extra_cols.keys(): - array._f_setattr(col,extra_cols[col]) - + array._f_setattr(col, extra_cols[col]) # end class ElectricalProjection @@ -16936,20 +31433,36 @@ def exportHdf5(self, h5file, h5Group): class BaseConnectionNewFormat(BaseConnection): """Base of all synaptic connections with preCell, postSegment, etc. 
See BaseConnectionOldFormat""" + member_data_items_ = [ - MemberSpec_('pre_cell', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('pre_segment', 'NonNegativeInteger', 0, 1, {'use': 'optional'}), - MemberSpec_('pre_fraction_along', 'ZeroToOne', 0, 1, {'use': 'optional'}), - MemberSpec_('post_cell', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('post_segment', 'NonNegativeInteger', 0, 1, {'use': 'optional'}), - MemberSpec_('post_fraction_along', 'ZeroToOne', 0, 1, {'use': 'optional'}), + MemberSpec_("pre_cell", "xs:string", 0, 0, {"use": u"required"}), + MemberSpec_("pre_segment", "NonNegativeInteger", 0, 1, {"use": "optional"}), + MemberSpec_("pre_fraction_along", "ZeroToOne", 0, 1, {"use": "optional"}), + MemberSpec_("post_cell", "xs:string", 0, 0, {"use": u"required"}), + MemberSpec_("post_segment", "NonNegativeInteger", 0, 1, {"use": "optional"}), + MemberSpec_("post_fraction_along", "ZeroToOne", 0, 1, {"use": "optional"}), ] subclass = None superclass = BaseConnection - def __init__(self, neuro_lex_id=None, id=None, pre_cell=None, pre_segment='0', pre_fraction_along='0.5', post_cell=None, post_segment='0', post_fraction_along='0.5', extensiontype_=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + pre_cell=None, + pre_segment="0", + pre_fraction_along="0.5", + post_cell=None, + post_segment="0", + post_fraction_along="0.5", + extensiontype_=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(BaseConnectionNewFormat, self).__init__(neuro_lex_id, id, extensiontype_, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(BaseConnectionNewFormat, self).__init__( + neuro_lex_id, id, extensiontype_, **kwargs_ + ) self.pre_cell = _cast(None, pre_cell) self.pre_segment = _cast(int, pre_segment) self.pre_fraction_along = _cast(float, pre_fraction_along) @@ -16957,82 +31470,176 @@ def __init__(self, neuro_lex_id=None, id=None, pre_cell=None, pre_segment='0', p self.post_segment = _cast(int, post_segment) self.post_fraction_along = _cast(float, post_fraction_along) self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, BaseConnectionNewFormat) + CurrentSubclassModule_, BaseConnectionNewFormat + ) if subclass is not None: return subclass(*args_, **kwargs_) if BaseConnectionNewFormat.subclass: return BaseConnectionNewFormat.subclass(*args_, **kwargs_) else: return BaseConnectionNewFormat(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NonNegativeInteger(self, value): # Validate type NonNegativeInteger, a restriction on xs:nonNegativeInteger. if value is not None and Validate_simpletypes_: pass + def validate_ZeroToOne(self, value): # Validate type ZeroToOne, a restriction on xs:float. 
if value is not None and Validate_simpletypes_: if value < 0: - warnings_.warn('Value "%(value)s" does not match xsd minInclusive restriction on ZeroToOne' % {"value" : value} ) + warnings_.warn( + 'Value "%(value)s" does not match xsd minInclusive restriction on ZeroToOne' + % {"value": value} + ) if value > 1: - warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on ZeroToOne' % {"value" : value} ) + warnings_.warn( + 'Value "%(value)s" does not match xsd maxInclusive restriction on ZeroToOne' + % {"value": value} + ) + def hasContent_(self): - if ( - super(BaseConnectionNewFormat, self).hasContent_() - ): + if super(BaseConnectionNewFormat, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseConnectionNewFormat', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('BaseConnectionNewFormat') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="BaseConnectionNewFormat", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("BaseConnectionNewFormat") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseConnectionNewFormat') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="BaseConnectionNewFormat", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BaseConnectionNewFormat', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BaseConnectionNewFormat'): - super(BaseConnectionNewFormat, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseConnectionNewFormat') - if self.pre_cell is not None and 'pre_cell' not in already_processed: - already_processed.add('pre_cell') - outfile.write(' preCell=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.pre_cell), input_name='preCell')), )) - if self.pre_segment != 0 and 'pre_segment' not in already_processed: - already_processed.add('pre_segment') - outfile.write(' preSegment=%s' % (quote_attrib(self.pre_segment), )) - if self.pre_fraction_along != 0.5 and 'pre_fraction_along' not in already_processed: - already_processed.add('pre_fraction_along') - outfile.write(' preFractionAlong=%s' % (quote_attrib(self.pre_fraction_along), )) - if self.post_cell is not None and 'post_cell' not in already_processed: - already_processed.add('post_cell') - outfile.write(' postCell=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.post_cell), input_name='postCell')), )) - if self.post_segment != 0 and 'post_segment' not in already_processed: - already_processed.add('post_segment') - outfile.write(' postSegment=%s' % (quote_attrib(self.post_segment), )) - if 
self.post_fraction_along != 0.5 and 'post_fraction_along' not in already_processed: - already_processed.add('post_fraction_along') - outfile.write(' postFractionAlong=%s' % (quote_attrib(self.post_fraction_along), )) - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="BaseConnectionNewFormat", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="BaseConnectionNewFormat", + ): + super(BaseConnectionNewFormat, self).exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="BaseConnectionNewFormat", + ) + if self.pre_cell is not None and "pre_cell" not in already_processed: + already_processed.add("pre_cell") + outfile.write( + " preCell=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.pre_cell), input_name="preCell" + ) + ), + ) + ) + if self.pre_segment != 0 and "pre_segment" not in already_processed: + already_processed.add("pre_segment") + outfile.write(" preSegment=%s" % (quote_attrib(self.pre_segment),)) + if ( + self.pre_fraction_along != 0.5 + and "pre_fraction_along" not in already_processed + ): + already_processed.add("pre_fraction_along") + outfile.write( + " preFractionAlong=%s" % (quote_attrib(self.pre_fraction_along),) + ) + if self.post_cell is not None and "post_cell" not in already_processed: + already_processed.add("post_cell") + outfile.write( + " postCell=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.post_cell), input_name="postCell" + ) + ), + ) + ) + if self.post_segment != 0 and "post_segment" not in already_processed: + already_processed.add("post_segment") + outfile.write(" postSegment=%s" % (quote_attrib(self.post_segment),)) + if ( + self.post_fraction_along != 0.5 + and "post_fraction_along" not in already_processed + ): + already_processed.add("post_fraction_along") + outfile.write( + " postFractionAlong=%s" % (quote_attrib(self.post_fraction_along),) + ) + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseConnectionNewFormat', fromsubclass_=False, pretty_print=True): - super(BaseConnectionNewFormat, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="BaseConnectionNewFormat", + fromsubclass_=False, + pretty_print=True, + ): + super(BaseConnectionNewFormat, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) pass + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -17040,59 +31647,75 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('preCell', node) - if value is not None and 'preCell' not in already_processed: - 
already_processed.add('preCell') + value = find_attr_value_("preCell", node) + if value is not None and "preCell" not in already_processed: + already_processed.add("preCell") self.pre_cell = value - value = find_attr_value_('preSegment', node) - if value is not None and 'preSegment' not in already_processed: - already_processed.add('preSegment') + value = find_attr_value_("preSegment", node) + if value is not None and "preSegment" not in already_processed: + already_processed.add("preSegment") try: self.pre_segment = int(value) except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + raise_parse_error(node, "Bad integer attribute: %s" % exp) if self.pre_segment < 0: - raise_parse_error(node, 'Invalid NonNegativeInteger') - self.validate_NonNegativeInteger(self.pre_segment) # validate type NonNegativeInteger - value = find_attr_value_('preFractionAlong', node) - if value is not None and 'preFractionAlong' not in already_processed: - already_processed.add('preFractionAlong') + raise_parse_error(node, "Invalid NonNegativeInteger") + self.validate_NonNegativeInteger( + self.pre_segment + ) # validate type NonNegativeInteger + value = find_attr_value_("preFractionAlong", node) + if value is not None and "preFractionAlong" not in already_processed: + already_processed.add("preFractionAlong") try: self.pre_fraction_along = float(value) except ValueError as exp: - raise ValueError('Bad float/double attribute (preFractionAlong): %s' % exp) - self.validate_ZeroToOne(self.pre_fraction_along) # validate type ZeroToOne - value = find_attr_value_('postCell', node) - if value is not None and 'postCell' not in already_processed: - already_processed.add('postCell') + raise ValueError( + "Bad float/double attribute (preFractionAlong): %s" % exp + ) + self.validate_ZeroToOne(self.pre_fraction_along) # validate type ZeroToOne + value = find_attr_value_("postCell", node) + if value is not None and "postCell" not in already_processed: + already_processed.add("postCell") self.post_cell = value - value = find_attr_value_('postSegment', node) - if value is not None and 'postSegment' not in already_processed: - already_processed.add('postSegment') + value = find_attr_value_("postSegment", node) + if value is not None and "postSegment" not in already_processed: + already_processed.add("postSegment") try: self.post_segment = int(value) except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + raise_parse_error(node, "Bad integer attribute: %s" % exp) if self.post_segment < 0: - raise_parse_error(node, 'Invalid NonNegativeInteger') - self.validate_NonNegativeInteger(self.post_segment) # validate type NonNegativeInteger - value = find_attr_value_('postFractionAlong', node) - if value is not None and 'postFractionAlong' not in already_processed: - already_processed.add('postFractionAlong') + raise_parse_error(node, "Invalid NonNegativeInteger") + self.validate_NonNegativeInteger( + self.post_segment + ) # validate type NonNegativeInteger + value = find_attr_value_("postFractionAlong", node) + if value is not None and "postFractionAlong" not in already_processed: + already_processed.add("postFractionAlong") try: self.post_fraction_along = float(value) except ValueError as exp: - raise ValueError('Bad float/double attribute (postFractionAlong): %s' % exp) - self.validate_ZeroToOne(self.post_fraction_along) # validate type ZeroToOne - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - 
already_processed.add('xsi:type') + raise ValueError( + "Bad float/double attribute (postFractionAlong): %s" % exp + ) + self.validate_ZeroToOne(self.post_fraction_along) # validate type ZeroToOne + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value - super(BaseConnectionNewFormat, self).buildAttributes(node, attrs, already_processed) + super(BaseConnectionNewFormat, self).buildAttributes( + node, attrs, already_processed + ) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(BaseConnectionNewFormat, self).buildChildren(child_, node, nodeName_, True) + super(BaseConnectionNewFormat, self).buildChildren( + child_, node, nodeName_, True + ) pass + + # end class BaseConnectionNewFormat @@ -17100,20 +31723,36 @@ class BaseConnectionOldFormat(BaseConnection): """Base of all synaptic connections with preCellId, postSegmentId, etc. Note: this is not the best name for these attributes, since Id is superfluous, hence BaseConnectionNewFormat""" + member_data_items_ = [ - MemberSpec_('pre_cell_id', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('pre_segment_id', 'NonNegativeInteger', 0, 1, {'use': 'optional'}), - MemberSpec_('pre_fraction_along', 'ZeroToOne', 0, 1, {'use': 'optional'}), - MemberSpec_('post_cell_id', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('post_segment_id', 'NonNegativeInteger', 0, 1, {'use': 'optional'}), - MemberSpec_('post_fraction_along', 'ZeroToOne', 0, 1, {'use': 'optional'}), + MemberSpec_("pre_cell_id", "xs:string", 0, 0, {"use": u"required"}), + MemberSpec_("pre_segment_id", "NonNegativeInteger", 0, 1, {"use": "optional"}), + MemberSpec_("pre_fraction_along", "ZeroToOne", 0, 1, {"use": "optional"}), + MemberSpec_("post_cell_id", "xs:string", 0, 0, {"use": u"required"}), + MemberSpec_("post_segment_id", "NonNegativeInteger", 0, 1, {"use": "optional"}), + MemberSpec_("post_fraction_along", "ZeroToOne", 0, 1, {"use": "optional"}), ] subclass = None superclass = BaseConnection - def __init__(self, neuro_lex_id=None, id=None, pre_cell_id=None, pre_segment_id='0', pre_fraction_along='0.5', post_cell_id=None, post_segment_id='0', post_fraction_along='0.5', extensiontype_=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + pre_cell_id=None, + pre_segment_id="0", + pre_fraction_along="0.5", + post_cell_id=None, + post_segment_id="0", + post_fraction_along="0.5", + extensiontype_=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(BaseConnectionOldFormat, self).__init__(neuro_lex_id, id, extensiontype_, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(BaseConnectionOldFormat, self).__init__( + neuro_lex_id, id, extensiontype_, **kwargs_ + ) self.pre_cell_id = _cast(None, pre_cell_id) self.pre_segment_id = _cast(int, pre_segment_id) self.pre_fraction_along = _cast(float, pre_fraction_along) @@ -17121,82 +31760,176 @@ def __init__(self, neuro_lex_id=None, id=None, pre_cell_id=None, pre_segment_id= self.post_segment_id = _cast(int, post_segment_id) self.post_fraction_along = _cast(float, post_fraction_along) self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, BaseConnectionOldFormat) + CurrentSubclassModule_, BaseConnectionOldFormat + ) if subclass is not None: return subclass(*args_, 
**kwargs_) if BaseConnectionOldFormat.subclass: return BaseConnectionOldFormat.subclass(*args_, **kwargs_) else: return BaseConnectionOldFormat(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NonNegativeInteger(self, value): # Validate type NonNegativeInteger, a restriction on xs:nonNegativeInteger. if value is not None and Validate_simpletypes_: pass + def validate_ZeroToOne(self, value): # Validate type ZeroToOne, a restriction on xs:float. if value is not None and Validate_simpletypes_: if value < 0: - warnings_.warn('Value "%(value)s" does not match xsd minInclusive restriction on ZeroToOne' % {"value" : value} ) + warnings_.warn( + 'Value "%(value)s" does not match xsd minInclusive restriction on ZeroToOne' + % {"value": value} + ) if value > 1: - warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on ZeroToOne' % {"value" : value} ) + warnings_.warn( + 'Value "%(value)s" does not match xsd maxInclusive restriction on ZeroToOne' + % {"value": value} + ) + def hasContent_(self): - if ( - super(BaseConnectionOldFormat, self).hasContent_() - ): + if super(BaseConnectionOldFormat, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseConnectionOldFormat', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('BaseConnectionOldFormat') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="BaseConnectionOldFormat", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("BaseConnectionOldFormat") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseConnectionOldFormat') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="BaseConnectionOldFormat", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BaseConnectionOldFormat', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BaseConnectionOldFormat'): - super(BaseConnectionOldFormat, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseConnectionOldFormat') - if self.pre_cell_id is not None and 'pre_cell_id' not in already_processed: - already_processed.add('pre_cell_id') - outfile.write(' preCellId=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.pre_cell_id), input_name='preCellId')), )) - if self.pre_segment_id != 0 and 'pre_segment_id' not in already_processed: - already_processed.add('pre_segment_id') - outfile.write(' preSegmentId=%s' % (quote_attrib(self.pre_segment_id), )) - if self.pre_fraction_along != 0.5 and 'pre_fraction_along' not in already_processed: - already_processed.add('pre_fraction_along') - outfile.write(' 
preFractionAlong=%s' % (quote_attrib(self.pre_fraction_along), )) - if self.post_cell_id is not None and 'post_cell_id' not in already_processed: - already_processed.add('post_cell_id') - outfile.write(' postCellId=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.post_cell_id), input_name='postCellId')), )) - if self.post_segment_id != 0 and 'post_segment_id' not in already_processed: - already_processed.add('post_segment_id') - outfile.write(' postSegmentId=%s' % (quote_attrib(self.post_segment_id), )) - if self.post_fraction_along != 0.5 and 'post_fraction_along' not in already_processed: - already_processed.add('post_fraction_along') - outfile.write(' postFractionAlong=%s' % (quote_attrib(self.post_fraction_along), )) - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="BaseConnectionOldFormat", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="BaseConnectionOldFormat", + ): + super(BaseConnectionOldFormat, self).exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="BaseConnectionOldFormat", + ) + if self.pre_cell_id is not None and "pre_cell_id" not in already_processed: + already_processed.add("pre_cell_id") + outfile.write( + " preCellId=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.pre_cell_id), input_name="preCellId" + ) + ), + ) + ) + if self.pre_segment_id != 0 and "pre_segment_id" not in already_processed: + already_processed.add("pre_segment_id") + outfile.write(" preSegmentId=%s" % (quote_attrib(self.pre_segment_id),)) + if ( + self.pre_fraction_along != 0.5 + and "pre_fraction_along" not in already_processed + ): + already_processed.add("pre_fraction_along") + outfile.write( + " preFractionAlong=%s" % (quote_attrib(self.pre_fraction_along),) + ) + if self.post_cell_id is not None and "post_cell_id" not in already_processed: + already_processed.add("post_cell_id") + outfile.write( + " postCellId=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.post_cell_id), input_name="postCellId" + ) + ), + ) + ) + if self.post_segment_id != 0 and "post_segment_id" not in already_processed: + already_processed.add("post_segment_id") + outfile.write(" postSegmentId=%s" % (quote_attrib(self.post_segment_id),)) + if ( + self.post_fraction_along != 0.5 + and "post_fraction_along" not in already_processed + ): + already_processed.add("post_fraction_along") + outfile.write( + " postFractionAlong=%s" % (quote_attrib(self.post_fraction_along),) + ) + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseConnectionOldFormat', fromsubclass_=False, pretty_print=True): - super(BaseConnectionOldFormat, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="BaseConnectionOldFormat", + fromsubclass_=False, + 
pretty_print=True, + ): + super(BaseConnectionOldFormat, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) pass + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -17204,76 +31937,130 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('preCellId', node) - if value is not None and 'preCellId' not in already_processed: - already_processed.add('preCellId') + value = find_attr_value_("preCellId", node) + if value is not None and "preCellId" not in already_processed: + already_processed.add("preCellId") self.pre_cell_id = value - value = find_attr_value_('preSegmentId', node) - if value is not None and 'preSegmentId' not in already_processed: - already_processed.add('preSegmentId') + value = find_attr_value_("preSegmentId", node) + if value is not None and "preSegmentId" not in already_processed: + already_processed.add("preSegmentId") try: self.pre_segment_id = int(value) except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + raise_parse_error(node, "Bad integer attribute: %s" % exp) if self.pre_segment_id < 0: - raise_parse_error(node, 'Invalid NonNegativeInteger') - self.validate_NonNegativeInteger(self.pre_segment_id) # validate type NonNegativeInteger - value = find_attr_value_('preFractionAlong', node) - if value is not None and 'preFractionAlong' not in already_processed: - already_processed.add('preFractionAlong') + raise_parse_error(node, "Invalid NonNegativeInteger") + self.validate_NonNegativeInteger( + self.pre_segment_id + ) # validate type NonNegativeInteger + value = find_attr_value_("preFractionAlong", node) + if value is not None and "preFractionAlong" not in already_processed: + already_processed.add("preFractionAlong") try: self.pre_fraction_along = float(value) except ValueError as exp: - raise ValueError('Bad float/double attribute (preFractionAlong): %s' % exp) - self.validate_ZeroToOne(self.pre_fraction_along) # validate type ZeroToOne - value = find_attr_value_('postCellId', node) - if value is not None and 'postCellId' not in already_processed: - already_processed.add('postCellId') + raise ValueError( + "Bad float/double attribute (preFractionAlong): %s" % exp + ) + self.validate_ZeroToOne(self.pre_fraction_along) # validate type ZeroToOne + value = find_attr_value_("postCellId", node) + if value is not None and "postCellId" not in already_processed: + already_processed.add("postCellId") self.post_cell_id = value - value = find_attr_value_('postSegmentId', node) - if value is not None and 'postSegmentId' not in already_processed: - already_processed.add('postSegmentId') + value = find_attr_value_("postSegmentId", node) + if value is not None and "postSegmentId" not in already_processed: + already_processed.add("postSegmentId") try: self.post_segment_id = int(value) except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + raise_parse_error(node, "Bad integer attribute: %s" % exp) if self.post_segment_id < 0: - raise_parse_error(node, 'Invalid NonNegativeInteger') - self.validate_NonNegativeInteger(self.post_segment_id) # validate type NonNegativeInteger - value = find_attr_value_('postFractionAlong', node) - if value is not None and 'postFractionAlong' not in already_processed: - already_processed.add('postFractionAlong') + 
raise_parse_error(node, "Invalid NonNegativeInteger") + self.validate_NonNegativeInteger( + self.post_segment_id + ) # validate type NonNegativeInteger + value = find_attr_value_("postFractionAlong", node) + if value is not None and "postFractionAlong" not in already_processed: + already_processed.add("postFractionAlong") try: self.post_fraction_along = float(value) except ValueError as exp: - raise ValueError('Bad float/double attribute (postFractionAlong): %s' % exp) - self.validate_ZeroToOne(self.post_fraction_along) # validate type ZeroToOne - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + raise ValueError( + "Bad float/double attribute (postFractionAlong): %s" % exp + ) + self.validate_ZeroToOne(self.post_fraction_along) # validate type ZeroToOne + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value - super(BaseConnectionOldFormat, self).buildAttributes(node, attrs, already_processed) + super(BaseConnectionOldFormat, self).buildAttributes( + node, attrs, already_processed + ) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(BaseConnectionOldFormat, self).buildChildren(child_, node, nodeName_, True) + super(BaseConnectionOldFormat, self).buildChildren( + child_, node, nodeName_, True + ) pass + + # end class BaseConnectionOldFormat class Projection(BaseProjection): """Projection (set of synaptic connections) between two populations. Chemical/event based synaptic transmission""" + member_data_items_ = [ - MemberSpec_('synapse', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('connections', 'Connection', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Connection', u'name': u'connection', u'minOccurs': u'0'}, None), - MemberSpec_('connection_wds', 'ConnectionWD', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ConnectionWD', u'name': u'connectionWD', u'minOccurs': u'0'}, None), + MemberSpec_("synapse", "NmlId", 0, 0, {"use": u"required"}), + MemberSpec_( + "connections", + "Connection", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"Connection", + u"name": u"connection", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "connection_wds", + "ConnectionWD", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"ConnectionWD", + u"name": u"connectionWD", + u"minOccurs": u"0", + }, + None, + ), ] subclass = None superclass = BaseProjection - def __init__(self, neuro_lex_id=None, id=None, presynaptic_population=None, postsynaptic_population=None, synapse=None, connections=None, connection_wds=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + presynaptic_population=None, + postsynaptic_population=None, + synapse=None, + connections=None, + connection_wds=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(Projection, self).__init__(neuro_lex_id, id, presynaptic_population, postsynaptic_population, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(Projection, self).__init__( + neuro_lex_id, id, presynaptic_population, postsynaptic_population, **kwargs_ + ) self.synapse = _cast(None, synapse) if connections is None: self.connections = [] @@ -17283,69 +32070,137 @@ def __init__(self, neuro_lex_id=None, id=None, presynaptic_population=None, post self.connection_wds = [] else: self.connection_wds = connection_wds + def 
factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, Projection) + subclass = getSubclassFromModule_(CurrentSubclassModule_, Projection) if subclass is not None: return subclass(*args_, **kwargs_) if Projection.subclass: return Projection.subclass(*args_, **kwargs_) else: return Projection(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] + self.validate_NmlId_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [[u"^[a-zA-Z_][a-zA-Z0-9_]*$"]] + def hasContent_(self): if ( - self.connections or - self.connection_wds or - super(Projection, self).hasContent_() + self.connections + or self.connection_wds + or super(Projection, self).hasContent_() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Projection', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('Projection') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Projection", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("Projection") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Projection') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Projection" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Projection', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="Projection", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Projection'): - super(Projection, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Projection') - if self.synapse is not None and 'synapse' not in already_processed: - already_processed.add('synapse') - outfile.write(' synapse=%s' % (quote_attrib(self.synapse), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Projection', fromsubclass_=False, pretty_print=True): - super(Projection, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + 
outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="Projection" + ): + super(Projection, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Projection" + ) + if self.synapse is not None and "synapse" not in already_processed: + already_processed.add("synapse") + outfile.write(" synapse=%s" % (quote_attrib(self.synapse),)) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Projection", + fromsubclass_=False, + pretty_print=True, + ): + super(Projection, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" for connection_ in self.connections: - connection_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='connection', pretty_print=pretty_print) + connection_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="connection", + pretty_print=pretty_print, + ) for connectionWD_ in self.connection_wds: - connectionWD_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='connectionWD', pretty_print=pretty_print) + connectionWD_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="connectionWD", + pretty_print=pretty_print, + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -17353,41 +32208,42 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('synapse', node) - if value is not None and 'synapse' not in already_processed: - already_processed.add('synapse') + value = find_attr_value_("synapse", node) + if value is not None and "synapse" not in already_processed: + already_processed.add("synapse") self.synapse = value - self.validate_NmlId(self.synapse) # validate type NmlId + self.validate_NmlId(self.synapse) # validate type NmlId super(Projection, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'connection': + if nodeName_ == "connection": obj_ = Connection.factory(parent_object_=self) obj_.build(child_) self.connections.append(obj_) - obj_.original_tagname_ = 'connection' - elif nodeName_ == 'connectionWD': + obj_.original_tagname_ = "connection" + elif nodeName_ == "connectionWD": obj_ = ConnectionWD.factory(parent_object_=self) obj_.build(child_) self.connection_wds.append(obj_) - obj_.original_tagname_ = 'connectionWD' + obj_.original_tagname_ = "connectionWD" super(Projection, self).buildChildren(child_, node, nodeName_, True) def exportHdf5(self, h5file, h5Group): - """Export to HDF5 file. 
""" - #print("Exporting Projection: "+str(self.id)+" as HDF5") - + """Export to HDF5 file.""" + # print("Exporting Projection: "+str(self.id)+" as HDF5") import numpy - projGroup = h5file.create_group(h5Group, 'projection_'+self.id) + projGroup = h5file.create_group(h5Group, "projection_" + self.id) projGroup._f_setattr("id", self.id) projGroup._f_setattr("type", "projection") projGroup._f_setattr("presynapticPopulation", self.presynaptic_population) projGroup._f_setattr("postsynapticPopulation", self.postsynaptic_population) projGroup._f_setattr("synapse", self.synapse) - #print("Exporting "+str(len(self.connections))+" connections, "+str(len(self.connection_wds))+" connections with weight") + # print("Exporting "+str(len(self.connections))+" connections, "+str(len(self.connection_wds))+" connections with weight") connection_wds = len(self.connection_wds) > 0 @@ -17397,142 +32253,242 @@ def exportHdf5(self, h5file, h5Group): from neuroml.utils import has_segment_fraction_info - include_segment_fraction = has_segment_fraction_info(self.connections) or has_segment_fraction_info(self.connection_wds) + include_segment_fraction = has_segment_fraction_info( + self.connections + ) or has_segment_fraction_info(self.connection_wds) if include_segment_fraction: - extra_cols["column_"+str(cols)] = "pre_segment_id" - extra_cols["column_"+str(cols+1)] = "post_segment_id" - extra_cols["column_"+str(cols+2)] = "pre_fraction_along" - extra_cols["column_"+str(cols+3)] = "post_fraction_along" - cols +=4 - + extra_cols["column_" + str(cols)] = "pre_segment_id" + extra_cols["column_" + str(cols + 1)] = "post_segment_id" + extra_cols["column_" + str(cols + 2)] = "pre_fraction_along" + extra_cols["column_" + str(cols + 3)] = "post_fraction_along" + cols += 4 if connection_wds: - extra_cols["column_"+str(cols)] = "weight" - extra_cols["column_"+str(cols+1)] = "delay" - cols+=2 + extra_cols["column_" + str(cols)] = "weight" + extra_cols["column_" + str(cols + 1)] = "delay" + cols += 2 - a = numpy.zeros([len(self.connections)+len(self.connection_wds), cols], numpy.float32) + a = numpy.zeros( + [len(self.connections) + len(self.connection_wds), cols], numpy.float32 + ) - count=0 + count = 0 for connection in self.connections: - ####a[count,0] = connection.id - a[count,0] = connection.get_pre_cell_id() - a[count,1] = connection.get_post_cell_id() - if include_segment_fraction: - a[count,2] = connection.pre_segment_id - a[count,3] = connection.post_segment_id - a[count,4] = connection.pre_fraction_along - a[count,5] = connection.post_fraction_along - count=count+1 + ####a[count,0] = connection.id + a[count, 0] = connection.get_pre_cell_id() + a[count, 1] = connection.get_post_cell_id() + if include_segment_fraction: + a[count, 2] = connection.pre_segment_id + a[count, 3] = connection.post_segment_id + a[count, 4] = connection.pre_fraction_along + a[count, 5] = connection.post_fraction_along + count = count + 1 for connection in self.connection_wds: - ###a[count,0] = connection.id - a[count,0] = connection.get_pre_cell_id() - a[count,1] = connection.get_post_cell_id() - - if include_segment_fraction: - a[count,2] = connection.pre_segment_id - a[count,3] = connection.post_segment_id - a[count,4] = connection.pre_fraction_along - a[count,5] = connection.post_fraction_along - - a[count,cols-2] = connection.weight - if 'ms' in connection.delay: - delay = float(connection.delay[:-2].strip()) - elif 's' in connection.delay: - delay = float(connection.delay[:-1].strip())*1000. 
- elif 'us' in connection.delay: - delay = float(connection.delay[:-2].strip())/1e3 - - a[count,cols-1] = delay - count=count+1 - - if len(a)>0: - array = h5file.create_carray(projGroup, self.id, obj=a, title="Connections of cells in "+ self.id) + ###a[count,0] = connection.id + a[count, 0] = connection.get_pre_cell_id() + a[count, 1] = connection.get_post_cell_id() + + if include_segment_fraction: + a[count, 2] = connection.pre_segment_id + a[count, 3] = connection.post_segment_id + a[count, 4] = connection.pre_fraction_along + a[count, 5] = connection.post_fraction_along + + a[count, cols - 2] = connection.weight + if "ms" in connection.delay: + delay = float(connection.delay[:-2].strip()) + elif "s" in connection.delay: + delay = float(connection.delay[:-1].strip()) * 1000.0 + elif "us" in connection.delay: + delay = float(connection.delay[:-2].strip()) / 1e3 + + a[count, cols - 1] = delay + count = count + 1 + + if len(a) > 0: + array = h5file.create_carray( + projGroup, self.id, obj=a, title="Connections of cells in " + self.id + ) ###array._f_setattr("column_0", "id") array._f_setattr("column_0", "pre_cell_id") array._f_setattr("column_1", "post_cell_id") for col in extra_cols.keys(): - array._f_setattr(col,extra_cols[col]) - + array._f_setattr(col, extra_cols[col]) def __str__(self): - return "Projection: "+self.id+" from "+self.presynaptic_population+" to "+self.postsynaptic_population+", synapse: "+self.synapse - - - + return ( + "Projection: " + + self.id + + " from " + + self.presynaptic_population + + " to " + + self.postsynaptic_population + + ", synapse: " + + self.synapse + ) # end class Projection class SpikeGeneratorRefPoisson(SpikeGeneratorPoisson): member_data_items_ = [ - MemberSpec_('minimum_isi', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), + MemberSpec_("minimum_isi", "Nml2Quantity_time", 0, 0, {"use": u"required"}), ] subclass = None superclass = SpikeGeneratorPoisson - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, average_rate=None, minimum_isi=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + average_rate=None, + minimum_isi=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(SpikeGeneratorRefPoisson, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, average_rate, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(SpikeGeneratorRefPoisson, self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + average_rate, + **kwargs_ + ) self.minimum_isi = _cast(None, minimum_isi) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, SpikeGeneratorRefPoisson) + CurrentSubclassModule_, SpikeGeneratorRefPoisson + ) if subclass is not None: return subclass(*args_, **kwargs_) if SpikeGeneratorRefPoisson.subclass: return SpikeGeneratorRefPoisson.subclass(*args_, **kwargs_) else: return SpikeGeneratorRefPoisson(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. 
if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] + self.validate_Nml2Quantity_time_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_time_patterns_, + ) + ) + + validate_Nml2Quantity_time_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$"] + ] + def hasContent_(self): - if ( - super(SpikeGeneratorRefPoisson, self).hasContent_() - ): + if super(SpikeGeneratorRefPoisson, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SpikeGeneratorRefPoisson', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('SpikeGeneratorRefPoisson') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="SpikeGeneratorRefPoisson", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("SpikeGeneratorRefPoisson") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpikeGeneratorRefPoisson') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="SpikeGeneratorRefPoisson", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SpikeGeneratorRefPoisson', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="SpikeGeneratorRefPoisson", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SpikeGeneratorRefPoisson'): - super(SpikeGeneratorRefPoisson, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpikeGeneratorRefPoisson') - if self.minimum_isi is not None and 'minimum_isi' not in already_processed: - already_processed.add('minimum_isi') - outfile.write(' minimumISI=%s' % (quote_attrib(self.minimum_isi), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SpikeGeneratorRefPoisson', fromsubclass_=False, pretty_print=True): - super(SpikeGeneratorRefPoisson, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + 
name_="SpikeGeneratorRefPoisson", + ): + super(SpikeGeneratorRefPoisson, self).exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="SpikeGeneratorRefPoisson", + ) + if self.minimum_isi is not None and "minimum_isi" not in already_processed: + already_processed.add("minimum_isi") + outfile.write(" minimumISI=%s" % (quote_attrib(self.minimum_isi),)) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="SpikeGeneratorRefPoisson", + fromsubclass_=False, + pretty_print=True, + ): + super(SpikeGeneratorRefPoisson, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -17540,76 +32496,183 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('minimumISI', node) - if value is not None and 'minimumISI' not in already_processed: - already_processed.add('minimumISI') + value = find_attr_value_("minimumISI", node) + if value is not None and "minimumISI" not in already_processed: + already_processed.add("minimumISI") self.minimum_isi = value - self.validate_Nml2Quantity_time(self.minimum_isi) # validate type Nml2Quantity_time - super(SpikeGeneratorRefPoisson, self).buildAttributes(node, attrs, already_processed) + self.validate_Nml2Quantity_time( + self.minimum_isi + ) # validate type Nml2Quantity_time + super(SpikeGeneratorRefPoisson, self).buildAttributes( + node, attrs, already_processed + ) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(SpikeGeneratorRefPoisson, self).buildChildren(child_, node, nodeName_, True) + super(SpikeGeneratorRefPoisson, self).buildChildren( + child_, node, nodeName_, True + ) pass + + # end class SpikeGeneratorRefPoisson class ConcentrationModel_D(DecayingPoolConcentrationModel): member_data_items_ = [ - MemberSpec_('type', 'xs:string', 0, 0, {'use': u'required'}), + MemberSpec_("type", "xs:string", 0, 0, {"use": u"required"}), ] subclass = None superclass = DecayingPoolConcentrationModel - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, ion=None, resting_conc=None, decay_constant=None, shell_thickness=None, type='decayingPoolConcentrationModel', **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + ion=None, + resting_conc=None, + decay_constant=None, + shell_thickness=None, + type="decayingPoolConcentrationModel", + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(ConcentrationModel_D, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, ion, resting_conc, decay_constant, shell_thickness, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(ConcentrationModel_D, self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + ion, + resting_conc, + decay_constant, + shell_thickness, + **kwargs_ + ) self.type = _cast(None, type) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, ConcentrationModel_D) + CurrentSubclassModule_, ConcentrationModel_D + ) if subclass is not None: return 
subclass(*args_, **kwargs_) if ConcentrationModel_D.subclass: return ConcentrationModel_D.subclass(*args_, **kwargs_) else: return ConcentrationModel_D(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): - if ( - super(ConcentrationModel_D, self).hasContent_() - ): + if super(ConcentrationModel_D, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ConcentrationModel_D', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('ConcentrationModel_D') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ConcentrationModel_D", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ConcentrationModel_D") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ConcentrationModel_D') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ConcentrationModel_D", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ConcentrationModel_D', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ConcentrationModel_D", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ConcentrationModel_D'): - super(ConcentrationModel_D, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ConcentrationModel_D') - if self.type != "decayingPoolConcentrationModel" and 'type' not in already_processed: - already_processed.add('type') - outfile.write(' type=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.type), input_name='type')), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ConcentrationModel_D', fromsubclass_=False, pretty_print=True): - super(ConcentrationModel_D, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ConcentrationModel_D", + ): + super(ConcentrationModel_D, self).exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ConcentrationModel_D", + ) + if ( + self.type != "decayingPoolConcentrationModel" + and "type" not in already_processed + ): + already_processed.add("type") + outfile.write( + " type=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.type), input_name="type" + ) + ), + ) + ) + + def exportChildren( + self, + outfile, + level, + 
namespaceprefix_="", + namespacedef_="", + name_="ConcentrationModel_D", + fromsubclass_=False, + pretty_print=True, + ): + super(ConcentrationModel_D, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -17617,70 +32680,155 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('type', node) - if value is not None and 'type' not in already_processed: - already_processed.add('type') + value = find_attr_value_("type", node) + if value is not None and "type" not in already_processed: + already_processed.add("type") self.type = value - super(ConcentrationModel_D, self).buildAttributes(node, attrs, already_processed) + super(ConcentrationModel_D, self).buildAttributes( + node, attrs, already_processed + ) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(ConcentrationModel_D, self).buildChildren(child_, node, nodeName_, True) pass + + # end class ConcentrationModel_D class ChannelDensityNernstCa2(ChannelDensityNernst): - member_data_items_ = [ - ] + member_data_items_ = [] subclass = None superclass = ChannelDensityNernst - def __init__(self, neuro_lex_id=None, id=None, ion_channel=None, cond_density=None, segment_groups='all', segments=None, ion=None, variable_parameters=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + ion_channel=None, + cond_density=None, + segment_groups="all", + segments=None, + ion=None, + variable_parameters=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(ChannelDensityNernstCa2, self).__init__(neuro_lex_id, id, ion_channel, cond_density, segment_groups, segments, ion, variable_parameters, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(ChannelDensityNernstCa2, self).__init__( + neuro_lex_id, + id, + ion_channel, + cond_density, + segment_groups, + segments, + ion, + variable_parameters, + **kwargs_ + ) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, ChannelDensityNernstCa2) + CurrentSubclassModule_, ChannelDensityNernstCa2 + ) if subclass is not None: return subclass(*args_, **kwargs_) if ChannelDensityNernstCa2.subclass: return ChannelDensityNernstCa2.subclass(*args_, **kwargs_) else: return ChannelDensityNernstCa2(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): - if ( - super(ChannelDensityNernstCa2, self).hasContent_() - ): + if super(ChannelDensityNernstCa2, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ChannelDensityNernstCa2', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('ChannelDensityNernstCa2') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ChannelDensityNernstCa2", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ChannelDensityNernstCa2") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - 
outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelDensityNernstCa2') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ChannelDensityNernstCa2", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ChannelDensityNernstCa2', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ChannelDensityNernstCa2", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ChannelDensityNernstCa2'): - super(ChannelDensityNernstCa2, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelDensityNernstCa2') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ChannelDensityNernstCa2', fromsubclass_=False, pretty_print=True): - super(ChannelDensityNernstCa2, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ChannelDensityNernstCa2", + ): + super(ChannelDensityNernstCa2, self).exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ChannelDensityNernstCa2", + ) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ChannelDensityNernstCa2", + fromsubclass_=False, + pretty_print=True, + ): + super(ChannelDensityNernstCa2, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -17688,78 +32836,180 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - super(ChannelDensityNernstCa2, self).buildAttributes(node, attrs, already_processed) + super(ChannelDensityNernstCa2, self).buildAttributes( + node, attrs, already_processed + ) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(ChannelDensityNernstCa2, self).buildChildren(child_, node, nodeName_, True) + super(ChannelDensityNernstCa2, self).buildChildren( + child_, node, nodeName_, True + ) pass + + # end class ChannelDensityNernstCa2 class ChannelDensityVShift(ChannelDensity): member_data_items_ = [ - MemberSpec_('v_shift', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), + MemberSpec_("v_shift", "Nml2Quantity_voltage", 0, 0, {"use": u"required"}), ] subclass = None superclass = ChannelDensity - def __init__(self, neuro_lex_id=None, id=None, ion_channel=None, cond_density=None, erev=None, segment_groups='all', segments=None, ion=None, variable_parameters=None, v_shift=None, **kwargs_): + + def 
__init__( + self, + neuro_lex_id=None, + id=None, + ion_channel=None, + cond_density=None, + erev=None, + segment_groups="all", + segments=None, + ion=None, + variable_parameters=None, + v_shift=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(ChannelDensityVShift, self).__init__(neuro_lex_id, id, ion_channel, cond_density, erev, segment_groups, segments, ion, variable_parameters, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(ChannelDensityVShift, self).__init__( + neuro_lex_id, + id, + ion_channel, + cond_density, + erev, + segment_groups, + segments, + ion, + variable_parameters, + **kwargs_ + ) self.v_shift = _cast(None, v_shift) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, ChannelDensityVShift) + CurrentSubclassModule_, ChannelDensityVShift + ) if subclass is not None: return subclass(*args_, **kwargs_) if ChannelDensityVShift.subclass: return ChannelDensityVShift.subclass(*args_, **kwargs_) else: return ChannelDensityVShift(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_voltage(self, value): # Validate type Nml2Quantity_voltage, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_voltage_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_voltage_patterns_, )) - validate_Nml2Quantity_voltage_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$']] + self.validate_Nml2Quantity_voltage_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_voltage_patterns_, + ) + ) + + validate_Nml2Quantity_voltage_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$"] + ] + def hasContent_(self): - if ( - super(ChannelDensityVShift, self).hasContent_() - ): + if super(ChannelDensityVShift, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ChannelDensityVShift', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('ChannelDensityVShift') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ChannelDensityVShift", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ChannelDensityVShift") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelDensityVShift') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ChannelDensityVShift", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ChannelDensityVShift', 
pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ChannelDensityVShift", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ChannelDensityVShift'): - super(ChannelDensityVShift, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelDensityVShift') - if self.v_shift is not None and 'v_shift' not in already_processed: - already_processed.add('v_shift') - outfile.write(' vShift=%s' % (quote_attrib(self.v_shift), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ChannelDensityVShift', fromsubclass_=False, pretty_print=True): - super(ChannelDensityVShift, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ChannelDensityVShift", + ): + super(ChannelDensityVShift, self).exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ChannelDensityVShift", + ) + if self.v_shift is not None and "v_shift" not in already_processed: + already_processed.add("v_shift") + outfile.write(" vShift=%s" % (quote_attrib(self.v_shift),)) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ChannelDensityVShift", + fromsubclass_=False, + pretty_print=True, + ): + super(ChannelDensityVShift, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -17767,16 +33017,24 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('vShift', node) - if value is not None and 'vShift' not in already_processed: - already_processed.add('vShift') + value = find_attr_value_("vShift", node) + if value is not None and "vShift" not in already_processed: + already_processed.add("vShift") self.v_shift = value - self.validate_Nml2Quantity_voltage(self.v_shift) # validate type Nml2Quantity_voltage - super(ChannelDensityVShift, self).buildAttributes(node, attrs, already_processed) + self.validate_Nml2Quantity_voltage( + self.v_shift + ) # validate type Nml2Quantity_voltage + super(ChannelDensityVShift, self).buildAttributes( + node, attrs, already_processed + ) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(ChannelDensityVShift, self).buildChildren(child_, node, nodeName_, True) pass + + # end class ChannelDensityVShift @@ -17785,86 +33043,216 @@ class Cell(BaseCell): points to the id of the morphology Should only be used if biophysicalProperties element is outside the cell. 
This points to the id of the biophysicalProperties""" + member_data_items_ = [ - MemberSpec_('morphology_attr', 'xs:string', 0, 1, {'use': 'optional'}), - MemberSpec_('biophysical_properties_attr', 'xs:string', 0, 1, {'use': 'optional'}), - MemberSpec_('morphology', 'Morphology', 0, 1, {u'type': u'Morphology', u'name': u'morphology', u'minOccurs': u'0'}, None), - MemberSpec_('biophysical_properties', 'BiophysicalProperties', 0, 1, {u'type': u'BiophysicalProperties', u'name': u'biophysicalProperties', u'minOccurs': u'0'}, None), + MemberSpec_("morphology_attr", "xs:string", 0, 1, {"use": "optional"}), + MemberSpec_( + "biophysical_properties_attr", "xs:string", 0, 1, {"use": "optional"} + ), + MemberSpec_( + "morphology", + "Morphology", + 0, + 1, + {u"type": u"Morphology", u"name": u"morphology", u"minOccurs": u"0"}, + None, + ), + MemberSpec_( + "biophysical_properties", + "BiophysicalProperties", + 0, + 1, + { + u"type": u"BiophysicalProperties", + u"name": u"biophysicalProperties", + u"minOccurs": u"0", + }, + None, + ), ] subclass = None superclass = BaseCell - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, morphology_attr=None, biophysical_properties_attr=None, morphology=None, biophysical_properties=None, extensiontype_=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + morphology_attr=None, + biophysical_properties_attr=None, + morphology=None, + biophysical_properties=None, + extensiontype_=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(Cell, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, extensiontype_, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(Cell, self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + extensiontype_, + **kwargs_ + ) self.morphology_attr = _cast(None, morphology_attr) self.biophysical_properties_attr = _cast(None, biophysical_properties_attr) self.morphology = morphology self.biophysical_properties = biophysical_properties self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, Cell) + subclass = getSubclassFromModule_(CurrentSubclassModule_, Cell) if subclass is not None: return subclass(*args_, **kwargs_) if Cell.subclass: return Cell.subclass(*args_, **kwargs_) else: return Cell(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): if ( - self.morphology is not None or - self.biophysical_properties is not None or - super(Cell, self).hasContent_() + self.morphology is not None + or self.biophysical_properties is not None + or super(Cell, self).hasContent_() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Cell', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('Cell') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Cell", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("Cell") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % 
(namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Cell') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Cell" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Cell', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="Cell", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Cell'): - super(Cell, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Cell') - if self.morphology_attr is not None and 'morphology_attr' not in already_processed: - already_processed.add('morphology_attr') - outfile.write(' morphology=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.morphology_attr), input_name='morphology_attr')), )) - if self.biophysical_properties_attr is not None and 'biophysical_properties_attr' not in already_processed: - already_processed.add('biophysical_properties_attr') - outfile.write(' biophysicalProperties=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.biophysical_properties_attr), input_name='biophysical_properties_attr')), )) - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="Cell" + ): + super(Cell, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Cell" + ) + if ( + self.morphology_attr is not None + and "morphology_attr" not in already_processed + ): + already_processed.add("morphology_attr") + outfile.write( + " morphology=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.morphology_attr), + input_name="morphology_attr", + ) + ), + ) + ) + if ( + self.biophysical_properties_attr is not None + and "biophysical_properties_attr" not in already_processed + ): + already_processed.add("biophysical_properties_attr") + outfile.write( + " biophysicalProperties=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.biophysical_properties_attr), + input_name="biophysical_properties_attr", + ) + ), + ) + ) + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Cell', fromsubclass_=False, pretty_print=True): - super(Cell, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Cell", + fromsubclass_=False, + pretty_print=True, + ): + super(Cell, self).exportChildren( + outfile, level, 
namespaceprefix_, name_, True, pretty_print=pretty_print + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.morphology is not None: - self.morphology.export(outfile, level, namespaceprefix_, namespacedef_='', name_='morphology', pretty_print=pretty_print) + self.morphology.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="morphology", + pretty_print=pretty_print, + ) if self.biophysical_properties is not None: - self.biophysical_properties.export(outfile, level, namespaceprefix_, namespacedef_='', name_='biophysicalProperties', pretty_print=pretty_print) + self.biophysical_properties.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="biophysicalProperties", + pretty_print=pretty_print, + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -17872,34 +33260,35 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('morphology', node) - if value is not None and 'morphology_attr' not in already_processed: - already_processed.add('morphology_attr') + value = find_attr_value_("morphology", node) + if value is not None and "morphology_attr" not in already_processed: + already_processed.add("morphology_attr") self.morphology_attr = value - value = find_attr_value_('biophysicalProperties', node) - if value is not None and 'biophysical_properties_attr' not in already_processed: - already_processed.add('biophysical_properties_attr') + value = find_attr_value_("biophysicalProperties", node) + if value is not None and "biophysical_properties_attr" not in already_processed: + already_processed.add("biophysical_properties_attr") self.biophysical_properties_attr = value - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value super(Cell, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'morphology': + if nodeName_ == "morphology": obj_ = Morphology.factory(parent_object_=self) obj_.build(child_) self.morphology = obj_ - obj_.original_tagname_ = 'morphology' - elif nodeName_ == 'biophysicalProperties': + obj_.original_tagname_ = "morphology" + elif nodeName_ == "biophysicalProperties": obj_ = BiophysicalProperties.factory(parent_object_=self) obj_.build(child_) self.biophysical_properties = obj_ - obj_.original_tagname_ = 'biophysicalProperties' + obj_.original_tagname_ = "biophysicalProperties" super(Cell, self).buildChildren(child_, node, nodeName_, True) - # Get segment object by its id def get_segment(self, segment_id): # type: (str) -> Segment @@ -17915,7 +33304,9 @@ def get_segment(self, segment_id): if segment.id == segment_id: return segment - raise Exception("Segment with id "+str(segment_id)+" not found in cell "+str(self.id)) + raise Exception( + "Segment with id " + str(segment_id) + " not found in cell " + str(self.id) + ) def get_segments_by_substring(self, substring): # type: (str) -> dict @@ -17933,10 +33324,14 @@ def get_segments_by_substring(self, substring): if substring in segment.id: segments[segment.id] = segment if 
len(segments) == 0: - raise Exception("Segments with id matching "+str(substring)+" not found in cell "+str(self.id)) + raise Exception( + "Segments with id matching " + + str(substring) + + " not found in cell " + + str(self.id) + ) return segments - # Get the proximal point of a segment, even the proximal field is None and # so the proximal point is on the parent (at a point set by fraction_along) def get_actual_proximal(self, segment_id): @@ -17957,15 +33352,19 @@ def get_actual_proximal(self, segment_id): parent = self.get_segment(segment.parent.segments) fract = float(segment.parent.fraction_along) - if fract==1: + if fract == 1: return parent.distal - elif fract==0: + elif fract == 0: return self.get_actual_proximal(segment.parent.segments) else: pd = parent.distal pp = self.get_actual_proximal(segment.parent.segments) - p = Point3DWithDiam((1-fract)*pp.x+fract*pd.x, (1-fract)*pp.y+fract*pd.y, (1-fract)*pp.z+fract*pd.z) - p.diameter = (1-fract)*pp.diameter+fract*pd.diameter + p = Point3DWithDiam( + (1 - fract) * pp.x + fract * pd.x, + (1 - fract) * pp.y + fract * pd.y, + (1 - fract) * pp.z + fract * pd.z, + ) + p.diameter = (1 - fract) * pp.diameter + fract * pd.diameter return p @@ -18035,9 +33434,7 @@ def get_segment_ids_vs_segments(self): return segments - def get_all_segments_in_group(self, - segment_group, - assume_all_means_all=True): + def get_all_segments_in_group(self, segment_group, assume_all_means_all=True): # type: (SegmentGroup, bool) -> List[Segment] """Get all the segments in a segment group of the cell. @@ -18058,10 +33455,14 @@ def get_all_segments_in_group(self, segment_group = sg if isinstance(segment_group, str): - if assume_all_means_all and segment_group=='all': # i.e. wasn't explicitly defined, but assume it means all segments + if ( + assume_all_means_all and segment_group == "all" + ): # i.e. 
wasn't explicitly defined, but assume it means all segments return [seg.id for seg in self.morphology.segments] - raise Exception('No segment group '+segment_group+ ' found in cell '+self.id) + raise Exception( + "No segment group " + segment_group + " found in cell " + self.id + ) all_segs = [] @@ -18069,7 +33470,6 @@ def get_all_segments_in_group(self, if not member.segments in all_segs: all_segs.append(member.segments) - for include in segment_group.includes: segs_here = self.get_all_segments_in_group(include.segment_groups) for s in segs_here: @@ -18078,13 +33478,14 @@ def get_all_segments_in_group(self, return all_segs - - def get_ordered_segments_in_groups(self, - group_list, - check_parentage=False, - include_cumulative_lengths=False, - include_path_lengths=False, - path_length_metric="Path Length from root"): # Only option supported + def get_ordered_segments_in_groups( + self, + group_list, + check_parentage=False, + include_cumulative_lengths=False, + include_path_lengths=False, + path_length_metric="Path Length from root", + ): # Only option supported # type: (List, bool, bool, bool, str) -> Dict """ Get ordered list of segments in specified groups @@ -18120,12 +33521,13 @@ def get_ordered_segments_in_groups(self, ord_segs = {} from operator import attrgetter + for key in unord_segs.keys(): segs = unord_segs[key] - if len(segs)==1 or len(segs)==0: - ord_segs[key]=segs + if len(segs) == 1 or len(segs) == 0: + ord_segs[key] = segs else: - ord_segs[key]=sorted(segs,key=attrgetter('id'),reverse=False) + ord_segs[key] = sorted(segs, key=attrgetter("id"), reverse=False) if check_parentage: # check parent ordering @@ -18135,10 +33537,14 @@ def get_ordered_segments_in_groups(self, for s in ord_segs[key]: if s.id != ord_segs[key][0].id: if not s.parent or not s.parent.segments in existing_ids: - raise Exception("Problem with finding parent of seg: "+str(s)+" in list: "+str(ord_segs)) + raise Exception( + "Problem with finding parent of seg: " + + str(s) + + " in list: " + + str(ord_segs) + ) existing_ids.append(s.id) - if include_cumulative_lengths or include_path_lengths: import math @@ -18156,12 +33562,15 @@ def get_ordered_segments_in_groups(self, length = self.get_segment_length(seg.id) - if not seg.parent or not seg.parent.segments in path_lengths_to_distal[key]: + if ( + not seg.parent + or not seg.parent.segments in path_lengths_to_distal[key] + ): path_lengths_to_proximal[key][seg.id] = 0 last_seg = seg par_seg_element = seg.parent - while par_seg_element!=None: + while par_seg_element != None: par_seg = segments[par_seg_element.segments] d = par_seg.distal @@ -18171,28 +33580,30 @@ def get_ordered_segments_in_groups(self, par_seg_parent_seg = segments[par_seg.parent.segments] p = par_seg_parent_seg.distal - par_length = math.sqrt( (d.x-p.x)**2 + (d.y-p.y)**2 + (d.z-p.z)**2 ) + par_length = math.sqrt( + (d.x - p.x) ** 2 + (d.y - p.y) ** 2 + (d.z - p.z) ** 2 + ) fract = float(last_seg.parent.fraction_along) - path_lengths_to_proximal[key][seg.id] += par_length*fract + path_lengths_to_proximal[key][seg.id] += par_length * fract last_seg = par_seg par_seg_element = par_seg.parent - else: pd = path_lengths_to_distal[key][seg.parent.segments] pp = path_lengths_to_proximal[key][seg.parent.segments] fract = float(seg.parent.fraction_along) - path_lengths_to_proximal[key][seg.id] = pp + (pd - pp)*fract + path_lengths_to_proximal[key][seg.id] = pp + (pd - pp) * fract - path_lengths_to_distal[key][seg.id] = path_lengths_to_proximal[key][seg.id] + length + 
path_lengths_to_distal[key][seg.id] = ( + path_lengths_to_proximal[key][seg.id] + length + ) tot_len += length cumulative_lengths[key].append(tot_len) - if include_path_lengths and not include_cumulative_lengths: return ord_segs, path_lengths_to_proximal, path_lengths_to_distal @@ -18203,7 +33614,12 @@ def get_ordered_segments_in_groups(self, if include_cumulative_lengths and include_path_lengths: - return ord_segs, cumulative_lengths, path_lengths_to_proximal, path_lengths_to_distal + return ( + ord_segs, + cumulative_lengths, + path_lengths_to_proximal, + path_lengths_to_distal, + ) return ord_segs @@ -18221,7 +33637,9 @@ def get_segment_group(self, sg_id): if sg.id == sg_id: return sg - raise Exception("Segment group with id "+str(sg_id)+" not found in cell "+str(self.id)) + raise Exception( + "Segment group with id " + str(sg_id) + " not found in cell " + str(self.id) + ) def get_segment_groups_by_substring(self, substring): # type: (str) -> dict @@ -18237,17 +33655,21 @@ def get_segment_groups_by_substring(self, substring): if substring in sg.id: sgs[sg.id] = sg if len(sgs) == 0: - raise Exception("Segment group with id matching "+str(substring)+" not found in cell "+str(self.id)) + raise Exception( + "Segment group with id matching " + + str(substring) + + " not found in cell " + + str(self.id) + ) return sgs - def summary(self): """Print cell summary.""" print("*******************************************************") - print("* Cell: "+str(self.id)) - print("* Notes: "+str(self.notes)) - print("* Segments: "+str(len(self.morphology.segments))) - print("* SegmentGroups: "+str(len(self.morphology.segment_groups))) + print("* Cell: " + str(self.id)) + print("* Notes: " + str(self.notes)) + print("* Segments: " + str(len(self.morphology.segments))) + print("* SegmentGroups: " + str(len(self.morphology.segment_groups))) print("*******************************************************") # end class Cell @@ -18255,34 +33677,93 @@ def summary(self): class PinskyRinzelCA3Cell(BaseCell): member_data_items_ = [ - MemberSpec_('i_soma', 'Nml2Quantity_currentDensity', 0, 0, {'use': u'required'}), - MemberSpec_('i_dend', 'Nml2Quantity_currentDensity', 0, 0, {'use': u'required'}), - MemberSpec_('gc', 'Nml2Quantity_conductanceDensity', 0, 0, {'use': u'required'}), - MemberSpec_('g_ls', 'Nml2Quantity_conductanceDensity', 0, 0, {'use': u'required'}), - MemberSpec_('g_ld', 'Nml2Quantity_conductanceDensity', 0, 0, {'use': u'required'}), - MemberSpec_('g_na', 'Nml2Quantity_conductanceDensity', 0, 0, {'use': u'required'}), - MemberSpec_('g_kdr', 'Nml2Quantity_conductanceDensity', 0, 0, {'use': u'required'}), - MemberSpec_('g_ca', 'Nml2Quantity_conductanceDensity', 0, 0, {'use': u'required'}), - MemberSpec_('g_kahp', 'Nml2Quantity_conductanceDensity', 0, 0, {'use': u'required'}), - MemberSpec_('g_kc', 'Nml2Quantity_conductanceDensity', 0, 0, {'use': u'required'}), - MemberSpec_('g_nmda', 'Nml2Quantity_conductanceDensity', 0, 0, {'use': u'required'}), - MemberSpec_('g_ampa', 'Nml2Quantity_conductanceDensity', 0, 0, {'use': u'required'}), - MemberSpec_('e_na', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('e_ca', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('e_k', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('e_l', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('qd0', 'Nml2Quantity_none', 0, 0, {'use': u'required'}), - MemberSpec_('pp', 'Nml2Quantity_none', 0, 0, {'use': u'required'}), - MemberSpec_('alphac', 
'Nml2Quantity_none', 0, 0, {'use': u'required'}), - MemberSpec_('betac', 'Nml2Quantity_none', 0, 0, {'use': u'required'}), - MemberSpec_('cm', 'Nml2Quantity_specificCapacitance', 0, 0, {'use': u'required'}), + MemberSpec_( + "i_soma", "Nml2Quantity_currentDensity", 0, 0, {"use": u"required"} + ), + MemberSpec_( + "i_dend", "Nml2Quantity_currentDensity", 0, 0, {"use": u"required"} + ), + MemberSpec_( + "gc", "Nml2Quantity_conductanceDensity", 0, 0, {"use": u"required"} + ), + MemberSpec_( + "g_ls", "Nml2Quantity_conductanceDensity", 0, 0, {"use": u"required"} + ), + MemberSpec_( + "g_ld", "Nml2Quantity_conductanceDensity", 0, 0, {"use": u"required"} + ), + MemberSpec_( + "g_na", "Nml2Quantity_conductanceDensity", 0, 0, {"use": u"required"} + ), + MemberSpec_( + "g_kdr", "Nml2Quantity_conductanceDensity", 0, 0, {"use": u"required"} + ), + MemberSpec_( + "g_ca", "Nml2Quantity_conductanceDensity", 0, 0, {"use": u"required"} + ), + MemberSpec_( + "g_kahp", "Nml2Quantity_conductanceDensity", 0, 0, {"use": u"required"} + ), + MemberSpec_( + "g_kc", "Nml2Quantity_conductanceDensity", 0, 0, {"use": u"required"} + ), + MemberSpec_( + "g_nmda", "Nml2Quantity_conductanceDensity", 0, 0, {"use": u"required"} + ), + MemberSpec_( + "g_ampa", "Nml2Quantity_conductanceDensity", 0, 0, {"use": u"required"} + ), + MemberSpec_("e_na", "Nml2Quantity_voltage", 0, 0, {"use": u"required"}), + MemberSpec_("e_ca", "Nml2Quantity_voltage", 0, 0, {"use": u"required"}), + MemberSpec_("e_k", "Nml2Quantity_voltage", 0, 0, {"use": u"required"}), + MemberSpec_("e_l", "Nml2Quantity_voltage", 0, 0, {"use": u"required"}), + MemberSpec_("qd0", "Nml2Quantity_none", 0, 0, {"use": u"required"}), + MemberSpec_("pp", "Nml2Quantity_none", 0, 0, {"use": u"required"}), + MemberSpec_("alphac", "Nml2Quantity_none", 0, 0, {"use": u"required"}), + MemberSpec_("betac", "Nml2Quantity_none", 0, 0, {"use": u"required"}), + MemberSpec_( + "cm", "Nml2Quantity_specificCapacitance", 0, 0, {"use": u"required"} + ), ] subclass = None superclass = BaseCell - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, i_soma=None, i_dend=None, gc=None, g_ls=None, g_ld=None, g_na=None, g_kdr=None, g_ca=None, g_kahp=None, g_kc=None, g_nmda=None, g_ampa=None, e_na=None, e_ca=None, e_k=None, e_l=None, qd0=None, pp=None, alphac=None, betac=None, cm=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + i_soma=None, + i_dend=None, + gc=None, + g_ls=None, + g_ld=None, + g_na=None, + g_kdr=None, + g_ca=None, + g_kahp=None, + g_kc=None, + g_nmda=None, + g_ampa=None, + e_na=None, + e_ca=None, + e_k=None, + e_l=None, + qd0=None, + pp=None, + alphac=None, + betac=None, + cm=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(PinskyRinzelCA3Cell, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(PinskyRinzelCA3Cell, self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_ + ) self.i_soma = _cast(None, i_soma) self.i_dend = _cast(None, i_dend) self.gc = _cast(None, gc) @@ -18304,147 +33785,264 @@ def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properti self.alphac = _cast(None, alphac) self.betac = _cast(None, betac) self.cm = _cast(None, cm) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = 
getSubclassFromModule_( - CurrentSubclassModule_, PinskyRinzelCA3Cell) + CurrentSubclassModule_, PinskyRinzelCA3Cell + ) if subclass is not None: return subclass(*args_, **kwargs_) if PinskyRinzelCA3Cell.subclass: return PinskyRinzelCA3Cell.subclass(*args_, **kwargs_) else: return PinskyRinzelCA3Cell(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_currentDensity(self, value): # Validate type Nml2Quantity_currentDensity, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_currentDensity_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_currentDensity_patterns_, )) - validate_Nml2Quantity_currentDensity_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(A_per_m2|uA_per_cm2|mA_per_cm2)$']] + self.validate_Nml2Quantity_currentDensity_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_currentDensity_patterns_, + ) + ) + + validate_Nml2Quantity_currentDensity_patterns_ = [ + [ + u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(A_per_m2|uA_per_cm2|mA_per_cm2)$" + ] + ] + def validate_Nml2Quantity_conductanceDensity(self, value): # Validate type Nml2Quantity_conductanceDensity, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_conductanceDensity_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_conductanceDensity_patterns_, )) - validate_Nml2Quantity_conductanceDensity_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S_per_m2|mS_per_cm2|S_per_cm2)$']] + self.validate_Nml2Quantity_conductanceDensity_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_conductanceDensity_patterns_, + ) + ) + + validate_Nml2Quantity_conductanceDensity_patterns_ = [ + [ + u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S_per_m2|mS_per_cm2|S_per_cm2)$" + ] + ] + def validate_Nml2Quantity_voltage(self, value): # Validate type Nml2Quantity_voltage, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_voltage_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_voltage_patterns_, )) - validate_Nml2Quantity_voltage_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$']] + self.validate_Nml2Quantity_voltage_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_voltage_patterns_, + ) + ) + + validate_Nml2Quantity_voltage_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$"] + ] + def validate_Nml2Quantity_none(self, value): # Validate type Nml2Quantity_none, a restriction on xs:string. 
if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_none_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_none_patterns_, )) - validate_Nml2Quantity_none_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?$']] + self.validate_Nml2Quantity_none_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_none_patterns_, + ) + ) + + validate_Nml2Quantity_none_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?$"] + ] + def validate_Nml2Quantity_specificCapacitance(self, value): # Validate type Nml2Quantity_specificCapacitance, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_specificCapacitance_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_specificCapacitance_patterns_, )) - validate_Nml2Quantity_specificCapacitance_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(F_per_m2|uF_per_cm2)$']] + self.validate_Nml2Quantity_specificCapacitance_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_specificCapacitance_patterns_, + ) + ) + + validate_Nml2Quantity_specificCapacitance_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(F_per_m2|uF_per_cm2)$"] + ] + def hasContent_(self): - if ( - super(PinskyRinzelCA3Cell, self).hasContent_() - ): + if super(PinskyRinzelCA3Cell, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PinskyRinzelCA3Cell', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('PinskyRinzelCA3Cell') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="PinskyRinzelCA3Cell", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("PinskyRinzelCA3Cell") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PinskyRinzelCA3Cell') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="PinskyRinzelCA3Cell", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PinskyRinzelCA3Cell', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="PinskyRinzelCA3Cell", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, 
level, already_processed, namespaceprefix_='', name_='PinskyRinzelCA3Cell'): - super(PinskyRinzelCA3Cell, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PinskyRinzelCA3Cell') - if self.i_soma is not None and 'i_soma' not in already_processed: - already_processed.add('i_soma') - outfile.write(' iSoma=%s' % (quote_attrib(self.i_soma), )) - if self.i_dend is not None and 'i_dend' not in already_processed: - already_processed.add('i_dend') - outfile.write(' iDend=%s' % (quote_attrib(self.i_dend), )) - if self.gc is not None and 'gc' not in already_processed: - already_processed.add('gc') - outfile.write(' gc=%s' % (quote_attrib(self.gc), )) - if self.g_ls is not None and 'g_ls' not in already_processed: - already_processed.add('g_ls') - outfile.write(' gLs=%s' % (quote_attrib(self.g_ls), )) - if self.g_ld is not None and 'g_ld' not in already_processed: - already_processed.add('g_ld') - outfile.write(' gLd=%s' % (quote_attrib(self.g_ld), )) - if self.g_na is not None and 'g_na' not in already_processed: - already_processed.add('g_na') - outfile.write(' gNa=%s' % (quote_attrib(self.g_na), )) - if self.g_kdr is not None and 'g_kdr' not in already_processed: - already_processed.add('g_kdr') - outfile.write(' gKdr=%s' % (quote_attrib(self.g_kdr), )) - if self.g_ca is not None and 'g_ca' not in already_processed: - already_processed.add('g_ca') - outfile.write(' gCa=%s' % (quote_attrib(self.g_ca), )) - if self.g_kahp is not None and 'g_kahp' not in already_processed: - already_processed.add('g_kahp') - outfile.write(' gKahp=%s' % (quote_attrib(self.g_kahp), )) - if self.g_kc is not None and 'g_kc' not in already_processed: - already_processed.add('g_kc') - outfile.write(' gKC=%s' % (quote_attrib(self.g_kc), )) - if self.g_nmda is not None and 'g_nmda' not in already_processed: - already_processed.add('g_nmda') - outfile.write(' gNmda=%s' % (quote_attrib(self.g_nmda), )) - if self.g_ampa is not None and 'g_ampa' not in already_processed: - already_processed.add('g_ampa') - outfile.write(' gAmpa=%s' % (quote_attrib(self.g_ampa), )) - if self.e_na is not None and 'e_na' not in already_processed: - already_processed.add('e_na') - outfile.write(' eNa=%s' % (quote_attrib(self.e_na), )) - if self.e_ca is not None and 'e_ca' not in already_processed: - already_processed.add('e_ca') - outfile.write(' eCa=%s' % (quote_attrib(self.e_ca), )) - if self.e_k is not None and 'e_k' not in already_processed: - already_processed.add('e_k') - outfile.write(' eK=%s' % (quote_attrib(self.e_k), )) - if self.e_l is not None and 'e_l' not in already_processed: - already_processed.add('e_l') - outfile.write(' eL=%s' % (quote_attrib(self.e_l), )) - if self.qd0 is not None and 'qd0' not in already_processed: - already_processed.add('qd0') - outfile.write(' qd0=%s' % (quote_attrib(self.qd0), )) - if self.pp is not None and 'pp' not in already_processed: - already_processed.add('pp') - outfile.write(' pp=%s' % (quote_attrib(self.pp), )) - if self.alphac is not None and 'alphac' not in already_processed: - already_processed.add('alphac') - outfile.write(' alphac=%s' % (quote_attrib(self.alphac), )) - if self.betac is not None and 'betac' not in already_processed: - already_processed.add('betac') - outfile.write(' betac=%s' % (quote_attrib(self.betac), )) - if self.cm is not None and 'cm' not in already_processed: - already_processed.add('cm') - outfile.write(' cm=%s' % (quote_attrib(self.cm), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', 
name_='PinskyRinzelCA3Cell', fromsubclass_=False, pretty_print=True): - super(PinskyRinzelCA3Cell, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="PinskyRinzelCA3Cell", + ): + super(PinskyRinzelCA3Cell, self).exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="PinskyRinzelCA3Cell", + ) + if self.i_soma is not None and "i_soma" not in already_processed: + already_processed.add("i_soma") + outfile.write(" iSoma=%s" % (quote_attrib(self.i_soma),)) + if self.i_dend is not None and "i_dend" not in already_processed: + already_processed.add("i_dend") + outfile.write(" iDend=%s" % (quote_attrib(self.i_dend),)) + if self.gc is not None and "gc" not in already_processed: + already_processed.add("gc") + outfile.write(" gc=%s" % (quote_attrib(self.gc),)) + if self.g_ls is not None and "g_ls" not in already_processed: + already_processed.add("g_ls") + outfile.write(" gLs=%s" % (quote_attrib(self.g_ls),)) + if self.g_ld is not None and "g_ld" not in already_processed: + already_processed.add("g_ld") + outfile.write(" gLd=%s" % (quote_attrib(self.g_ld),)) + if self.g_na is not None and "g_na" not in already_processed: + already_processed.add("g_na") + outfile.write(" gNa=%s" % (quote_attrib(self.g_na),)) + if self.g_kdr is not None and "g_kdr" not in already_processed: + already_processed.add("g_kdr") + outfile.write(" gKdr=%s" % (quote_attrib(self.g_kdr),)) + if self.g_ca is not None and "g_ca" not in already_processed: + already_processed.add("g_ca") + outfile.write(" gCa=%s" % (quote_attrib(self.g_ca),)) + if self.g_kahp is not None and "g_kahp" not in already_processed: + already_processed.add("g_kahp") + outfile.write(" gKahp=%s" % (quote_attrib(self.g_kahp),)) + if self.g_kc is not None and "g_kc" not in already_processed: + already_processed.add("g_kc") + outfile.write(" gKC=%s" % (quote_attrib(self.g_kc),)) + if self.g_nmda is not None and "g_nmda" not in already_processed: + already_processed.add("g_nmda") + outfile.write(" gNmda=%s" % (quote_attrib(self.g_nmda),)) + if self.g_ampa is not None and "g_ampa" not in already_processed: + already_processed.add("g_ampa") + outfile.write(" gAmpa=%s" % (quote_attrib(self.g_ampa),)) + if self.e_na is not None and "e_na" not in already_processed: + already_processed.add("e_na") + outfile.write(" eNa=%s" % (quote_attrib(self.e_na),)) + if self.e_ca is not None and "e_ca" not in already_processed: + already_processed.add("e_ca") + outfile.write(" eCa=%s" % (quote_attrib(self.e_ca),)) + if self.e_k is not None and "e_k" not in already_processed: + already_processed.add("e_k") + outfile.write(" eK=%s" % (quote_attrib(self.e_k),)) + if self.e_l is not None and "e_l" not in already_processed: + already_processed.add("e_l") + outfile.write(" eL=%s" % (quote_attrib(self.e_l),)) + if self.qd0 is not None and "qd0" not in already_processed: + already_processed.add("qd0") + outfile.write(" qd0=%s" % (quote_attrib(self.qd0),)) + if self.pp is not None and "pp" not in already_processed: + already_processed.add("pp") + outfile.write(" pp=%s" % (quote_attrib(self.pp),)) + if self.alphac is not None and "alphac" not in already_processed: + already_processed.add("alphac") + outfile.write(" alphac=%s" % (quote_attrib(self.alphac),)) + if self.betac is not None and "betac" not in 
already_processed: + already_processed.add("betac") + outfile.write(" betac=%s" % (quote_attrib(self.betac),)) + if self.cm is not None and "cm" not in already_processed: + already_processed.add("cm") + outfile.write(" cm=%s" % (quote_attrib(self.cm),)) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="PinskyRinzelCA3Cell", + fromsubclass_=False, + pretty_print=True, + ): + super(PinskyRinzelCA3Cell, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -18452,208 +34050,337 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('iSoma', node) - if value is not None and 'iSoma' not in already_processed: - already_processed.add('iSoma') + value = find_attr_value_("iSoma", node) + if value is not None and "iSoma" not in already_processed: + already_processed.add("iSoma") self.i_soma = value - self.validate_Nml2Quantity_currentDensity(self.i_soma) # validate type Nml2Quantity_currentDensity - value = find_attr_value_('iDend', node) - if value is not None and 'iDend' not in already_processed: - already_processed.add('iDend') + self.validate_Nml2Quantity_currentDensity( + self.i_soma + ) # validate type Nml2Quantity_currentDensity + value = find_attr_value_("iDend", node) + if value is not None and "iDend" not in already_processed: + already_processed.add("iDend") self.i_dend = value - self.validate_Nml2Quantity_currentDensity(self.i_dend) # validate type Nml2Quantity_currentDensity - value = find_attr_value_('gc', node) - if value is not None and 'gc' not in already_processed: - already_processed.add('gc') + self.validate_Nml2Quantity_currentDensity( + self.i_dend + ) # validate type Nml2Quantity_currentDensity + value = find_attr_value_("gc", node) + if value is not None and "gc" not in already_processed: + already_processed.add("gc") self.gc = value - self.validate_Nml2Quantity_conductanceDensity(self.gc) # validate type Nml2Quantity_conductanceDensity - value = find_attr_value_('gLs', node) - if value is not None and 'gLs' not in already_processed: - already_processed.add('gLs') + self.validate_Nml2Quantity_conductanceDensity( + self.gc + ) # validate type Nml2Quantity_conductanceDensity + value = find_attr_value_("gLs", node) + if value is not None and "gLs" not in already_processed: + already_processed.add("gLs") self.g_ls = value - self.validate_Nml2Quantity_conductanceDensity(self.g_ls) # validate type Nml2Quantity_conductanceDensity - value = find_attr_value_('gLd', node) - if value is not None and 'gLd' not in already_processed: - already_processed.add('gLd') + self.validate_Nml2Quantity_conductanceDensity( + self.g_ls + ) # validate type Nml2Quantity_conductanceDensity + value = find_attr_value_("gLd", node) + if value is not None and "gLd" not in already_processed: + already_processed.add("gLd") self.g_ld = value - self.validate_Nml2Quantity_conductanceDensity(self.g_ld) # validate type Nml2Quantity_conductanceDensity - value = find_attr_value_('gNa', node) - if value is not None and 'gNa' not in already_processed: - already_processed.add('gNa') + self.validate_Nml2Quantity_conductanceDensity( + self.g_ld + ) # validate type Nml2Quantity_conductanceDensity + value = find_attr_value_("gNa", node) + if value 
is not None and "gNa" not in already_processed: + already_processed.add("gNa") self.g_na = value - self.validate_Nml2Quantity_conductanceDensity(self.g_na) # validate type Nml2Quantity_conductanceDensity - value = find_attr_value_('gKdr', node) - if value is not None and 'gKdr' not in already_processed: - already_processed.add('gKdr') + self.validate_Nml2Quantity_conductanceDensity( + self.g_na + ) # validate type Nml2Quantity_conductanceDensity + value = find_attr_value_("gKdr", node) + if value is not None and "gKdr" not in already_processed: + already_processed.add("gKdr") self.g_kdr = value - self.validate_Nml2Quantity_conductanceDensity(self.g_kdr) # validate type Nml2Quantity_conductanceDensity - value = find_attr_value_('gCa', node) - if value is not None and 'gCa' not in already_processed: - already_processed.add('gCa') + self.validate_Nml2Quantity_conductanceDensity( + self.g_kdr + ) # validate type Nml2Quantity_conductanceDensity + value = find_attr_value_("gCa", node) + if value is not None and "gCa" not in already_processed: + already_processed.add("gCa") self.g_ca = value - self.validate_Nml2Quantity_conductanceDensity(self.g_ca) # validate type Nml2Quantity_conductanceDensity - value = find_attr_value_('gKahp', node) - if value is not None and 'gKahp' not in already_processed: - already_processed.add('gKahp') + self.validate_Nml2Quantity_conductanceDensity( + self.g_ca + ) # validate type Nml2Quantity_conductanceDensity + value = find_attr_value_("gKahp", node) + if value is not None and "gKahp" not in already_processed: + already_processed.add("gKahp") self.g_kahp = value - self.validate_Nml2Quantity_conductanceDensity(self.g_kahp) # validate type Nml2Quantity_conductanceDensity - value = find_attr_value_('gKC', node) - if value is not None and 'gKC' not in already_processed: - already_processed.add('gKC') + self.validate_Nml2Quantity_conductanceDensity( + self.g_kahp + ) # validate type Nml2Quantity_conductanceDensity + value = find_attr_value_("gKC", node) + if value is not None and "gKC" not in already_processed: + already_processed.add("gKC") self.g_kc = value - self.validate_Nml2Quantity_conductanceDensity(self.g_kc) # validate type Nml2Quantity_conductanceDensity - value = find_attr_value_('gNmda', node) - if value is not None and 'gNmda' not in already_processed: - already_processed.add('gNmda') + self.validate_Nml2Quantity_conductanceDensity( + self.g_kc + ) # validate type Nml2Quantity_conductanceDensity + value = find_attr_value_("gNmda", node) + if value is not None and "gNmda" not in already_processed: + already_processed.add("gNmda") self.g_nmda = value - self.validate_Nml2Quantity_conductanceDensity(self.g_nmda) # validate type Nml2Quantity_conductanceDensity - value = find_attr_value_('gAmpa', node) - if value is not None and 'gAmpa' not in already_processed: - already_processed.add('gAmpa') + self.validate_Nml2Quantity_conductanceDensity( + self.g_nmda + ) # validate type Nml2Quantity_conductanceDensity + value = find_attr_value_("gAmpa", node) + if value is not None and "gAmpa" not in already_processed: + already_processed.add("gAmpa") self.g_ampa = value - self.validate_Nml2Quantity_conductanceDensity(self.g_ampa) # validate type Nml2Quantity_conductanceDensity - value = find_attr_value_('eNa', node) - if value is not None and 'eNa' not in already_processed: - already_processed.add('eNa') + self.validate_Nml2Quantity_conductanceDensity( + self.g_ampa + ) # validate type Nml2Quantity_conductanceDensity + value = find_attr_value_("eNa", node) + if value is 
not None and "eNa" not in already_processed: + already_processed.add("eNa") self.e_na = value - self.validate_Nml2Quantity_voltage(self.e_na) # validate type Nml2Quantity_voltage - value = find_attr_value_('eCa', node) - if value is not None and 'eCa' not in already_processed: - already_processed.add('eCa') + self.validate_Nml2Quantity_voltage( + self.e_na + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("eCa", node) + if value is not None and "eCa" not in already_processed: + already_processed.add("eCa") self.e_ca = value - self.validate_Nml2Quantity_voltage(self.e_ca) # validate type Nml2Quantity_voltage - value = find_attr_value_('eK', node) - if value is not None and 'eK' not in already_processed: - already_processed.add('eK') + self.validate_Nml2Quantity_voltage( + self.e_ca + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("eK", node) + if value is not None and "eK" not in already_processed: + already_processed.add("eK") self.e_k = value - self.validate_Nml2Quantity_voltage(self.e_k) # validate type Nml2Quantity_voltage - value = find_attr_value_('eL', node) - if value is not None and 'eL' not in already_processed: - already_processed.add('eL') + self.validate_Nml2Quantity_voltage( + self.e_k + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("eL", node) + if value is not None and "eL" not in already_processed: + already_processed.add("eL") self.e_l = value - self.validate_Nml2Quantity_voltage(self.e_l) # validate type Nml2Quantity_voltage - value = find_attr_value_('qd0', node) - if value is not None and 'qd0' not in already_processed: - already_processed.add('qd0') + self.validate_Nml2Quantity_voltage( + self.e_l + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("qd0", node) + if value is not None and "qd0" not in already_processed: + already_processed.add("qd0") self.qd0 = value - self.validate_Nml2Quantity_none(self.qd0) # validate type Nml2Quantity_none - value = find_attr_value_('pp', node) - if value is not None and 'pp' not in already_processed: - already_processed.add('pp') + self.validate_Nml2Quantity_none(self.qd0) # validate type Nml2Quantity_none + value = find_attr_value_("pp", node) + if value is not None and "pp" not in already_processed: + already_processed.add("pp") self.pp = value - self.validate_Nml2Quantity_none(self.pp) # validate type Nml2Quantity_none - value = find_attr_value_('alphac', node) - if value is not None and 'alphac' not in already_processed: - already_processed.add('alphac') + self.validate_Nml2Quantity_none(self.pp) # validate type Nml2Quantity_none + value = find_attr_value_("alphac", node) + if value is not None and "alphac" not in already_processed: + already_processed.add("alphac") self.alphac = value - self.validate_Nml2Quantity_none(self.alphac) # validate type Nml2Quantity_none - value = find_attr_value_('betac', node) - if value is not None and 'betac' not in already_processed: - already_processed.add('betac') + self.validate_Nml2Quantity_none( + self.alphac + ) # validate type Nml2Quantity_none + value = find_attr_value_("betac", node) + if value is not None and "betac" not in already_processed: + already_processed.add("betac") self.betac = value - self.validate_Nml2Quantity_none(self.betac) # validate type Nml2Quantity_none - value = find_attr_value_('cm', node) - if value is not None and 'cm' not in already_processed: - already_processed.add('cm') + self.validate_Nml2Quantity_none( + self.betac + ) # validate type Nml2Quantity_none + value = find_attr_value_("cm", 
node) + if value is not None and "cm" not in already_processed: + already_processed.add("cm") self.cm = value - self.validate_Nml2Quantity_specificCapacitance(self.cm) # validate type Nml2Quantity_specificCapacitance + self.validate_Nml2Quantity_specificCapacitance( + self.cm + ) # validate type Nml2Quantity_specificCapacitance super(PinskyRinzelCA3Cell, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(PinskyRinzelCA3Cell, self).buildChildren(child_, node, nodeName_, True) pass + + # end class PinskyRinzelCA3Cell class FitzHughNagumo1969Cell(BaseCell): member_data_items_ = [ - MemberSpec_('a', 'Nml2Quantity_none', 0, 0, {'use': u'required'}), - MemberSpec_('b', 'Nml2Quantity_none', 0, 0, {'use': u'required'}), - MemberSpec_('I', 'Nml2Quantity_none', 0, 0, {'use': u'required'}), - MemberSpec_('phi', 'Nml2Quantity_none', 0, 0, {'use': u'required'}), - MemberSpec_('V0', 'Nml2Quantity_none', 0, 0, {'use': u'required'}), - MemberSpec_('W0', 'Nml2Quantity_none', 0, 0, {'use': u'required'}), + MemberSpec_("a", "Nml2Quantity_none", 0, 0, {"use": u"required"}), + MemberSpec_("b", "Nml2Quantity_none", 0, 0, {"use": u"required"}), + MemberSpec_("I", "Nml2Quantity_none", 0, 0, {"use": u"required"}), + MemberSpec_("phi", "Nml2Quantity_none", 0, 0, {"use": u"required"}), + MemberSpec_("V0", "Nml2Quantity_none", 0, 0, {"use": u"required"}), + MemberSpec_("W0", "Nml2Quantity_none", 0, 0, {"use": u"required"}), ] subclass = None superclass = BaseCell - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, a=None, b=None, I=None, phi=None, V0=None, W0=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + a=None, + b=None, + I=None, + phi=None, + V0=None, + W0=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(FitzHughNagumo1969Cell, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(FitzHughNagumo1969Cell, self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_ + ) self.a = _cast(None, a) self.b = _cast(None, b) self.I = _cast(None, I) self.phi = _cast(None, phi) self.V0 = _cast(None, V0) self.W0 = _cast(None, W0) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, FitzHughNagumo1969Cell) + CurrentSubclassModule_, FitzHughNagumo1969Cell + ) if subclass is not None: return subclass(*args_, **kwargs_) if FitzHughNagumo1969Cell.subclass: return FitzHughNagumo1969Cell.subclass(*args_, **kwargs_) else: return FitzHughNagumo1969Cell(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_none(self, value): # Validate type Nml2Quantity_none, a restriction on xs:string. 
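        # Note (illustrative examples only): Nml2Quantity_none strings are plain
        # dimensionless numbers, e.g. "0.7" or "-2e-3", matched by the
        # validate_Nml2Quantity_none_patterns_ regex below; all six attributes of
        # this cell (a, b, I, phi, V0, W0) are declared with this type above.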
if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_none_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_none_patterns_, )) - validate_Nml2Quantity_none_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?$']] + self.validate_Nml2Quantity_none_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_none_patterns_, + ) + ) + + validate_Nml2Quantity_none_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?$"] + ] + def hasContent_(self): - if ( - super(FitzHughNagumo1969Cell, self).hasContent_() - ): + if super(FitzHughNagumo1969Cell, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='FitzHughNagumo1969Cell', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('FitzHughNagumo1969Cell') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="FitzHughNagumo1969Cell", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("FitzHughNagumo1969Cell") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='FitzHughNagumo1969Cell') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="FitzHughNagumo1969Cell", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='FitzHughNagumo1969Cell', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="FitzHughNagumo1969Cell", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='FitzHughNagumo1969Cell'): - super(FitzHughNagumo1969Cell, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='FitzHughNagumo1969Cell') - if self.a is not None and 'a' not in already_processed: - already_processed.add('a') - outfile.write(' a=%s' % (quote_attrib(self.a), )) - if self.b is not None and 'b' not in already_processed: - already_processed.add('b') - outfile.write(' b=%s' % (quote_attrib(self.b), )) - if self.I is not None and 'I' not in already_processed: - already_processed.add('I') - outfile.write(' I=%s' % (quote_attrib(self.I), )) - if self.phi is not None and 'phi' not in already_processed: - already_processed.add('phi') - outfile.write(' phi=%s' % (quote_attrib(self.phi), )) - if self.V0 is not None and 'V0' not in already_processed: - already_processed.add('V0') - outfile.write(' V0=%s' % (quote_attrib(self.V0), )) - if self.W0 is 
not None and 'W0' not in already_processed: - already_processed.add('W0') - outfile.write(' W0=%s' % (quote_attrib(self.W0), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='FitzHughNagumo1969Cell', fromsubclass_=False, pretty_print=True): - super(FitzHughNagumo1969Cell, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="FitzHughNagumo1969Cell", + ): + super(FitzHughNagumo1969Cell, self).exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="FitzHughNagumo1969Cell", + ) + if self.a is not None and "a" not in already_processed: + already_processed.add("a") + outfile.write(" a=%s" % (quote_attrib(self.a),)) + if self.b is not None and "b" not in already_processed: + already_processed.add("b") + outfile.write(" b=%s" % (quote_attrib(self.b),)) + if self.I is not None and "I" not in already_processed: + already_processed.add("I") + outfile.write(" I=%s" % (quote_attrib(self.I),)) + if self.phi is not None and "phi" not in already_processed: + already_processed.add("phi") + outfile.write(" phi=%s" % (quote_attrib(self.phi),)) + if self.V0 is not None and "V0" not in already_processed: + already_processed.add("V0") + outfile.write(" V0=%s" % (quote_attrib(self.V0),)) + if self.W0 is not None and "W0" not in already_processed: + already_processed.add("W0") + outfile.write(" W0=%s" % (quote_attrib(self.W0),)) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="FitzHughNagumo1969Cell", + fromsubclass_=False, + pretty_print=True, + ): + super(FitzHughNagumo1969Cell, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -18661,108 +34388,196 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('a', node) - if value is not None and 'a' not in already_processed: - already_processed.add('a') + value = find_attr_value_("a", node) + if value is not None and "a" not in already_processed: + already_processed.add("a") self.a = value - self.validate_Nml2Quantity_none(self.a) # validate type Nml2Quantity_none - value = find_attr_value_('b', node) - if value is not None and 'b' not in already_processed: - already_processed.add('b') + self.validate_Nml2Quantity_none(self.a) # validate type Nml2Quantity_none + value = find_attr_value_("b", node) + if value is not None and "b" not in already_processed: + already_processed.add("b") self.b = value - self.validate_Nml2Quantity_none(self.b) # validate type Nml2Quantity_none - value = find_attr_value_('I', node) - if value is not None and 'I' not in already_processed: - already_processed.add('I') + self.validate_Nml2Quantity_none(self.b) # validate type Nml2Quantity_none + value = find_attr_value_("I", node) + if value is not None and "I" not in already_processed: + already_processed.add("I") self.I = value - self.validate_Nml2Quantity_none(self.I) # validate type Nml2Quantity_none - value = find_attr_value_('phi', node) - if value is not None and 'phi' not in 
already_processed: - already_processed.add('phi') + self.validate_Nml2Quantity_none(self.I) # validate type Nml2Quantity_none + value = find_attr_value_("phi", node) + if value is not None and "phi" not in already_processed: + already_processed.add("phi") self.phi = value - self.validate_Nml2Quantity_none(self.phi) # validate type Nml2Quantity_none - value = find_attr_value_('V0', node) - if value is not None and 'V0' not in already_processed: - already_processed.add('V0') + self.validate_Nml2Quantity_none(self.phi) # validate type Nml2Quantity_none + value = find_attr_value_("V0", node) + if value is not None and "V0" not in already_processed: + already_processed.add("V0") self.V0 = value - self.validate_Nml2Quantity_none(self.V0) # validate type Nml2Quantity_none - value = find_attr_value_('W0', node) - if value is not None and 'W0' not in already_processed: - already_processed.add('W0') + self.validate_Nml2Quantity_none(self.V0) # validate type Nml2Quantity_none + value = find_attr_value_("W0", node) + if value is not None and "W0" not in already_processed: + already_processed.add("W0") self.W0 = value - self.validate_Nml2Quantity_none(self.W0) # validate type Nml2Quantity_none - super(FitzHughNagumo1969Cell, self).buildAttributes(node, attrs, already_processed) + self.validate_Nml2Quantity_none(self.W0) # validate type Nml2Quantity_none + super(FitzHughNagumo1969Cell, self).buildAttributes( + node, attrs, already_processed + ) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(FitzHughNagumo1969Cell, self).buildChildren(child_, node, nodeName_, True) pass + + # end class FitzHughNagumo1969Cell class FitzHughNagumoCell(BaseCell): member_data_items_ = [ - MemberSpec_('I', 'Nml2Quantity_none', 0, 0, {'use': u'required'}), + MemberSpec_("I", "Nml2Quantity_none", 0, 0, {"use": u"required"}), ] subclass = None superclass = BaseCell - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, I=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + I=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(FitzHughNagumoCell, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(FitzHughNagumoCell, self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_ + ) self.I = _cast(None, I) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, FitzHughNagumoCell) + CurrentSubclassModule_, FitzHughNagumoCell + ) if subclass is not None: return subclass(*args_, **kwargs_) if FitzHughNagumoCell.subclass: return FitzHughNagumoCell.subclass(*args_, **kwargs_) else: return FitzHughNagumoCell(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_none(self, value): # Validate type Nml2Quantity_none, a restriction on xs:string. 
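        # Note (illustrative): this cell declares a single required attribute, I,
        # which is likewise a dimensionless string (e.g. "0.8") checked against the
        # pattern defined below.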
if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_none_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_none_patterns_, )) - validate_Nml2Quantity_none_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?$']] + self.validate_Nml2Quantity_none_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_none_patterns_, + ) + ) + + validate_Nml2Quantity_none_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?$"] + ] + def hasContent_(self): - if ( - super(FitzHughNagumoCell, self).hasContent_() - ): + if super(FitzHughNagumoCell, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='FitzHughNagumoCell', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('FitzHughNagumoCell') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="FitzHughNagumoCell", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("FitzHughNagumoCell") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='FitzHughNagumoCell') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="FitzHughNagumoCell", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='FitzHughNagumoCell', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="FitzHughNagumoCell", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='FitzHughNagumoCell'): - super(FitzHughNagumoCell, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='FitzHughNagumoCell') - if self.I is not None and 'I' not in already_processed: - already_processed.add('I') - outfile.write(' I=%s' % (quote_attrib(self.I), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='FitzHughNagumoCell', fromsubclass_=False, pretty_print=True): - super(FitzHughNagumoCell, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="FitzHughNagumoCell", + ): + super(FitzHughNagumoCell, self).exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + 
name_="FitzHughNagumoCell", + ) + if self.I is not None and "I" not in already_processed: + already_processed.add("I") + outfile.write(" I=%s" % (quote_attrib(self.I),)) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="FitzHughNagumoCell", + fromsubclass_=False, + pretty_print=True, + ): + super(FitzHughNagumoCell, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -18770,90 +34585,185 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('I', node) - if value is not None and 'I' not in already_processed: - already_processed.add('I') + value = find_attr_value_("I", node) + if value is not None and "I" not in already_processed: + already_processed.add("I") self.I = value - self.validate_Nml2Quantity_none(self.I) # validate type Nml2Quantity_none + self.validate_Nml2Quantity_none(self.I) # validate type Nml2Quantity_none super(FitzHughNagumoCell, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(FitzHughNagumoCell, self).buildChildren(child_, node, nodeName_, True) pass + + # end class FitzHughNagumoCell class BaseCellMembPotCap(BaseCell): """This is to prevent it conflicting with attribute c (lowercase) e.g. in izhikevichCell2007""" + member_data_items_ = [ - MemberSpec_('C', 'Nml2Quantity_capacitance', 0, 0, {'use': u'required'}), + MemberSpec_("C", "Nml2Quantity_capacitance", 0, 0, {"use": u"required"}), ] subclass = None superclass = BaseCell - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, C=None, extensiontype_=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + C=None, + extensiontype_=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(BaseCellMembPotCap, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, extensiontype_, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(BaseCellMembPotCap, self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + extensiontype_, + **kwargs_ + ) self.C = _cast(None, C) self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, BaseCellMembPotCap) + CurrentSubclassModule_, BaseCellMembPotCap + ) if subclass is not None: return subclass(*args_, **kwargs_) if BaseCellMembPotCap.subclass: return BaseCellMembPotCap.subclass(*args_, **kwargs_) else: return BaseCellMembPotCap(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_capacitance(self, value): # Validate type Nml2Quantity_capacitance, a restriction on xs:string. 
if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_capacitance_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_capacitance_patterns_, )) - validate_Nml2Quantity_capacitance_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(F|uF|nF|pF)$']] + self.validate_Nml2Quantity_capacitance_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_capacitance_patterns_, + ) + ) + + validate_Nml2Quantity_capacitance_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(F|uF|nF|pF)$"] + ] + def hasContent_(self): - if ( - super(BaseCellMembPotCap, self).hasContent_() - ): + if super(BaseCellMembPotCap, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseCellMembPotCap', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('BaseCellMembPotCap') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="BaseCellMembPotCap", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("BaseCellMembPotCap") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseCellMembPotCap') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="BaseCellMembPotCap", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BaseCellMembPotCap', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="BaseCellMembPotCap", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BaseCellMembPotCap'): - super(BaseCellMembPotCap, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseCellMembPotCap') - if self.C is not None and 'C' not in already_processed: - already_processed.add('C') - outfile.write(' C=%s' % (quote_attrib(self.C), )) - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="BaseCellMembPotCap", + ): + super(BaseCellMembPotCap, self).exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="BaseCellMembPotCap", + ) + if self.C is not None and "C" not in already_processed: + 
already_processed.add("C") + outfile.write(" C=%s" % (quote_attrib(self.C),)) + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseCellMembPotCap', fromsubclass_=False, pretty_print=True): - super(BaseCellMembPotCap, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="BaseCellMembPotCap", + fromsubclass_=False, + pretty_print=True, + ): + super(BaseCellMembPotCap, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -18861,119 +34771,213 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('C', node) - if value is not None and 'C' not in already_processed: - already_processed.add('C') + value = find_attr_value_("C", node) + if value is not None and "C" not in already_processed: + already_processed.add("C") self.C = value - self.validate_Nml2Quantity_capacitance(self.C) # validate type Nml2Quantity_capacitance - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + self.validate_Nml2Quantity_capacitance( + self.C + ) # validate type Nml2Quantity_capacitance + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value super(BaseCellMembPotCap, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(BaseCellMembPotCap, self).buildChildren(child_, node, nodeName_, True) pass + + # end class BaseCellMembPotCap class IzhikevichCell(BaseCell): member_data_items_ = [ - MemberSpec_('v0', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('thresh', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('a', 'Nml2Quantity_none', 0, 0, {'use': u'required'}), - MemberSpec_('b', 'Nml2Quantity_none', 0, 0, {'use': u'required'}), - MemberSpec_('c', 'Nml2Quantity_none', 0, 0, {'use': u'required'}), - MemberSpec_('d', 'Nml2Quantity_none', 0, 0, {'use': u'required'}), + MemberSpec_("v0", "Nml2Quantity_voltage", 0, 0, {"use": u"required"}), + MemberSpec_("thresh", "Nml2Quantity_voltage", 0, 0, {"use": u"required"}), + MemberSpec_("a", "Nml2Quantity_none", 0, 0, {"use": u"required"}), + MemberSpec_("b", "Nml2Quantity_none", 0, 0, {"use": u"required"}), + MemberSpec_("c", "Nml2Quantity_none", 0, 0, {"use": u"required"}), + MemberSpec_("d", "Nml2Quantity_none", 0, 0, {"use": u"required"}), ] subclass = None superclass = BaseCell - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, v0=None, thresh=None, a=None, b=None, c=None, d=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + v0=None, + thresh=None, + a=None, + 
b=None, + c=None, + d=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(IzhikevichCell, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(IzhikevichCell, self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_ + ) self.v0 = _cast(None, v0) self.thresh = _cast(None, thresh) self.a = _cast(None, a) self.b = _cast(None, b) self.c = _cast(None, c) self.d = _cast(None, d) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, IzhikevichCell) + subclass = getSubclassFromModule_(CurrentSubclassModule_, IzhikevichCell) if subclass is not None: return subclass(*args_, **kwargs_) if IzhikevichCell.subclass: return IzhikevichCell.subclass(*args_, **kwargs_) else: return IzhikevichCell(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_voltage(self, value): # Validate type Nml2Quantity_voltage, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_voltage_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_voltage_patterns_, )) - validate_Nml2Quantity_voltage_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$']] + self.validate_Nml2Quantity_voltage_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_voltage_patterns_, + ) + ) + + validate_Nml2Quantity_voltage_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$"] + ] + def validate_Nml2Quantity_none(self, value): # Validate type Nml2Quantity_none, a restriction on xs:string. 
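        # Note (illustrative): a, b, c and d are dimensionless strings here
        # (e.g. "0.02", "8.0"), while v0 and thresh are voltage strings handled by
        # validate_Nml2Quantity_voltage above.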
if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_none_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_none_patterns_, )) - validate_Nml2Quantity_none_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?$']] + self.validate_Nml2Quantity_none_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_none_patterns_, + ) + ) + + validate_Nml2Quantity_none_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?$"] + ] + def hasContent_(self): - if ( - super(IzhikevichCell, self).hasContent_() - ): + if super(IzhikevichCell, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IzhikevichCell', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('IzhikevichCell') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="IzhikevichCell", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("IzhikevichCell") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IzhikevichCell') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="IzhikevichCell" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='IzhikevichCell', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="IzhikevichCell", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='IzhikevichCell'): - super(IzhikevichCell, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IzhikevichCell') - if self.v0 is not None and 'v0' not in already_processed: - already_processed.add('v0') - outfile.write(' v0=%s' % (quote_attrib(self.v0), )) - if self.thresh is not None and 'thresh' not in already_processed: - already_processed.add('thresh') - outfile.write(' thresh=%s' % (quote_attrib(self.thresh), )) - if self.a is not None and 'a' not in already_processed: - already_processed.add('a') - outfile.write(' a=%s' % (quote_attrib(self.a), )) - if self.b is not None and 'b' not in already_processed: - already_processed.add('b') - outfile.write(' b=%s' % (quote_attrib(self.b), )) - if self.c is not None and 'c' not in already_processed: - already_processed.add('c') - outfile.write(' c=%s' % (quote_attrib(self.c), )) - if self.d is not None and 'd' not in already_processed: - already_processed.add('d') - outfile.write(' d=%s' % (quote_attrib(self.d), )) - def 
exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IzhikevichCell', fromsubclass_=False, pretty_print=True): - super(IzhikevichCell, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="IzhikevichCell", + ): + super(IzhikevichCell, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="IzhikevichCell" + ) + if self.v0 is not None and "v0" not in already_processed: + already_processed.add("v0") + outfile.write(" v0=%s" % (quote_attrib(self.v0),)) + if self.thresh is not None and "thresh" not in already_processed: + already_processed.add("thresh") + outfile.write(" thresh=%s" % (quote_attrib(self.thresh),)) + if self.a is not None and "a" not in already_processed: + already_processed.add("a") + outfile.write(" a=%s" % (quote_attrib(self.a),)) + if self.b is not None and "b" not in already_processed: + already_processed.add("b") + outfile.write(" b=%s" % (quote_attrib(self.b),)) + if self.c is not None and "c" not in already_processed: + already_processed.add("c") + outfile.write(" c=%s" % (quote_attrib(self.c),)) + if self.d is not None and "d" not in already_processed: + already_processed.add("d") + outfile.write(" d=%s" % (quote_attrib(self.d),)) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="IzhikevichCell", + fromsubclass_=False, + pretty_print=True, + ): + super(IzhikevichCell, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -18981,147 +34985,265 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('v0', node) - if value is not None and 'v0' not in already_processed: - already_processed.add('v0') + value = find_attr_value_("v0", node) + if value is not None and "v0" not in already_processed: + already_processed.add("v0") self.v0 = value - self.validate_Nml2Quantity_voltage(self.v0) # validate type Nml2Quantity_voltage - value = find_attr_value_('thresh', node) - if value is not None and 'thresh' not in already_processed: - already_processed.add('thresh') + self.validate_Nml2Quantity_voltage( + self.v0 + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("thresh", node) + if value is not None and "thresh" not in already_processed: + already_processed.add("thresh") self.thresh = value - self.validate_Nml2Quantity_voltage(self.thresh) # validate type Nml2Quantity_voltage - value = find_attr_value_('a', node) - if value is not None and 'a' not in already_processed: - already_processed.add('a') + self.validate_Nml2Quantity_voltage( + self.thresh + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("a", node) + if value is not None and "a" not in already_processed: + already_processed.add("a") self.a = value - self.validate_Nml2Quantity_none(self.a) # validate type Nml2Quantity_none - value = find_attr_value_('b', node) - if value is not None and 'b' not in already_processed: - already_processed.add('b') + self.validate_Nml2Quantity_none(self.a) # validate type 
Nml2Quantity_none + value = find_attr_value_("b", node) + if value is not None and "b" not in already_processed: + already_processed.add("b") self.b = value - self.validate_Nml2Quantity_none(self.b) # validate type Nml2Quantity_none - value = find_attr_value_('c', node) - if value is not None and 'c' not in already_processed: - already_processed.add('c') + self.validate_Nml2Quantity_none(self.b) # validate type Nml2Quantity_none + value = find_attr_value_("c", node) + if value is not None and "c" not in already_processed: + already_processed.add("c") self.c = value - self.validate_Nml2Quantity_none(self.c) # validate type Nml2Quantity_none - value = find_attr_value_('d', node) - if value is not None and 'd' not in already_processed: - already_processed.add('d') + self.validate_Nml2Quantity_none(self.c) # validate type Nml2Quantity_none + value = find_attr_value_("d", node) + if value is not None and "d" not in already_processed: + already_processed.add("d") self.d = value - self.validate_Nml2Quantity_none(self.d) # validate type Nml2Quantity_none + self.validate_Nml2Quantity_none(self.d) # validate type Nml2Quantity_none super(IzhikevichCell, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(IzhikevichCell, self).buildChildren(child_, node, nodeName_, True) pass + + # end class IzhikevichCell class IafCell(BaseCell): member_data_items_ = [ - MemberSpec_('leak_reversal', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('thresh', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('reset', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('C', 'Nml2Quantity_capacitance', 0, 0, {'use': u'required'}), - MemberSpec_('leak_conductance', 'Nml2Quantity_conductance', 0, 0, {'use': u'required'}), + MemberSpec_( + "leak_reversal", "Nml2Quantity_voltage", 0, 0, {"use": u"required"} + ), + MemberSpec_("thresh", "Nml2Quantity_voltage", 0, 0, {"use": u"required"}), + MemberSpec_("reset", "Nml2Quantity_voltage", 0, 0, {"use": u"required"}), + MemberSpec_("C", "Nml2Quantity_capacitance", 0, 0, {"use": u"required"}), + MemberSpec_( + "leak_conductance", "Nml2Quantity_conductance", 0, 0, {"use": u"required"} + ), ] subclass = None superclass = BaseCell - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, leak_reversal=None, thresh=None, reset=None, C=None, leak_conductance=None, extensiontype_=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + leak_reversal=None, + thresh=None, + reset=None, + C=None, + leak_conductance=None, + extensiontype_=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(IafCell, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, extensiontype_, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(IafCell, self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + extensiontype_, + **kwargs_ + ) self.leak_reversal = _cast(None, leak_reversal) self.thresh = _cast(None, thresh) self.reset = _cast(None, reset) self.C = _cast(None, C) self.leak_conductance = _cast(None, leak_conductance) self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, IafCell) + subclass = 
getSubclassFromModule_(CurrentSubclassModule_, IafCell) if subclass is not None: return subclass(*args_, **kwargs_) if IafCell.subclass: return IafCell.subclass(*args_, **kwargs_) else: return IafCell(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_voltage(self, value): # Validate type Nml2Quantity_voltage, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_voltage_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_voltage_patterns_, )) - validate_Nml2Quantity_voltage_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$']] + self.validate_Nml2Quantity_voltage_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_voltage_patterns_, + ) + ) + + validate_Nml2Quantity_voltage_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$"] + ] + def validate_Nml2Quantity_capacitance(self, value): # Validate type Nml2Quantity_capacitance, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_capacitance_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_capacitance_patterns_, )) - validate_Nml2Quantity_capacitance_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(F|uF|nF|pF)$']] + self.validate_Nml2Quantity_capacitance_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_capacitance_patterns_, + ) + ) + + validate_Nml2Quantity_capacitance_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(F|uF|nF|pF)$"] + ] + def validate_Nml2Quantity_conductance(self, value): # Validate type Nml2Quantity_conductance, a restriction on xs:string. 
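        # Note (illustrative examples only): conductance strings such as "10nS" or
        # "0.3 mS" are accepted; the pattern below limits units to S, mS, uS, nS
        # and pS. leak_conductance is the attribute validated with this method.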
if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_conductance_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_conductance_patterns_, )) - validate_Nml2Quantity_conductance_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S|mS|uS|nS|pS)$']] + self.validate_Nml2Quantity_conductance_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_conductance_patterns_, + ) + ) + + validate_Nml2Quantity_conductance_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S|mS|uS|nS|pS)$"] + ] + def hasContent_(self): - if ( - super(IafCell, self).hasContent_() - ): + if super(IafCell, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IafCell', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('IafCell') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="IafCell", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("IafCell") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IafCell') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="IafCell" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='IafCell', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="IafCell", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='IafCell'): - super(IafCell, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IafCell') - if self.leak_reversal is not None and 'leak_reversal' not in already_processed: - already_processed.add('leak_reversal') - outfile.write(' leakReversal=%s' % (quote_attrib(self.leak_reversal), )) - if self.thresh is not None and 'thresh' not in already_processed: - already_processed.add('thresh') - outfile.write(' thresh=%s' % (quote_attrib(self.thresh), )) - if self.reset is not None and 'reset' not in already_processed: - already_processed.add('reset') - outfile.write(' reset=%s' % (quote_attrib(self.reset), )) - if self.C is not None and 'C' not in already_processed: - already_processed.add('C') - outfile.write(' C=%s' % (quote_attrib(self.C), )) - if self.leak_conductance is not None and 'leak_conductance' not in already_processed: - already_processed.add('leak_conductance') - outfile.write(' leakConductance=%s' % (quote_attrib(self.leak_conductance), )) - if 
self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="IafCell" + ): + super(IafCell, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="IafCell" + ) + if self.leak_reversal is not None and "leak_reversal" not in already_processed: + already_processed.add("leak_reversal") + outfile.write(" leakReversal=%s" % (quote_attrib(self.leak_reversal),)) + if self.thresh is not None and "thresh" not in already_processed: + already_processed.add("thresh") + outfile.write(" thresh=%s" % (quote_attrib(self.thresh),)) + if self.reset is not None and "reset" not in already_processed: + already_processed.add("reset") + outfile.write(" reset=%s" % (quote_attrib(self.reset),)) + if self.C is not None and "C" not in already_processed: + already_processed.add("C") + outfile.write(" C=%s" % (quote_attrib(self.C),)) + if ( + self.leak_conductance is not None + and "leak_conductance" not in already_processed + ): + already_processed.add("leak_conductance") + outfile.write( + " leakConductance=%s" % (quote_attrib(self.leak_conductance),) + ) + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IafCell', fromsubclass_=False, pretty_print=True): - super(IafCell, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="IafCell", + fromsubclass_=False, + pretty_print=True, + ): + super(IafCell, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -19129,134 +35251,239 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('leakReversal', node) - if value is not None and 'leakReversal' not in already_processed: - already_processed.add('leakReversal') + value = find_attr_value_("leakReversal", node) + if value is not None and "leakReversal" not in already_processed: + already_processed.add("leakReversal") self.leak_reversal = value - self.validate_Nml2Quantity_voltage(self.leak_reversal) # validate type Nml2Quantity_voltage - value = find_attr_value_('thresh', node) - if value is not None and 'thresh' not in already_processed: - already_processed.add('thresh') + self.validate_Nml2Quantity_voltage( + self.leak_reversal + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("thresh", node) + if value is not None and "thresh" not in already_processed: + already_processed.add("thresh") self.thresh = value - self.validate_Nml2Quantity_voltage(self.thresh) # validate type Nml2Quantity_voltage - value = find_attr_value_('reset', node) - if value is not None and 'reset' not in already_processed: - already_processed.add('reset') + self.validate_Nml2Quantity_voltage( + self.thresh + ) # validate type 
Nml2Quantity_voltage + value = find_attr_value_("reset", node) + if value is not None and "reset" not in already_processed: + already_processed.add("reset") self.reset = value - self.validate_Nml2Quantity_voltage(self.reset) # validate type Nml2Quantity_voltage - value = find_attr_value_('C', node) - if value is not None and 'C' not in already_processed: - already_processed.add('C') + self.validate_Nml2Quantity_voltage( + self.reset + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("C", node) + if value is not None and "C" not in already_processed: + already_processed.add("C") self.C = value - self.validate_Nml2Quantity_capacitance(self.C) # validate type Nml2Quantity_capacitance - value = find_attr_value_('leakConductance', node) - if value is not None and 'leakConductance' not in already_processed: - already_processed.add('leakConductance') + self.validate_Nml2Quantity_capacitance( + self.C + ) # validate type Nml2Quantity_capacitance + value = find_attr_value_("leakConductance", node) + if value is not None and "leakConductance" not in already_processed: + already_processed.add("leakConductance") self.leak_conductance = value - self.validate_Nml2Quantity_conductance(self.leak_conductance) # validate type Nml2Quantity_conductance - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + self.validate_Nml2Quantity_conductance( + self.leak_conductance + ) # validate type Nml2Quantity_conductance + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value super(IafCell, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(IafCell, self).buildChildren(child_, node, nodeName_, True) pass + + # end class IafCell class IafTauCell(BaseCell): member_data_items_ = [ - MemberSpec_('leak_reversal', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('thresh', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('reset', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('tau', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), + MemberSpec_( + "leak_reversal", "Nml2Quantity_voltage", 0, 0, {"use": u"required"} + ), + MemberSpec_("thresh", "Nml2Quantity_voltage", 0, 0, {"use": u"required"}), + MemberSpec_("reset", "Nml2Quantity_voltage", 0, 0, {"use": u"required"}), + MemberSpec_("tau", "Nml2Quantity_time", 0, 0, {"use": u"required"}), ] subclass = None superclass = BaseCell - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, leak_reversal=None, thresh=None, reset=None, tau=None, extensiontype_=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + leak_reversal=None, + thresh=None, + reset=None, + tau=None, + extensiontype_=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(IafTauCell, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, extensiontype_, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(IafTauCell, self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + extensiontype_, + **kwargs_ + ) self.leak_reversal = _cast(None, leak_reversal) self.thresh = _cast(None, thresh) 
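# -------------------------------------------------------------------------
# Editor's note, not part of this patch: a minimal usage sketch for the
# generated IafCell class completed just above. The attribute names
# (leak_reversal, thresh, reset, C, leak_conductance) and the Nml2Quantity_*
# unit patterns come from the class definition itself; the NeuroMLDocument
# container `iaf_cells` and the NeuroMLWriter call follow the libNeuroML
# examples and should be treated as assumptions here.
from neuroml import NeuroMLDocument, IafCell
from neuroml.writers import NeuroMLWriter

nml_doc = NeuroMLDocument(id="iaf_example")
iaf0 = IafCell(id="iaf0", C="1.0 nF",
               thresh="-50mV", reset="-65mV",
               leak_conductance="10 nS", leak_reversal="-65mV")
nml_doc.iaf_cells.append(iaf0)
NeuroMLWriter.write(nml_doc, "iaf_example.nml")
# The quantity strings are checked against the XSD patterns shown above
# (e.g. "-50mV" must match the voltage pattern); the generated validators are
# invoked in buildAttributes when a document is parsed back in, and emit
# warnings rather than raising on a mismatch.
# -------------------------------------------------------------------------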
self.reset = _cast(None, reset) self.tau = _cast(None, tau) self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, IafTauCell) + subclass = getSubclassFromModule_(CurrentSubclassModule_, IafTauCell) if subclass is not None: return subclass(*args_, **kwargs_) if IafTauCell.subclass: return IafTauCell.subclass(*args_, **kwargs_) else: return IafTauCell(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_voltage(self, value): # Validate type Nml2Quantity_voltage, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_voltage_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_voltage_patterns_, )) - validate_Nml2Quantity_voltage_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$']] + self.validate_Nml2Quantity_voltage_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_voltage_patterns_, + ) + ) + + validate_Nml2Quantity_voltage_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$"] + ] + def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] + self.validate_Nml2Quantity_time_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_time_patterns_, + ) + ) + + validate_Nml2Quantity_time_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$"] + ] + def hasContent_(self): - if ( - super(IafTauCell, self).hasContent_() - ): + if super(IafTauCell, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IafTauCell', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('IafTauCell') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="IafTauCell", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("IafTauCell") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IafTauCell') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="IafTauCell" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, 
namespacedef_, name_='IafTauCell', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="IafTauCell", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='IafTauCell'): - super(IafTauCell, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IafTauCell') - if self.leak_reversal is not None and 'leak_reversal' not in already_processed: - already_processed.add('leak_reversal') - outfile.write(' leakReversal=%s' % (quote_attrib(self.leak_reversal), )) - if self.thresh is not None and 'thresh' not in already_processed: - already_processed.add('thresh') - outfile.write(' thresh=%s' % (quote_attrib(self.thresh), )) - if self.reset is not None and 'reset' not in already_processed: - already_processed.add('reset') - outfile.write(' reset=%s' % (quote_attrib(self.reset), )) - if self.tau is not None and 'tau' not in already_processed: - already_processed.add('tau') - outfile.write(' tau=%s' % (quote_attrib(self.tau), )) - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="IafTauCell" + ): + super(IafTauCell, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="IafTauCell" + ) + if self.leak_reversal is not None and "leak_reversal" not in already_processed: + already_processed.add("leak_reversal") + outfile.write(" leakReversal=%s" % (quote_attrib(self.leak_reversal),)) + if self.thresh is not None and "thresh" not in already_processed: + already_processed.add("thresh") + outfile.write(" thresh=%s" % (quote_attrib(self.thresh),)) + if self.reset is not None and "reset" not in already_processed: + already_processed.add("reset") + outfile.write(" reset=%s" % (quote_attrib(self.reset),)) + if self.tau is not None and "tau" not in already_processed: + already_processed.add("tau") + outfile.write(" tau=%s" % (quote_attrib(self.tau),)) + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IafTauCell', fromsubclass_=False, pretty_print=True): - super(IafTauCell, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="IafTauCell", + fromsubclass_=False, + pretty_print=True, + ): + super(IafTauCell, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -19264,138 +35491,249 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('leakReversal', node) - if 
value is not None and 'leakReversal' not in already_processed: - already_processed.add('leakReversal') + value = find_attr_value_("leakReversal", node) + if value is not None and "leakReversal" not in already_processed: + already_processed.add("leakReversal") self.leak_reversal = value - self.validate_Nml2Quantity_voltage(self.leak_reversal) # validate type Nml2Quantity_voltage - value = find_attr_value_('thresh', node) - if value is not None and 'thresh' not in already_processed: - already_processed.add('thresh') + self.validate_Nml2Quantity_voltage( + self.leak_reversal + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("thresh", node) + if value is not None and "thresh" not in already_processed: + already_processed.add("thresh") self.thresh = value - self.validate_Nml2Quantity_voltage(self.thresh) # validate type Nml2Quantity_voltage - value = find_attr_value_('reset', node) - if value is not None and 'reset' not in already_processed: - already_processed.add('reset') + self.validate_Nml2Quantity_voltage( + self.thresh + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("reset", node) + if value is not None and "reset" not in already_processed: + already_processed.add("reset") self.reset = value - self.validate_Nml2Quantity_voltage(self.reset) # validate type Nml2Quantity_voltage - value = find_attr_value_('tau', node) - if value is not None and 'tau' not in already_processed: - already_processed.add('tau') + self.validate_Nml2Quantity_voltage( + self.reset + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("tau", node) + if value is not None and "tau" not in already_processed: + already_processed.add("tau") self.tau = value - self.validate_Nml2Quantity_time(self.tau) # validate type Nml2Quantity_time - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + self.validate_Nml2Quantity_time(self.tau) # validate type Nml2Quantity_time + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value super(IafTauCell, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(IafTauCell, self).buildChildren(child_, node, nodeName_, True) pass + + # end class IafTauCell class GradedSynapse(BaseSynapse): """Based on synapse in Methods of http://www.nature.com/neuro/journal/v7/n12/abs/nn1352.html.""" + member_data_items_ = [ - MemberSpec_('conductance', 'Nml2Quantity_conductance', 0, 0, {'use': u'required'}), - MemberSpec_('delta', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('Vth', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('k', 'Nml2Quantity_pertime', 0, 0, {'use': u'required'}), - MemberSpec_('erev', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), + MemberSpec_( + "conductance", "Nml2Quantity_conductance", 0, 0, {"use": u"required"} + ), + MemberSpec_("delta", "Nml2Quantity_voltage", 0, 0, {"use": u"required"}), + MemberSpec_("Vth", "Nml2Quantity_voltage", 0, 0, {"use": u"required"}), + MemberSpec_("k", "Nml2Quantity_pertime", 0, 0, {"use": u"required"}), + MemberSpec_("erev", "Nml2Quantity_voltage", 0, 0, {"use": u"required"}), ] subclass = None superclass = BaseSynapse - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, conductance=None, delta=None, Vth=None, k=None, 
erev=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + conductance=None, + delta=None, + Vth=None, + k=None, + erev=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(GradedSynapse, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(GradedSynapse, self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_ + ) self.conductance = _cast(None, conductance) self.delta = _cast(None, delta) self.Vth = _cast(None, Vth) self.k = _cast(None, k) self.erev = _cast(None, erev) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, GradedSynapse) + subclass = getSubclassFromModule_(CurrentSubclassModule_, GradedSynapse) if subclass is not None: return subclass(*args_, **kwargs_) if GradedSynapse.subclass: return GradedSynapse.subclass(*args_, **kwargs_) else: return GradedSynapse(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_conductance(self, value): # Validate type Nml2Quantity_conductance, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_conductance_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_conductance_patterns_, )) - validate_Nml2Quantity_conductance_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S|mS|uS|nS|pS)$']] + self.validate_Nml2Quantity_conductance_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_conductance_patterns_, + ) + ) + + validate_Nml2Quantity_conductance_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S|mS|uS|nS|pS)$"] + ] + def validate_Nml2Quantity_voltage(self, value): # Validate type Nml2Quantity_voltage, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_voltage_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_voltage_patterns_, )) - validate_Nml2Quantity_voltage_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$']] + self.validate_Nml2Quantity_voltage_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_voltage_patterns_, + ) + ) + + validate_Nml2Quantity_voltage_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$"] + ] + def validate_Nml2Quantity_pertime(self, value): # Validate type Nml2Quantity_pertime, a restriction on xs:string. 
if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_pertime_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_pertime_patterns_, )) - validate_Nml2Quantity_pertime_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(per_s|per_ms|Hz)$']] + self.validate_Nml2Quantity_pertime_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_pertime_patterns_, + ) + ) + + validate_Nml2Quantity_pertime_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(per_s|per_ms|Hz)$"] + ] + def hasContent_(self): - if ( - super(GradedSynapse, self).hasContent_() - ): + if super(GradedSynapse, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GradedSynapse', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('GradedSynapse') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="GradedSynapse", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("GradedSynapse") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GradedSynapse') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="GradedSynapse" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='GradedSynapse', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="GradedSynapse", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GradedSynapse'): - super(GradedSynapse, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GradedSynapse') - if self.conductance is not None and 'conductance' not in already_processed: - already_processed.add('conductance') - outfile.write(' conductance=%s' % (quote_attrib(self.conductance), )) - if self.delta is not None and 'delta' not in already_processed: - already_processed.add('delta') - outfile.write(' delta=%s' % (quote_attrib(self.delta), )) - if self.Vth is not None and 'Vth' not in already_processed: - already_processed.add('Vth') - outfile.write(' Vth=%s' % (quote_attrib(self.Vth), )) - if self.k is not None and 'k' not in already_processed: - already_processed.add('k') - outfile.write(' k=%s' % (quote_attrib(self.k), )) - if self.erev is not None and 'erev' not in already_processed: - already_processed.add('erev') - outfile.write(' erev=%s' % (quote_attrib(self.erev), )) - def exportChildren(self, 
outfile, level, namespaceprefix_='', namespacedef_='', name_='GradedSynapse', fromsubclass_=False, pretty_print=True): - super(GradedSynapse, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="GradedSynapse", + ): + super(GradedSynapse, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="GradedSynapse" + ) + if self.conductance is not None and "conductance" not in already_processed: + already_processed.add("conductance") + outfile.write(" conductance=%s" % (quote_attrib(self.conductance),)) + if self.delta is not None and "delta" not in already_processed: + already_processed.add("delta") + outfile.write(" delta=%s" % (quote_attrib(self.delta),)) + if self.Vth is not None and "Vth" not in already_processed: + already_processed.add("Vth") + outfile.write(" Vth=%s" % (quote_attrib(self.Vth),)) + if self.k is not None and "k" not in already_processed: + already_processed.add("k") + outfile.write(" k=%s" % (quote_attrib(self.k),)) + if self.erev is not None and "erev" not in already_processed: + already_processed.add("erev") + outfile.write(" erev=%s" % (quote_attrib(self.erev),)) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="GradedSynapse", + fromsubclass_=False, + pretty_print=True, + ): + super(GradedSynapse, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -19403,104 +35741,203 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('conductance', node) - if value is not None and 'conductance' not in already_processed: - already_processed.add('conductance') + value = find_attr_value_("conductance", node) + if value is not None and "conductance" not in already_processed: + already_processed.add("conductance") self.conductance = value - self.validate_Nml2Quantity_conductance(self.conductance) # validate type Nml2Quantity_conductance - value = find_attr_value_('delta', node) - if value is not None and 'delta' not in already_processed: - already_processed.add('delta') + self.validate_Nml2Quantity_conductance( + self.conductance + ) # validate type Nml2Quantity_conductance + value = find_attr_value_("delta", node) + if value is not None and "delta" not in already_processed: + already_processed.add("delta") self.delta = value - self.validate_Nml2Quantity_voltage(self.delta) # validate type Nml2Quantity_voltage - value = find_attr_value_('Vth', node) - if value is not None and 'Vth' not in already_processed: - already_processed.add('Vth') + self.validate_Nml2Quantity_voltage( + self.delta + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("Vth", node) + if value is not None and "Vth" not in already_processed: + already_processed.add("Vth") self.Vth = value - self.validate_Nml2Quantity_voltage(self.Vth) # validate type Nml2Quantity_voltage - value = find_attr_value_('k', node) - if value is not None and 'k' not in already_processed: - already_processed.add('k') + self.validate_Nml2Quantity_voltage( + self.Vth + ) # 
validate type Nml2Quantity_voltage + value = find_attr_value_("k", node) + if value is not None and "k" not in already_processed: + already_processed.add("k") self.k = value - self.validate_Nml2Quantity_pertime(self.k) # validate type Nml2Quantity_pertime - value = find_attr_value_('erev', node) - if value is not None and 'erev' not in already_processed: - already_processed.add('erev') + self.validate_Nml2Quantity_pertime( + self.k + ) # validate type Nml2Quantity_pertime + value = find_attr_value_("erev", node) + if value is not None and "erev" not in already_processed: + already_processed.add("erev") self.erev = value - self.validate_Nml2Quantity_voltage(self.erev) # validate type Nml2Quantity_voltage + self.validate_Nml2Quantity_voltage( + self.erev + ) # validate type Nml2Quantity_voltage super(GradedSynapse, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(GradedSynapse, self).buildChildren(child_, node, nodeName_, True) pass + + # end class GradedSynapse class LinearGradedSynapse(BaseSynapse): """Behaves just like a one way gap junction.""" + member_data_items_ = [ - MemberSpec_('conductance', 'Nml2Quantity_conductance', 0, 0, {'use': u'required'}), + MemberSpec_( + "conductance", "Nml2Quantity_conductance", 0, 0, {"use": u"required"} + ), ] subclass = None superclass = BaseSynapse - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, conductance=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + conductance=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(LinearGradedSynapse, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(LinearGradedSynapse, self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_ + ) self.conductance = _cast(None, conductance) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, LinearGradedSynapse) + CurrentSubclassModule_, LinearGradedSynapse + ) if subclass is not None: return subclass(*args_, **kwargs_) if LinearGradedSynapse.subclass: return LinearGradedSynapse.subclass(*args_, **kwargs_) else: return LinearGradedSynapse(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_conductance(self, value): # Validate type Nml2Quantity_conductance, a restriction on xs:string. 
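# -------------------------------------------------------------------------
# Editor's note, not part of this patch: a short sketch of the GradedSynapse
# class defined above. The keyword arguments mirror its __init__ signature and
# the unit suffixes follow the Nml2Quantity_* patterns in its validators; the
# concrete values are illustrative assumptions only.
from neuroml import GradedSynapse

gs1 = GradedSynapse(id="gs1",
                    conductance="0.1nS",   # Nml2Quantity_conductance: S|mS|uS|nS|pS
                    delta="5mV",           # Nml2Quantity_voltage: V|mV
                    Vth="-55mV",
                    k="0.025per_ms",       # Nml2Quantity_pertime: per_s|per_ms|Hz
                    erev="0mV")

# The generated validators can also be called directly; they warn (rather than
# raise) when a value does not match the XSD pattern:
gs1.validate_Nml2Quantity_pertime(gs1.k)
# -------------------------------------------------------------------------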
if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_conductance_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_conductance_patterns_, )) - validate_Nml2Quantity_conductance_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S|mS|uS|nS|pS)$']] + self.validate_Nml2Quantity_conductance_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_conductance_patterns_, + ) + ) + + validate_Nml2Quantity_conductance_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S|mS|uS|nS|pS)$"] + ] + def hasContent_(self): - if ( - super(LinearGradedSynapse, self).hasContent_() - ): + if super(LinearGradedSynapse, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='LinearGradedSynapse', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('LinearGradedSynapse') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="LinearGradedSynapse", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("LinearGradedSynapse") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='LinearGradedSynapse') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="LinearGradedSynapse", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='LinearGradedSynapse', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="LinearGradedSynapse", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='LinearGradedSynapse'): - super(LinearGradedSynapse, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='LinearGradedSynapse') - if self.conductance is not None and 'conductance' not in already_processed: - already_processed.add('conductance') - outfile.write(' conductance=%s' % (quote_attrib(self.conductance), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='LinearGradedSynapse', fromsubclass_=False, pretty_print=True): - super(LinearGradedSynapse, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + 
name_="LinearGradedSynapse", + ): + super(LinearGradedSynapse, self).exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="LinearGradedSynapse", + ) + if self.conductance is not None and "conductance" not in already_processed: + already_processed.add("conductance") + outfile.write(" conductance=%s" % (quote_attrib(self.conductance),)) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="LinearGradedSynapse", + fromsubclass_=False, + pretty_print=True, + ): + super(LinearGradedSynapse, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -19508,73 +35945,139 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('conductance', node) - if value is not None and 'conductance' not in already_processed: - already_processed.add('conductance') + value = find_attr_value_("conductance", node) + if value is not None and "conductance" not in already_processed: + already_processed.add("conductance") self.conductance = value - self.validate_Nml2Quantity_conductance(self.conductance) # validate type Nml2Quantity_conductance + self.validate_Nml2Quantity_conductance( + self.conductance + ) # validate type Nml2Quantity_conductance super(LinearGradedSynapse, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(LinearGradedSynapse, self).buildChildren(child_, node, nodeName_, True) pass + + # end class LinearGradedSynapse class SilentSynapse(BaseSynapse): """Dummy synapse which emits no current. 
Used as presynaptic endpoint for analog synaptic connection (continuousConnection).""" - member_data_items_ = [ - ] + + member_data_items_ = [] subclass = None superclass = BaseSynapse - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(SilentSynapse, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(SilentSynapse, self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_ + ) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, SilentSynapse) + subclass = getSubclassFromModule_(CurrentSubclassModule_, SilentSynapse) if subclass is not None: return subclass(*args_, **kwargs_) if SilentSynapse.subclass: return SilentSynapse.subclass(*args_, **kwargs_) else: return SilentSynapse(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): - if ( - super(SilentSynapse, self).hasContent_() - ): + if super(SilentSynapse, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SilentSynapse', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('SilentSynapse') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="SilentSynapse", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("SilentSynapse") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SilentSynapse') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="SilentSynapse" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SilentSynapse', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="SilentSynapse", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SilentSynapse'): - super(SilentSynapse, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SilentSynapse') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SilentSynapse', fromsubclass_=False, pretty_print=True): - super(SilentSynapse, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + 
outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="SilentSynapse", + ): + super(SilentSynapse, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="SilentSynapse" + ) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="SilentSynapse", + fromsubclass_=False, + pretty_print=True, + ): + super(SilentSynapse, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -19582,79 +36085,158 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): super(SilentSynapse, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(SilentSynapse, self).buildChildren(child_, node, nodeName_, True) pass + + # end class SilentSynapse class GapJunction(BaseSynapse): """Gap junction/single electrical connection""" + member_data_items_ = [ - MemberSpec_('conductance', 'Nml2Quantity_conductance', 0, 0, {'use': u'required'}), + MemberSpec_( + "conductance", "Nml2Quantity_conductance", 0, 0, {"use": u"required"} + ), ] subclass = None superclass = BaseSynapse - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, conductance=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + conductance=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(GapJunction, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(GapJunction, self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_ + ) self.conductance = _cast(None, conductance) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, GapJunction) + subclass = getSubclassFromModule_(CurrentSubclassModule_, GapJunction) if subclass is not None: return subclass(*args_, **kwargs_) if GapJunction.subclass: return GapJunction.subclass(*args_, **kwargs_) else: return GapJunction(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_conductance(self, value): # Validate type Nml2Quantity_conductance, a restriction on xs:string. 
if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_conductance_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_conductance_patterns_, )) - validate_Nml2Quantity_conductance_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S|mS|uS|nS|pS)$']] + self.validate_Nml2Quantity_conductance_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_conductance_patterns_, + ) + ) + + validate_Nml2Quantity_conductance_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S|mS|uS|nS|pS)$"] + ] + def hasContent_(self): - if ( - super(GapJunction, self).hasContent_() - ): + if super(GapJunction, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GapJunction', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('GapJunction') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="GapJunction", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("GapJunction") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GapJunction') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="GapJunction" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='GapJunction', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="GapJunction", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GapJunction'): - super(GapJunction, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GapJunction') - if self.conductance is not None and 'conductance' not in already_processed: - already_processed.add('conductance') - outfile.write(' conductance=%s' % (quote_attrib(self.conductance), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GapJunction', fromsubclass_=False, pretty_print=True): - super(GapJunction, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="GapJunction", + ): + super(GapJunction, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="GapJunction" + ) + if self.conductance is not 
None and "conductance" not in already_processed: + already_processed.add("conductance") + outfile.write(" conductance=%s" % (quote_attrib(self.conductance),)) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="GapJunction", + fromsubclass_=False, + pretty_print=True, + ): + super(GapJunction, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -19662,76 +36244,159 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('conductance', node) - if value is not None and 'conductance' not in already_processed: - already_processed.add('conductance') + value = find_attr_value_("conductance", node) + if value is not None and "conductance" not in already_processed: + already_processed.add("conductance") self.conductance = value - self.validate_Nml2Quantity_conductance(self.conductance) # validate type Nml2Quantity_conductance + self.validate_Nml2Quantity_conductance( + self.conductance + ) # validate type Nml2Quantity_conductance super(GapJunction, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(GapJunction, self).buildChildren(child_, node, nodeName_, True) pass + + # end class GapJunction class BaseCurrentBasedSynapse(BaseSynapse): - member_data_items_ = [ - ] + member_data_items_ = [] subclass = None superclass = BaseSynapse - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, extensiontype_=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + extensiontype_=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(BaseCurrentBasedSynapse, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, extensiontype_, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(BaseCurrentBasedSynapse, self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + extensiontype_, + **kwargs_ + ) self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, BaseCurrentBasedSynapse) + CurrentSubclassModule_, BaseCurrentBasedSynapse + ) if subclass is not None: return subclass(*args_, **kwargs_) if BaseCurrentBasedSynapse.subclass: return BaseCurrentBasedSynapse.subclass(*args_, **kwargs_) else: return BaseCurrentBasedSynapse(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): - if ( - super(BaseCurrentBasedSynapse, self).hasContent_() - ): + if super(BaseCurrentBasedSynapse, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseCurrentBasedSynapse', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('BaseCurrentBasedSynapse') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="BaseCurrentBasedSynapse", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("BaseCurrentBasedSynapse") if 
imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseCurrentBasedSynapse') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="BaseCurrentBasedSynapse", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BaseCurrentBasedSynapse', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="BaseCurrentBasedSynapse", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BaseCurrentBasedSynapse'): - super(BaseCurrentBasedSynapse, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseCurrentBasedSynapse') - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="BaseCurrentBasedSynapse", + ): + super(BaseCurrentBasedSynapse, self).exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="BaseCurrentBasedSynapse", + ) + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseCurrentBasedSynapse', fromsubclass_=False, pretty_print=True): - super(BaseCurrentBasedSynapse, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="BaseCurrentBasedSynapse", + fromsubclass_=False, + pretty_print=True, + ): + super(BaseCurrentBasedSynapse, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -19739,75 +36404,160 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value - 
super(BaseCurrentBasedSynapse, self).buildAttributes(node, attrs, already_processed) + super(BaseCurrentBasedSynapse, self).buildAttributes( + node, attrs, already_processed + ) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(BaseCurrentBasedSynapse, self).buildChildren(child_, node, nodeName_, True) + super(BaseCurrentBasedSynapse, self).buildChildren( + child_, node, nodeName_, True + ) pass + + # end class BaseCurrentBasedSynapse class BaseVoltageDepSynapse(BaseSynapse): - member_data_items_ = [ - ] + member_data_items_ = [] subclass = None superclass = BaseSynapse - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, extensiontype_=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + extensiontype_=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(BaseVoltageDepSynapse, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, extensiontype_, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(BaseVoltageDepSynapse, self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + extensiontype_, + **kwargs_ + ) self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, BaseVoltageDepSynapse) + CurrentSubclassModule_, BaseVoltageDepSynapse + ) if subclass is not None: return subclass(*args_, **kwargs_) if BaseVoltageDepSynapse.subclass: return BaseVoltageDepSynapse.subclass(*args_, **kwargs_) else: return BaseVoltageDepSynapse(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): - if ( - super(BaseVoltageDepSynapse, self).hasContent_() - ): + if super(BaseVoltageDepSynapse, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseVoltageDepSynapse', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('BaseVoltageDepSynapse') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="BaseVoltageDepSynapse", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("BaseVoltageDepSynapse") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseVoltageDepSynapse') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="BaseVoltageDepSynapse", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BaseVoltageDepSynapse', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="BaseVoltageDepSynapse", + pretty_print=pretty_print, + ) 
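# -------------------------------------------------------------------------
# Editor's note, not part of this patch: a small sketch for the GapJunction
# class defined earlier in this file. Its only specific attribute is
# `conductance`; network-level electricalConnection elements then refer to the
# component by id. The `gap_junctions` list on NeuroMLDocument is an assumption
# based on the generateDS naming used elsewhere in libNeuroML.
from neuroml import NeuroMLDocument, GapJunction

nml_doc = NeuroMLDocument(id="gj_example")
gj1 = GapJunction(id="gj1", conductance="10pS")
nml_doc.gap_junctions.append(gj1)
# <electricalConnection ... synapse="gj1"/> entries inside an
# electricalProjection would then point at this component by its id.
# -------------------------------------------------------------------------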
showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BaseVoltageDepSynapse'): - super(BaseVoltageDepSynapse, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseVoltageDepSynapse') - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="BaseVoltageDepSynapse", + ): + super(BaseVoltageDepSynapse, self).exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="BaseVoltageDepSynapse", + ) + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseVoltageDepSynapse', fromsubclass_=False, pretty_print=True): - super(BaseVoltageDepSynapse, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="BaseVoltageDepSynapse", + fromsubclass_=False, + pretty_print=True, + ): + super(BaseVoltageDepSynapse, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -19815,15 +36565,21 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value - super(BaseVoltageDepSynapse, self).buildAttributes(node, attrs, already_processed) + super(BaseVoltageDepSynapse, self).buildAttributes( + node, attrs, already_processed + ) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(BaseVoltageDepSynapse, self).buildChildren(child_, node, nodeName_, True) pass + + # end class BaseVoltageDepSynapse @@ -19832,25 +36588,157 @@ class IonChannel(IonChannelScalable): identical. This is needed since many existing examples use ionChannel, some use ionChannelHH. 
One of these should be removed, probably ionChannelHH.""" + member_data_items_ = [ - MemberSpec_('species', 'NmlId', 0, 1, {'use': u'optional'}), - MemberSpec_('type', 'channelTypes', 0, 1, {'use': u'optional'}), - MemberSpec_('conductance', 'Nml2Quantity_conductance', 0, 1, {'use': u'optional'}), - MemberSpec_('gates', 'GateHHUndetermined', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'GateHHUndetermined', u'name': u'gate', u'minOccurs': u'0'}, 1), - MemberSpec_('gate_hh_rates', 'GateHHRates', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'GateHHRates', u'name': u'gateHHrates', u'minOccurs': u'0'}, 1), - MemberSpec_('gate_h_hrates_taus', 'GateHHRatesTau', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'GateHHRatesTau', u'name': u'gateHHratesTau', u'minOccurs': u'0'}, 1), - MemberSpec_('gate_hh_tau_infs', 'GateHHTauInf', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'GateHHTauInf', u'name': u'gateHHtauInf', u'minOccurs': u'0'}, 1), - MemberSpec_('gate_h_hrates_infs', 'GateHHRatesInf', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'GateHHRatesInf', u'name': u'gateHHratesInf', u'minOccurs': u'0'}, 1), - MemberSpec_('gate_h_hrates_tau_infs', 'GateHHRatesTauInf', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'GateHHRatesTauInf', u'name': u'gateHHratesTauInf', u'minOccurs': u'0'}, 1), - MemberSpec_('gate_hh_instantaneouses', 'GateHHInstantaneous', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'GateHHInstantaneous', u'name': u'gateHHInstantaneous', u'minOccurs': u'0'}, 1), - MemberSpec_('gate_fractionals', 'GateFractional', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'GateFractional', u'name': u'gateFractional', u'minOccurs': u'0'}, 1), + MemberSpec_("species", "NmlId", 0, 1, {"use": u"optional"}), + MemberSpec_("type", "channelTypes", 0, 1, {"use": u"optional"}), + MemberSpec_( + "conductance", "Nml2Quantity_conductance", 0, 1, {"use": u"optional"} + ), + MemberSpec_( + "gates", + "GateHHUndetermined", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"GateHHUndetermined", + u"name": u"gate", + u"minOccurs": u"0", + }, + 1, + ), + MemberSpec_( + "gate_hh_rates", + "GateHHRates", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"GateHHRates", + u"name": u"gateHHrates", + u"minOccurs": u"0", + }, + 1, + ), + MemberSpec_( + "gate_h_hrates_taus", + "GateHHRatesTau", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"GateHHRatesTau", + u"name": u"gateHHratesTau", + u"minOccurs": u"0", + }, + 1, + ), + MemberSpec_( + "gate_hh_tau_infs", + "GateHHTauInf", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"GateHHTauInf", + u"name": u"gateHHtauInf", + u"minOccurs": u"0", + }, + 1, + ), + MemberSpec_( + "gate_h_hrates_infs", + "GateHHRatesInf", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"GateHHRatesInf", + u"name": u"gateHHratesInf", + u"minOccurs": u"0", + }, + 1, + ), + MemberSpec_( + "gate_h_hrates_tau_infs", + "GateHHRatesTauInf", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"GateHHRatesTauInf", + u"name": u"gateHHratesTauInf", + u"minOccurs": u"0", + }, + 1, + ), + MemberSpec_( + "gate_hh_instantaneouses", + "GateHHInstantaneous", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"GateHHInstantaneous", + u"name": u"gateHHInstantaneous", + u"minOccurs": u"0", + }, + 1, + ), + MemberSpec_( + "gate_fractionals", + "GateFractional", + 1, + 1, + { + u"maxOccurs": u"unbounded", + u"type": u"GateFractional", + u"name": u"gateFractional", + u"minOccurs": u"0", + }, + 1, + ), ] subclass = None superclass = IonChannelScalable - def __init__(self, 
neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, q10_conductance_scalings=None, species=None, type=None, conductance=None, gates=None, gate_hh_rates=None, gate_h_hrates_taus=None, gate_hh_tau_infs=None, gate_h_hrates_infs=None, gate_h_hrates_tau_infs=None, gate_hh_instantaneouses=None, gate_fractionals=None, extensiontype_=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + q10_conductance_scalings=None, + species=None, + type=None, + conductance=None, + gates=None, + gate_hh_rates=None, + gate_h_hrates_taus=None, + gate_hh_tau_infs=None, + gate_h_hrates_infs=None, + gate_h_hrates_tau_infs=None, + gate_hh_instantaneouses=None, + gate_fractionals=None, + extensiontype_=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(IonChannel, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, q10_conductance_scalings, extensiontype_, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(IonChannel, self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + q10_conductance_scalings, + extensiontype_, + **kwargs_ + ) self.species = _cast(None, species) self.type = _cast(None, type) self.conductance = _cast(None, conductance) @@ -19887,116 +36775,241 @@ def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properti else: self.gate_fractionals = gate_fractionals self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, IonChannel) + subclass = getSubclassFromModule_(CurrentSubclassModule_, IonChannel) if subclass is not None: return subclass(*args_, **kwargs_) if IonChannel.subclass: return IonChannel.subclass(*args_, **kwargs_) else: return IonChannel(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] + self.validate_NmlId_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [[u"^[a-zA-Z_][a-zA-Z0-9_]*$"]] + def validate_channelTypes(self, value): # Validate type channelTypes, a restriction on xs:string. if value is not None and Validate_simpletypes_: value = str(value) - enumerations = ['ionChannelPassive', 'ionChannelHH'] + enumerations = ["ionChannelPassive", "ionChannelHH"] enumeration_respectee = False for enum in enumerations: if value == enum: enumeration_respectee = True break if not enumeration_respectee: - warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on channelTypes' % {"value" : value.encode("utf-8")} ) + warnings_.warn( + 'Value "%(value)s" does not match xsd enumeration restriction on channelTypes' + % {"value": value.encode("utf-8")} + ) + def validate_Nml2Quantity_conductance(self, value): # Validate type Nml2Quantity_conductance, a restriction on xs:string. 
if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_conductance_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_conductance_patterns_, )) - validate_Nml2Quantity_conductance_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S|mS|uS|nS|pS)$']] + self.validate_Nml2Quantity_conductance_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_conductance_patterns_, + ) + ) + + validate_Nml2Quantity_conductance_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S|mS|uS|nS|pS)$"] + ] + def hasContent_(self): if ( - self.gates or - self.gate_hh_rates or - self.gate_h_hrates_taus or - self.gate_hh_tau_infs or - self.gate_h_hrates_infs or - self.gate_h_hrates_tau_infs or - self.gate_hh_instantaneouses or - self.gate_fractionals or - super(IonChannel, self).hasContent_() + self.gates + or self.gate_hh_rates + or self.gate_h_hrates_taus + or self.gate_hh_tau_infs + or self.gate_h_hrates_infs + or self.gate_h_hrates_tau_infs + or self.gate_hh_instantaneouses + or self.gate_fractionals + or super(IonChannel, self).hasContent_() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IonChannel', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('IonChannel') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="IonChannel", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("IonChannel") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IonChannel') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="IonChannel" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='IonChannel', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="IonChannel", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='IonChannel'): - super(IonChannel, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IonChannel') - if self.species is not None and 'species' not in already_processed: - already_processed.add('species') - outfile.write(' species=%s' % (quote_attrib(self.species), )) - if self.type is not None and 'type' not in already_processed: - already_processed.add('type') - outfile.write(' type=%s' % (quote_attrib(self.type), )) - if self.conductance is not None and 'conductance' not in already_processed: - 
already_processed.add('conductance') - outfile.write(' conductance=%s' % (quote_attrib(self.conductance), )) - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="IonChannel" + ): + super(IonChannel, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="IonChannel" + ) + if self.species is not None and "species" not in already_processed: + already_processed.add("species") + outfile.write(" species=%s" % (quote_attrib(self.species),)) + if self.type is not None and "type" not in already_processed: + already_processed.add("type") + outfile.write(" type=%s" % (quote_attrib(self.type),)) + if self.conductance is not None and "conductance" not in already_processed: + already_processed.add("conductance") + outfile.write(" conductance=%s" % (quote_attrib(self.conductance),)) + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IonChannel', fromsubclass_=False, pretty_print=True): - super(IonChannel, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="IonChannel", + fromsubclass_=False, + pretty_print=True, + ): + super(IonChannel, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" for gate_ in self.gates: - gate_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='gate', pretty_print=pretty_print) + gate_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="gate", + pretty_print=pretty_print, + ) for gateHHrates_ in self.gate_hh_rates: - gateHHrates_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='gateHHrates', pretty_print=pretty_print) + gateHHrates_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="gateHHrates", + pretty_print=pretty_print, + ) for gateHHratesTau_ in self.gate_h_hrates_taus: - gateHHratesTau_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='gateHHratesTau', pretty_print=pretty_print) + gateHHratesTau_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="gateHHratesTau", + pretty_print=pretty_print, + ) for gateHHtauInf_ in self.gate_hh_tau_infs: - gateHHtauInf_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='gateHHtauInf', pretty_print=pretty_print) + gateHHtauInf_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="gateHHtauInf", + pretty_print=pretty_print, + ) for gateHHratesInf_ in self.gate_h_hrates_infs: - gateHHratesInf_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='gateHHratesInf', pretty_print=pretty_print) + gateHHratesInf_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="gateHHratesInf", + pretty_print=pretty_print, + ) for gateHHratesTauInf_ in self.gate_h_hrates_tau_infs: - gateHHratesTauInf_.export(outfile, level, namespaceprefix_, 
namespacedef_='', name_='gateHHratesTauInf', pretty_print=pretty_print) + gateHHratesTauInf_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="gateHHratesTauInf", + pretty_print=pretty_print, + ) for gateHHInstantaneous_ in self.gate_hh_instantaneouses: - gateHHInstantaneous_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='gateHHInstantaneous', pretty_print=pretty_print) + gateHHInstantaneous_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="gateHHInstantaneous", + pretty_print=pretty_print, + ) for gateFractional_ in self.gate_fractionals: - gateFractional_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='gateFractional', pretty_print=pretty_print) + gateFractional_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="gateFractional", + pretty_print=pretty_print, + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -20004,124 +37017,198 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('species', node) - if value is not None and 'species' not in already_processed: - already_processed.add('species') + value = find_attr_value_("species", node) + if value is not None and "species" not in already_processed: + already_processed.add("species") self.species = value - self.validate_NmlId(self.species) # validate type NmlId - value = find_attr_value_('type', node) - if value is not None and 'type' not in already_processed: - already_processed.add('type') + self.validate_NmlId(self.species) # validate type NmlId + value = find_attr_value_("type", node) + if value is not None and "type" not in already_processed: + already_processed.add("type") self.type = value - self.validate_channelTypes(self.type) # validate type channelTypes - value = find_attr_value_('conductance', node) - if value is not None and 'conductance' not in already_processed: - already_processed.add('conductance') + self.validate_channelTypes(self.type) # validate type channelTypes + value = find_attr_value_("conductance", node) + if value is not None and "conductance" not in already_processed: + already_processed.add("conductance") self.conductance = value - self.validate_Nml2Quantity_conductance(self.conductance) # validate type Nml2Quantity_conductance - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + self.validate_Nml2Quantity_conductance( + self.conductance + ) # validate type Nml2Quantity_conductance + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value super(IonChannel, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'gate': + if nodeName_ == "gate": obj_ = GateHHUndetermined.factory(parent_object_=self) obj_.build(child_) self.gates.append(obj_) - obj_.original_tagname_ = 'gate' - elif nodeName_ == 'gateHHrates': + obj_.original_tagname_ = "gate" + elif nodeName_ == "gateHHrates": obj_ = GateHHRates.factory(parent_object_=self) obj_.build(child_) self.gate_hh_rates.append(obj_) - obj_.original_tagname_ = 'gateHHrates' - elif nodeName_ == 'gateHHratesTau': + 
obj_.original_tagname_ = "gateHHrates" + elif nodeName_ == "gateHHratesTau": obj_ = GateHHRatesTau.factory(parent_object_=self) obj_.build(child_) self.gate_h_hrates_taus.append(obj_) - obj_.original_tagname_ = 'gateHHratesTau' - elif nodeName_ == 'gateHHtauInf': + obj_.original_tagname_ = "gateHHratesTau" + elif nodeName_ == "gateHHtauInf": obj_ = GateHHTauInf.factory(parent_object_=self) obj_.build(child_) self.gate_hh_tau_infs.append(obj_) - obj_.original_tagname_ = 'gateHHtauInf' - elif nodeName_ == 'gateHHratesInf': + obj_.original_tagname_ = "gateHHtauInf" + elif nodeName_ == "gateHHratesInf": obj_ = GateHHRatesInf.factory(parent_object_=self) obj_.build(child_) self.gate_h_hrates_infs.append(obj_) - obj_.original_tagname_ = 'gateHHratesInf' - elif nodeName_ == 'gateHHratesTauInf': + obj_.original_tagname_ = "gateHHratesInf" + elif nodeName_ == "gateHHratesTauInf": obj_ = GateHHRatesTauInf.factory(parent_object_=self) obj_.build(child_) self.gate_h_hrates_tau_infs.append(obj_) - obj_.original_tagname_ = 'gateHHratesTauInf' - elif nodeName_ == 'gateHHInstantaneous': + obj_.original_tagname_ = "gateHHratesTauInf" + elif nodeName_ == "gateHHInstantaneous": obj_ = GateHHInstantaneous.factory(parent_object_=self) obj_.build(child_) self.gate_hh_instantaneouses.append(obj_) - obj_.original_tagname_ = 'gateHHInstantaneous' - elif nodeName_ == 'gateFractional': + obj_.original_tagname_ = "gateHHInstantaneous" + elif nodeName_ == "gateFractional": obj_ = GateFractional.factory(parent_object_=self) obj_.build(child_) self.gate_fractionals.append(obj_) - obj_.original_tagname_ = 'gateFractional' + obj_.original_tagname_ = "gateFractional" super(IonChannel, self).buildChildren(child_, node, nodeName_, True) + + # end class IonChannel class AlphaCurrSynapse(BasePynnSynapse): - member_data_items_ = [ - ] + member_data_items_ = [] subclass = None superclass = BasePynnSynapse - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, tau_syn=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + tau_syn=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(AlphaCurrSynapse, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, tau_syn, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(AlphaCurrSynapse, self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, tau_syn, **kwargs_ + ) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, AlphaCurrSynapse) + subclass = getSubclassFromModule_(CurrentSubclassModule_, AlphaCurrSynapse) if subclass is not None: return subclass(*args_, **kwargs_) if AlphaCurrSynapse.subclass: return AlphaCurrSynapse.subclass(*args_, **kwargs_) else: return AlphaCurrSynapse(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): - if ( - super(AlphaCurrSynapse, self).hasContent_() - ): + if super(AlphaCurrSynapse, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='AlphaCurrSynapse', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('AlphaCurrSynapse') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="AlphaCurrSynapse", + pretty_print=True, + ): + imported_ns_def_ = 
GenerateDSNamespaceDefs_.get("AlphaCurrSynapse") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='AlphaCurrSynapse') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="AlphaCurrSynapse", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='AlphaCurrSynapse', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="AlphaCurrSynapse", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='AlphaCurrSynapse'): - super(AlphaCurrSynapse, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='AlphaCurrSynapse') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='AlphaCurrSynapse', fromsubclass_=False, pretty_print=True): - super(AlphaCurrSynapse, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="AlphaCurrSynapse", + ): + super(AlphaCurrSynapse, self).exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="AlphaCurrSynapse", + ) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="AlphaCurrSynapse", + fromsubclass_=False, + pretty_print=True, + ): + super(AlphaCurrSynapse, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -20129,66 +37216,130 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): super(AlphaCurrSynapse, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(AlphaCurrSynapse, self).buildChildren(child_, node, nodeName_, True) pass + + # end class AlphaCurrSynapse class ExpCurrSynapse(BasePynnSynapse): - member_data_items_ = [ - ] + member_data_items_ = [] subclass = None superclass = BasePynnSynapse - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, tau_syn=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + tau_syn=None, + **kwargs_ + ): self.original_tagname_ = None - 
self.parent_object_ = kwargs_.get('parent_object_') - super(ExpCurrSynapse, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, tau_syn, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(ExpCurrSynapse, self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, tau_syn, **kwargs_ + ) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, ExpCurrSynapse) + subclass = getSubclassFromModule_(CurrentSubclassModule_, ExpCurrSynapse) if subclass is not None: return subclass(*args_, **kwargs_) if ExpCurrSynapse.subclass: return ExpCurrSynapse.subclass(*args_, **kwargs_) else: return ExpCurrSynapse(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): - if ( - super(ExpCurrSynapse, self).hasContent_() - ): + if super(ExpCurrSynapse, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExpCurrSynapse', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('ExpCurrSynapse') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ExpCurrSynapse", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ExpCurrSynapse") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExpCurrSynapse') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="ExpCurrSynapse" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ExpCurrSynapse', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ExpCurrSynapse", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ExpCurrSynapse'): - super(ExpCurrSynapse, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExpCurrSynapse') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExpCurrSynapse', fromsubclass_=False, pretty_print=True): - super(ExpCurrSynapse, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ExpCurrSynapse", + ): + super(ExpCurrSynapse, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="ExpCurrSynapse" + ) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ExpCurrSynapse", + fromsubclass_=False, + 
pretty_print=True, + ): + super(ExpCurrSynapse, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -20196,71 +37347,147 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): super(ExpCurrSynapse, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(ExpCurrSynapse, self).buildChildren(child_, node, nodeName_, True) pass + + # end class ExpCurrSynapse class AlphaCondSynapse(BasePynnSynapse): member_data_items_ = [ - MemberSpec_('e_rev', 'xs:float', 0, 0, {'use': u'required'}), + MemberSpec_("e_rev", "xs:float", 0, 0, {"use": u"required"}), ] subclass = None superclass = BasePynnSynapse - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, tau_syn=None, e_rev=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + tau_syn=None, + e_rev=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(AlphaCondSynapse, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, tau_syn, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(AlphaCondSynapse, self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, tau_syn, **kwargs_ + ) self.e_rev = _cast(float, e_rev) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, AlphaCondSynapse) + subclass = getSubclassFromModule_(CurrentSubclassModule_, AlphaCondSynapse) if subclass is not None: return subclass(*args_, **kwargs_) if AlphaCondSynapse.subclass: return AlphaCondSynapse.subclass(*args_, **kwargs_) else: return AlphaCondSynapse(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): - if ( - super(AlphaCondSynapse, self).hasContent_() - ): + if super(AlphaCondSynapse, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='AlphaCondSynapse', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('AlphaCondSynapse') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="AlphaCondSynapse", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("AlphaCondSynapse") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='AlphaCondSynapse') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="AlphaCondSynapse", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - 
self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='AlphaCondSynapse', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="AlphaCondSynapse", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='AlphaCondSynapse'): - super(AlphaCondSynapse, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='AlphaCondSynapse') - if self.e_rev is not None and 'e_rev' not in already_processed: - already_processed.add('e_rev') - outfile.write(' e_rev="%s"' % self.gds_format_float(self.e_rev, input_name='e_rev')) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='AlphaCondSynapse', fromsubclass_=False, pretty_print=True): - super(AlphaCondSynapse, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="AlphaCondSynapse", + ): + super(AlphaCondSynapse, self).exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="AlphaCondSynapse", + ) + if self.e_rev is not None and "e_rev" not in already_processed: + already_processed.add("e_rev") + outfile.write( + ' e_rev="%s"' % self.gds_format_float(self.e_rev, input_name="e_rev") + ) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="AlphaCondSynapse", + fromsubclass_=False, + pretty_print=True, + ): + super(AlphaCondSynapse, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -20268,78 +37495,146 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('e_rev', node) - if value is not None and 'e_rev' not in already_processed: - already_processed.add('e_rev') + value = find_attr_value_("e_rev", node) + if value is not None and "e_rev" not in already_processed: + already_processed.add("e_rev") try: self.e_rev = float(value) except ValueError as exp: - raise ValueError('Bad float/double attribute (e_rev): %s' % exp) + raise ValueError("Bad float/double attribute (e_rev): %s" % exp) super(AlphaCondSynapse, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(AlphaCondSynapse, self).buildChildren(child_, node, nodeName_, True) pass + + # end class AlphaCondSynapse class ExpCondSynapse(BasePynnSynapse): member_data_items_ = [ - MemberSpec_('e_rev', 'xs:float', 0, 0, {'use': u'required'}), + MemberSpec_("e_rev", "xs:float", 0, 0, {"use": u"required"}), ] subclass = None superclass = BasePynnSynapse - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, tau_syn=None, e_rev=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + 
annotation=None, + tau_syn=None, + e_rev=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(ExpCondSynapse, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, tau_syn, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(ExpCondSynapse, self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, tau_syn, **kwargs_ + ) self.e_rev = _cast(float, e_rev) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, ExpCondSynapse) + subclass = getSubclassFromModule_(CurrentSubclassModule_, ExpCondSynapse) if subclass is not None: return subclass(*args_, **kwargs_) if ExpCondSynapse.subclass: return ExpCondSynapse.subclass(*args_, **kwargs_) else: return ExpCondSynapse(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): - if ( - super(ExpCondSynapse, self).hasContent_() - ): + if super(ExpCondSynapse, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExpCondSynapse', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('ExpCondSynapse') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ExpCondSynapse", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ExpCondSynapse") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExpCondSynapse') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="ExpCondSynapse" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ExpCondSynapse', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ExpCondSynapse", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ExpCondSynapse'): - super(ExpCondSynapse, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExpCondSynapse') - if self.e_rev is not None and 'e_rev' not in already_processed: - already_processed.add('e_rev') - outfile.write(' e_rev="%s"' % self.gds_format_float(self.e_rev, input_name='e_rev')) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExpCondSynapse', fromsubclass_=False, pretty_print=True): - super(ExpCondSynapse, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + 
namespaceprefix_="", + name_="ExpCondSynapse", + ): + super(ExpCondSynapse, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="ExpCondSynapse" + ) + if self.e_rev is not None and "e_rev" not in already_processed: + already_processed.add("e_rev") + outfile.write( + ' e_rev="%s"' % self.gds_format_float(self.e_rev, input_name="e_rev") + ) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ExpCondSynapse", + fromsubclass_=False, + pretty_print=True, + ): + super(ExpCondSynapse, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -20347,39 +37642,80 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('e_rev', node) - if value is not None and 'e_rev' not in already_processed: - already_processed.add('e_rev') + value = find_attr_value_("e_rev", node) + if value is not None and "e_rev" not in already_processed: + already_processed.add("e_rev") try: self.e_rev = float(value) except ValueError as exp: - raise ValueError('Bad float/double attribute (e_rev): %s' % exp) + raise ValueError("Bad float/double attribute (e_rev): %s" % exp) super(ExpCondSynapse, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(ExpCondSynapse, self).buildChildren(child_, node, nodeName_, True) pass + + # end class ExpCondSynapse class HH_cond_exp(basePyNNCell): member_data_items_ = [ - MemberSpec_('v_offset', 'xs:float', 0, 0, {'use': u'required'}), - MemberSpec_('e_rev_E', 'xs:float', 0, 0, {'use': u'required'}), - MemberSpec_('e_rev_I', 'xs:float', 0, 0, {'use': u'required'}), - MemberSpec_('e_rev_K', 'xs:float', 0, 0, {'use': u'required'}), - MemberSpec_('e_rev_Na', 'xs:float', 0, 0, {'use': u'required'}), - MemberSpec_('e_rev_leak', 'xs:float', 0, 0, {'use': u'required'}), - MemberSpec_('g_leak', 'xs:float', 0, 0, {'use': u'required'}), - MemberSpec_('gbar_K', 'xs:float', 0, 0, {'use': u'required'}), - MemberSpec_('gbar_Na', 'xs:float', 0, 0, {'use': u'required'}), + MemberSpec_("v_offset", "xs:float", 0, 0, {"use": u"required"}), + MemberSpec_("e_rev_E", "xs:float", 0, 0, {"use": u"required"}), + MemberSpec_("e_rev_I", "xs:float", 0, 0, {"use": u"required"}), + MemberSpec_("e_rev_K", "xs:float", 0, 0, {"use": u"required"}), + MemberSpec_("e_rev_Na", "xs:float", 0, 0, {"use": u"required"}), + MemberSpec_("e_rev_leak", "xs:float", 0, 0, {"use": u"required"}), + MemberSpec_("g_leak", "xs:float", 0, 0, {"use": u"required"}), + MemberSpec_("gbar_K", "xs:float", 0, 0, {"use": u"required"}), + MemberSpec_("gbar_Na", "xs:float", 0, 0, {"use": u"required"}), ] subclass = None superclass = basePyNNCell - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, cm=None, i_offset=None, tau_syn_E=None, tau_syn_I=None, v_init=None, v_offset=None, e_rev_E=None, e_rev_I=None, e_rev_K=None, e_rev_Na=None, e_rev_leak=None, g_leak=None, gbar_K=None, gbar_Na=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + cm=None, + i_offset=None, + tau_syn_E=None, + tau_syn_I=None, + v_init=None, + v_offset=None, + 
e_rev_E=None, + e_rev_I=None, + e_rev_K=None, + e_rev_Na=None, + e_rev_leak=None, + g_leak=None, + gbar_K=None, + gbar_Na=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(HH_cond_exp, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, cm, i_offset, tau_syn_E, tau_syn_I, v_init, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(HH_cond_exp, self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + cm, + i_offset, + tau_syn_E, + tau_syn_I, + v_init, + **kwargs_ + ) self.v_offset = _cast(float, v_offset) self.e_rev_E = _cast(float, e_rev_E) self.e_rev_I = _cast(float, e_rev_I) @@ -20389,76 +37725,149 @@ def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properti self.g_leak = _cast(float, g_leak) self.gbar_K = _cast(float, gbar_K) self.gbar_Na = _cast(float, gbar_Na) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, HH_cond_exp) + subclass = getSubclassFromModule_(CurrentSubclassModule_, HH_cond_exp) if subclass is not None: return subclass(*args_, **kwargs_) if HH_cond_exp.subclass: return HH_cond_exp.subclass(*args_, **kwargs_) else: return HH_cond_exp(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): - if ( - super(HH_cond_exp, self).hasContent_() - ): + if super(HH_cond_exp, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='HH_cond_exp', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('HH_cond_exp') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="HH_cond_exp", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("HH_cond_exp") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='HH_cond_exp') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="HH_cond_exp" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='HH_cond_exp', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="HH_cond_exp", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='HH_cond_exp'): - super(HH_cond_exp, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='HH_cond_exp') - if self.v_offset is not None and 'v_offset' not in already_processed: - already_processed.add('v_offset') - outfile.write(' v_offset="%s"' % self.gds_format_float(self.v_offset, input_name='v_offset')) - if self.e_rev_E is not None 
and 'e_rev_E' not in already_processed: - already_processed.add('e_rev_E') - outfile.write(' e_rev_E="%s"' % self.gds_format_float(self.e_rev_E, input_name='e_rev_E')) - if self.e_rev_I is not None and 'e_rev_I' not in already_processed: - already_processed.add('e_rev_I') - outfile.write(' e_rev_I="%s"' % self.gds_format_float(self.e_rev_I, input_name='e_rev_I')) - if self.e_rev_K is not None and 'e_rev_K' not in already_processed: - already_processed.add('e_rev_K') - outfile.write(' e_rev_K="%s"' % self.gds_format_float(self.e_rev_K, input_name='e_rev_K')) - if self.e_rev_Na is not None and 'e_rev_Na' not in already_processed: - already_processed.add('e_rev_Na') - outfile.write(' e_rev_Na="%s"' % self.gds_format_float(self.e_rev_Na, input_name='e_rev_Na')) - if self.e_rev_leak is not None and 'e_rev_leak' not in already_processed: - already_processed.add('e_rev_leak') - outfile.write(' e_rev_leak="%s"' % self.gds_format_float(self.e_rev_leak, input_name='e_rev_leak')) - if self.g_leak is not None and 'g_leak' not in already_processed: - already_processed.add('g_leak') - outfile.write(' g_leak="%s"' % self.gds_format_float(self.g_leak, input_name='g_leak')) - if self.gbar_K is not None and 'gbar_K' not in already_processed: - already_processed.add('gbar_K') - outfile.write(' gbar_K="%s"' % self.gds_format_float(self.gbar_K, input_name='gbar_K')) - if self.gbar_Na is not None and 'gbar_Na' not in already_processed: - already_processed.add('gbar_Na') - outfile.write(' gbar_Na="%s"' % self.gds_format_float(self.gbar_Na, input_name='gbar_Na')) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='HH_cond_exp', fromsubclass_=False, pretty_print=True): - super(HH_cond_exp, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="HH_cond_exp", + ): + super(HH_cond_exp, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="HH_cond_exp" + ) + if self.v_offset is not None and "v_offset" not in already_processed: + already_processed.add("v_offset") + outfile.write( + ' v_offset="%s"' + % self.gds_format_float(self.v_offset, input_name="v_offset") + ) + if self.e_rev_E is not None and "e_rev_E" not in already_processed: + already_processed.add("e_rev_E") + outfile.write( + ' e_rev_E="%s"' + % self.gds_format_float(self.e_rev_E, input_name="e_rev_E") + ) + if self.e_rev_I is not None and "e_rev_I" not in already_processed: + already_processed.add("e_rev_I") + outfile.write( + ' e_rev_I="%s"' + % self.gds_format_float(self.e_rev_I, input_name="e_rev_I") + ) + if self.e_rev_K is not None and "e_rev_K" not in already_processed: + already_processed.add("e_rev_K") + outfile.write( + ' e_rev_K="%s"' + % self.gds_format_float(self.e_rev_K, input_name="e_rev_K") + ) + if self.e_rev_Na is not None and "e_rev_Na" not in already_processed: + already_processed.add("e_rev_Na") + outfile.write( + ' e_rev_Na="%s"' + % self.gds_format_float(self.e_rev_Na, input_name="e_rev_Na") + ) + if self.e_rev_leak is not None and "e_rev_leak" not in already_processed: + already_processed.add("e_rev_leak") + outfile.write( + ' e_rev_leak="%s"' + % self.gds_format_float(self.e_rev_leak, input_name="e_rev_leak") + ) + if self.g_leak is not None and "g_leak" not in already_processed: + already_processed.add("g_leak") + 
outfile.write( + ' g_leak="%s"' % self.gds_format_float(self.g_leak, input_name="g_leak") + ) + if self.gbar_K is not None and "gbar_K" not in already_processed: + already_processed.add("gbar_K") + outfile.write( + ' gbar_K="%s"' % self.gds_format_float(self.gbar_K, input_name="gbar_K") + ) + if self.gbar_Na is not None and "gbar_Na" not in already_processed: + already_processed.add("gbar_Na") + outfile.write( + ' gbar_Na="%s"' + % self.gds_format_float(self.gbar_Na, input_name="gbar_Na") + ) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="HH_cond_exp", + fromsubclass_=False, + pretty_print=True, + ): + super(HH_cond_exp, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -20466,159 +37875,259 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('v_offset', node) - if value is not None and 'v_offset' not in already_processed: - already_processed.add('v_offset') + value = find_attr_value_("v_offset", node) + if value is not None and "v_offset" not in already_processed: + already_processed.add("v_offset") try: self.v_offset = float(value) except ValueError as exp: - raise ValueError('Bad float/double attribute (v_offset): %s' % exp) - value = find_attr_value_('e_rev_E', node) - if value is not None and 'e_rev_E' not in already_processed: - already_processed.add('e_rev_E') + raise ValueError("Bad float/double attribute (v_offset): %s" % exp) + value = find_attr_value_("e_rev_E", node) + if value is not None and "e_rev_E" not in already_processed: + already_processed.add("e_rev_E") try: self.e_rev_E = float(value) except ValueError as exp: - raise ValueError('Bad float/double attribute (e_rev_E): %s' % exp) - value = find_attr_value_('e_rev_I', node) - if value is not None and 'e_rev_I' not in already_processed: - already_processed.add('e_rev_I') + raise ValueError("Bad float/double attribute (e_rev_E): %s" % exp) + value = find_attr_value_("e_rev_I", node) + if value is not None and "e_rev_I" not in already_processed: + already_processed.add("e_rev_I") try: self.e_rev_I = float(value) except ValueError as exp: - raise ValueError('Bad float/double attribute (e_rev_I): %s' % exp) - value = find_attr_value_('e_rev_K', node) - if value is not None and 'e_rev_K' not in already_processed: - already_processed.add('e_rev_K') + raise ValueError("Bad float/double attribute (e_rev_I): %s" % exp) + value = find_attr_value_("e_rev_K", node) + if value is not None and "e_rev_K" not in already_processed: + already_processed.add("e_rev_K") try: self.e_rev_K = float(value) except ValueError as exp: - raise ValueError('Bad float/double attribute (e_rev_K): %s' % exp) - value = find_attr_value_('e_rev_Na', node) - if value is not None and 'e_rev_Na' not in already_processed: - already_processed.add('e_rev_Na') + raise ValueError("Bad float/double attribute (e_rev_K): %s" % exp) + value = find_attr_value_("e_rev_Na", node) + if value is not None and "e_rev_Na" not in already_processed: + already_processed.add("e_rev_Na") try: self.e_rev_Na = float(value) except ValueError as exp: - raise ValueError('Bad float/double attribute (e_rev_Na): %s' % exp) - value = find_attr_value_('e_rev_leak', node) - if value is not None and 
'e_rev_leak' not in already_processed: - already_processed.add('e_rev_leak') + raise ValueError("Bad float/double attribute (e_rev_Na): %s" % exp) + value = find_attr_value_("e_rev_leak", node) + if value is not None and "e_rev_leak" not in already_processed: + already_processed.add("e_rev_leak") try: self.e_rev_leak = float(value) except ValueError as exp: - raise ValueError('Bad float/double attribute (e_rev_leak): %s' % exp) - value = find_attr_value_('g_leak', node) - if value is not None and 'g_leak' not in already_processed: - already_processed.add('g_leak') + raise ValueError("Bad float/double attribute (e_rev_leak): %s" % exp) + value = find_attr_value_("g_leak", node) + if value is not None and "g_leak" not in already_processed: + already_processed.add("g_leak") try: self.g_leak = float(value) except ValueError as exp: - raise ValueError('Bad float/double attribute (g_leak): %s' % exp) - value = find_attr_value_('gbar_K', node) - if value is not None and 'gbar_K' not in already_processed: - already_processed.add('gbar_K') + raise ValueError("Bad float/double attribute (g_leak): %s" % exp) + value = find_attr_value_("gbar_K", node) + if value is not None and "gbar_K" not in already_processed: + already_processed.add("gbar_K") try: self.gbar_K = float(value) except ValueError as exp: - raise ValueError('Bad float/double attribute (gbar_K): %s' % exp) - value = find_attr_value_('gbar_Na', node) - if value is not None and 'gbar_Na' not in already_processed: - already_processed.add('gbar_Na') + raise ValueError("Bad float/double attribute (gbar_K): %s" % exp) + value = find_attr_value_("gbar_Na", node) + if value is not None and "gbar_Na" not in already_processed: + already_processed.add("gbar_Na") try: self.gbar_Na = float(value) except ValueError as exp: - raise ValueError('Bad float/double attribute (gbar_Na): %s' % exp) + raise ValueError("Bad float/double attribute (gbar_Na): %s" % exp) super(HH_cond_exp, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(HH_cond_exp, self).buildChildren(child_, node, nodeName_, True) pass + + # end class HH_cond_exp class basePyNNIaFCell(basePyNNCell): member_data_items_ = [ - MemberSpec_('tau_m', 'xs:float', 0, 0, {'use': u'required'}), - MemberSpec_('tau_refrac', 'xs:float', 0, 0, {'use': u'required'}), - MemberSpec_('v_reset', 'xs:float', 0, 0, {'use': u'required'}), - MemberSpec_('v_rest', 'xs:float', 0, 0, {'use': u'required'}), - MemberSpec_('v_thresh', 'xs:float', 0, 0, {'use': u'required'}), + MemberSpec_("tau_m", "xs:float", 0, 0, {"use": u"required"}), + MemberSpec_("tau_refrac", "xs:float", 0, 0, {"use": u"required"}), + MemberSpec_("v_reset", "xs:float", 0, 0, {"use": u"required"}), + MemberSpec_("v_rest", "xs:float", 0, 0, {"use": u"required"}), + MemberSpec_("v_thresh", "xs:float", 0, 0, {"use": u"required"}), ] subclass = None superclass = basePyNNCell - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, cm=None, i_offset=None, tau_syn_E=None, tau_syn_I=None, v_init=None, tau_m=None, tau_refrac=None, v_reset=None, v_rest=None, v_thresh=None, extensiontype_=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + cm=None, + i_offset=None, + tau_syn_E=None, + tau_syn_I=None, + v_init=None, + tau_m=None, + tau_refrac=None, + v_reset=None, + v_rest=None, + v_thresh=None, + extensiontype_=None, + **kwargs_ + ): 
self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(basePyNNIaFCell, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, cm, i_offset, tau_syn_E, tau_syn_I, v_init, extensiontype_, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(basePyNNIaFCell, self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + cm, + i_offset, + tau_syn_E, + tau_syn_I, + v_init, + extensiontype_, + **kwargs_ + ) self.tau_m = _cast(float, tau_m) self.tau_refrac = _cast(float, tau_refrac) self.v_reset = _cast(float, v_reset) self.v_rest = _cast(float, v_rest) self.v_thresh = _cast(float, v_thresh) self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, basePyNNIaFCell) + subclass = getSubclassFromModule_(CurrentSubclassModule_, basePyNNIaFCell) if subclass is not None: return subclass(*args_, **kwargs_) if basePyNNIaFCell.subclass: return basePyNNIaFCell.subclass(*args_, **kwargs_) else: return basePyNNIaFCell(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): - if ( - super(basePyNNIaFCell, self).hasContent_() - ): + if super(basePyNNIaFCell, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='basePyNNIaFCell', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('basePyNNIaFCell') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="basePyNNIaFCell", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("basePyNNIaFCell") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='basePyNNIaFCell') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="basePyNNIaFCell" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='basePyNNIaFCell', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="basePyNNIaFCell", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='basePyNNIaFCell'): - super(basePyNNIaFCell, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='basePyNNIaFCell') - if self.tau_m is not None and 'tau_m' not in already_processed: - already_processed.add('tau_m') - outfile.write(' tau_m="%s"' % self.gds_format_float(self.tau_m, input_name='tau_m')) - if self.tau_refrac is not None and 'tau_refrac' not in already_processed: - already_processed.add('tau_refrac') - outfile.write(' tau_refrac="%s"' % 
self.gds_format_float(self.tau_refrac, input_name='tau_refrac')) - if self.v_reset is not None and 'v_reset' not in already_processed: - already_processed.add('v_reset') - outfile.write(' v_reset="%s"' % self.gds_format_float(self.v_reset, input_name='v_reset')) - if self.v_rest is not None and 'v_rest' not in already_processed: - already_processed.add('v_rest') - outfile.write(' v_rest="%s"' % self.gds_format_float(self.v_rest, input_name='v_rest')) - if self.v_thresh is not None and 'v_thresh' not in already_processed: - already_processed.add('v_thresh') - outfile.write(' v_thresh="%s"' % self.gds_format_float(self.v_thresh, input_name='v_thresh')) - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="basePyNNIaFCell", + ): + super(basePyNNIaFCell, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="basePyNNIaFCell" + ) + if self.tau_m is not None and "tau_m" not in already_processed: + already_processed.add("tau_m") + outfile.write( + ' tau_m="%s"' % self.gds_format_float(self.tau_m, input_name="tau_m") + ) + if self.tau_refrac is not None and "tau_refrac" not in already_processed: + already_processed.add("tau_refrac") + outfile.write( + ' tau_refrac="%s"' + % self.gds_format_float(self.tau_refrac, input_name="tau_refrac") + ) + if self.v_reset is not None and "v_reset" not in already_processed: + already_processed.add("v_reset") + outfile.write( + ' v_reset="%s"' + % self.gds_format_float(self.v_reset, input_name="v_reset") + ) + if self.v_rest is not None and "v_rest" not in already_processed: + already_processed.add("v_rest") + outfile.write( + ' v_rest="%s"' % self.gds_format_float(self.v_rest, input_name="v_rest") + ) + if self.v_thresh is not None and "v_thresh" not in already_processed: + already_processed.add("v_thresh") + outfile.write( + ' v_thresh="%s"' + % self.gds_format_float(self.v_thresh, input_name="v_thresh") + ) + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='basePyNNIaFCell', fromsubclass_=False, pretty_print=True): - super(basePyNNIaFCell, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="basePyNNIaFCell", + fromsubclass_=False, + pretty_print=True, + ): + super(basePyNNIaFCell, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -20626,128 +38135,229 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('tau_m', node) - if value is not None and 'tau_m' not in already_processed: - already_processed.add('tau_m') + value = find_attr_value_("tau_m", node) + if value is not None and "tau_m" not in already_processed: + 
already_processed.add("tau_m") try: self.tau_m = float(value) except ValueError as exp: - raise ValueError('Bad float/double attribute (tau_m): %s' % exp) - value = find_attr_value_('tau_refrac', node) - if value is not None and 'tau_refrac' not in already_processed: - already_processed.add('tau_refrac') + raise ValueError("Bad float/double attribute (tau_m): %s" % exp) + value = find_attr_value_("tau_refrac", node) + if value is not None and "tau_refrac" not in already_processed: + already_processed.add("tau_refrac") try: self.tau_refrac = float(value) except ValueError as exp: - raise ValueError('Bad float/double attribute (tau_refrac): %s' % exp) - value = find_attr_value_('v_reset', node) - if value is not None and 'v_reset' not in already_processed: - already_processed.add('v_reset') + raise ValueError("Bad float/double attribute (tau_refrac): %s" % exp) + value = find_attr_value_("v_reset", node) + if value is not None and "v_reset" not in already_processed: + already_processed.add("v_reset") try: self.v_reset = float(value) except ValueError as exp: - raise ValueError('Bad float/double attribute (v_reset): %s' % exp) - value = find_attr_value_('v_rest', node) - if value is not None and 'v_rest' not in already_processed: - already_processed.add('v_rest') + raise ValueError("Bad float/double attribute (v_reset): %s" % exp) + value = find_attr_value_("v_rest", node) + if value is not None and "v_rest" not in already_processed: + already_processed.add("v_rest") try: self.v_rest = float(value) except ValueError as exp: - raise ValueError('Bad float/double attribute (v_rest): %s' % exp) - value = find_attr_value_('v_thresh', node) - if value is not None and 'v_thresh' not in already_processed: - already_processed.add('v_thresh') + raise ValueError("Bad float/double attribute (v_rest): %s" % exp) + value = find_attr_value_("v_thresh", node) + if value is not None and "v_thresh" not in already_processed: + already_processed.add("v_thresh") try: self.v_thresh = float(value) except ValueError as exp: - raise ValueError('Bad float/double attribute (v_thresh): %s' % exp) - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + raise ValueError("Bad float/double attribute (v_thresh): %s" % exp) + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value super(basePyNNIaFCell, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(basePyNNIaFCell, self).buildChildren(child_, node, nodeName_, True) pass + + # end class basePyNNIaFCell class ContinuousConnection(BaseConnectionNewFormat): """Individual continuous/analog synaptic connection""" + member_data_items_ = [ - MemberSpec_('pre_component', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('post_component', 'NmlId', 0, 0, {'use': u'required'}), + MemberSpec_("pre_component", "NmlId", 0, 0, {"use": u"required"}), + MemberSpec_("post_component", "NmlId", 0, 0, {"use": u"required"}), ] subclass = None superclass = BaseConnectionNewFormat - def __init__(self, neuro_lex_id=None, id=None, pre_cell=None, pre_segment='0', pre_fraction_along='0.5', post_cell=None, post_segment='0', post_fraction_along='0.5', pre_component=None, post_component=None, extensiontype_=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + pre_cell=None, + pre_segment="0", 
+ pre_fraction_along="0.5", + post_cell=None, + post_segment="0", + post_fraction_along="0.5", + pre_component=None, + post_component=None, + extensiontype_=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(ContinuousConnection, self).__init__(neuro_lex_id, id, pre_cell, pre_segment, pre_fraction_along, post_cell, post_segment, post_fraction_along, extensiontype_, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(ContinuousConnection, self).__init__( + neuro_lex_id, + id, + pre_cell, + pre_segment, + pre_fraction_along, + post_cell, + post_segment, + post_fraction_along, + extensiontype_, + **kwargs_ + ) self.pre_component = _cast(None, pre_component) self.post_component = _cast(None, post_component) self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, ContinuousConnection) + CurrentSubclassModule_, ContinuousConnection + ) if subclass is not None: return subclass(*args_, **kwargs_) if ContinuousConnection.subclass: return ContinuousConnection.subclass(*args_, **kwargs_) else: return ContinuousConnection(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] + self.validate_NmlId_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [[u"^[a-zA-Z_][a-zA-Z0-9_]*$"]] + def hasContent_(self): - if ( - super(ContinuousConnection, self).hasContent_() - ): + if super(ContinuousConnection, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ContinuousConnection', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('ContinuousConnection') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ContinuousConnection", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ContinuousConnection") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ContinuousConnection') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ContinuousConnection", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ContinuousConnection', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - 
outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ContinuousConnection'): - super(ContinuousConnection, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ContinuousConnection') - if self.pre_component is not None and 'pre_component' not in already_processed: - already_processed.add('pre_component') - outfile.write(' preComponent=%s' % (quote_attrib(self.pre_component), )) - if self.post_component is not None and 'post_component' not in already_processed: - already_processed.add('post_component') - outfile.write(' postComponent=%s' % (quote_attrib(self.post_component), )) - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ContinuousConnection", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ContinuousConnection", + ): + super(ContinuousConnection, self).exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ContinuousConnection", + ) + if self.pre_component is not None and "pre_component" not in already_processed: + already_processed.add("pre_component") + outfile.write(" preComponent=%s" % (quote_attrib(self.pre_component),)) + if ( + self.post_component is not None + and "post_component" not in already_processed + ): + already_processed.add("post_component") + outfile.write(" postComponent=%s" % (quote_attrib(self.post_component),)) + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ContinuousConnection', fromsubclass_=False, pretty_print=True): - super(ContinuousConnection, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ContinuousConnection", + fromsubclass_=False, + pretty_print=True, + ): + super(ContinuousConnection, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) pass + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -20755,29 +38365,32 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('preComponent', node) - if value is not None and 'preComponent' not in already_processed: - already_processed.add('preComponent') + value = find_attr_value_("preComponent", node) + if value is not None and "preComponent" not in already_processed: + already_processed.add("preComponent") self.pre_component = value - self.validate_NmlId(self.pre_component) # validate type NmlId - value = find_attr_value_('postComponent', node) - if value is not None and 'postComponent' not in already_processed: - already_processed.add('postComponent') + self.validate_NmlId(self.pre_component) # validate 
type NmlId + value = find_attr_value_("postComponent", node) + if value is not None and "postComponent" not in already_processed: + already_processed.add("postComponent") self.post_component = value - self.validate_NmlId(self.post_component) # validate type NmlId - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + self.validate_NmlId(self.post_component) # validate type NmlId + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value - super(ContinuousConnection, self).buildAttributes(node, attrs, already_processed) + super(ContinuousConnection, self).buildAttributes( + node, attrs, already_processed + ) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(ContinuousConnection, self).buildChildren(child_, node, nodeName_, True) pass def _get_cell_id(self, id_string): - return int(float(id_string)) - + return int(float(id_string)) def get_pre_cell_id(self): """Get the ID of the pre-synaptic cell @@ -20825,96 +38438,213 @@ def get_post_fraction_along(self): return float(self.post_fraction_along) - def get_pre_info(self): """Get pre-synaptic information summary""" - return str(self.get_pre_cell_id())+(':'+str(self.get_pre_segment_id())+'('+ '%.5f'%self.get_pre_fraction_along()+')' if self.get_pre_segment_id()!=0 or self.get_pre_fraction_along()!=0.5 else '') + return str(self.get_pre_cell_id()) + ( + ":" + + str(self.get_pre_segment_id()) + + "(" + + "%.5f" % self.get_pre_fraction_along() + + ")" + if self.get_pre_segment_id() != 0 or self.get_pre_fraction_along() != 0.5 + else "" + ) def get_post_info(self): """Get post-synaptic information summary""" - return str(self.get_post_cell_id())+(':'+str(self.get_post_segment_id())+'('+ '%.5f'%self.get_post_fraction_along()+')' if self.get_post_segment_id()!=0 or self.get_post_fraction_along()!=0.5 else '') - + return str(self.get_post_cell_id()) + ( + ":" + + str(self.get_post_segment_id()) + + "(" + + "%.5f" % self.get_post_fraction_along() + + ")" + if self.get_post_segment_id() != 0 or self.get_post_fraction_along() != 0.5 + else "" + ) def __str__(self): - return "Continuous Connection "+str(self.id)+": "+str(self.get_pre_info())+" -> "+str(self.get_post_info())+ ", pre comp: "+str(self.pre_component)+", post comp: "+str(self.post_component) - + return ( + "Continuous Connection " + + str(self.id) + + ": " + + str(self.get_pre_info()) + + " -> " + + str(self.get_post_info()) + + ", pre comp: " + + str(self.pre_component) + + ", post comp: " + + str(self.post_component) + ) # end class ContinuousConnection class ElectricalConnection(BaseConnectionNewFormat): """Individual electrical synaptic connection""" + member_data_items_ = [ - MemberSpec_('synapse', 'NmlId', 0, 0, {'use': u'required'}), + MemberSpec_("synapse", "NmlId", 0, 0, {"use": u"required"}), ] subclass = None superclass = BaseConnectionNewFormat - def __init__(self, neuro_lex_id=None, id=None, pre_cell=None, pre_segment='0', pre_fraction_along='0.5', post_cell=None, post_segment='0', post_fraction_along='0.5', synapse=None, extensiontype_=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + pre_cell=None, + pre_segment="0", + pre_fraction_along="0.5", + post_cell=None, + post_segment="0", + post_fraction_along="0.5", + synapse=None, + extensiontype_=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = 
kwargs_.get('parent_object_') - super(ElectricalConnection, self).__init__(neuro_lex_id, id, pre_cell, pre_segment, pre_fraction_along, post_cell, post_segment, post_fraction_along, extensiontype_, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(ElectricalConnection, self).__init__( + neuro_lex_id, + id, + pre_cell, + pre_segment, + pre_fraction_along, + post_cell, + post_segment, + post_fraction_along, + extensiontype_, + **kwargs_ + ) self.synapse = _cast(None, synapse) self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, ElectricalConnection) + CurrentSubclassModule_, ElectricalConnection + ) if subclass is not None: return subclass(*args_, **kwargs_) if ElectricalConnection.subclass: return ElectricalConnection.subclass(*args_, **kwargs_) else: return ElectricalConnection(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] + self.validate_NmlId_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [[u"^[a-zA-Z_][a-zA-Z0-9_]*$"]] + def hasContent_(self): - if ( - super(ElectricalConnection, self).hasContent_() - ): + if super(ElectricalConnection, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ElectricalConnection', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('ElectricalConnection') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ElectricalConnection", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ElectricalConnection") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ElectricalConnection') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ElectricalConnection", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ElectricalConnection', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ElectricalConnection'): - super(ElectricalConnection, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ElectricalConnection') - if 
self.synapse is not None and 'synapse' not in already_processed: - already_processed.add('synapse') - outfile.write(' synapse=%s' % (quote_attrib(self.synapse), )) - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ElectricalConnection", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ElectricalConnection", + ): + super(ElectricalConnection, self).exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ElectricalConnection", + ) + if self.synapse is not None and "synapse" not in already_processed: + already_processed.add("synapse") + outfile.write(" synapse=%s" % (quote_attrib(self.synapse),)) + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ElectricalConnection', fromsubclass_=False, pretty_print=True): - super(ElectricalConnection, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ElectricalConnection", + fromsubclass_=False, + pretty_print=True, + ): + super(ElectricalConnection, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) pass + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -20922,23 +38652,27 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('synapse', node) - if value is not None and 'synapse' not in already_processed: - already_processed.add('synapse') + value = find_attr_value_("synapse", node) + if value is not None and "synapse" not in already_processed: + already_processed.add("synapse") self.synapse = value - self.validate_NmlId(self.synapse) # validate type NmlId - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + self.validate_NmlId(self.synapse) # validate type NmlId + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value - super(ElectricalConnection, self).buildAttributes(node, attrs, already_processed) + super(ElectricalConnection, self).buildAttributes( + node, attrs, already_processed + ) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(ElectricalConnection, self).buildChildren(child_, node, nodeName_, True) pass def _get_cell_id(self, id_string): - return int(float(id_string)) + return int(float(id_string)) def get_pre_cell_id(self): """Get the ID of the pre-synaptic cell @@ -20986,96 +38720,204 @@ def get_post_fraction_along(self): return float(self.post_fraction_along) - def get_pre_info(self): """Get pre-synaptic 
information summary""" - return str(self.get_pre_cell_id())+(':'+str(self.get_pre_segment_id())+'('+ '%.5f'%self.get_pre_fraction_along()+')' if self.get_pre_segment_id()!=0 or self.get_pre_fraction_along()!=0.5 else '') + return str(self.get_pre_cell_id()) + ( + ":" + + str(self.get_pre_segment_id()) + + "(" + + "%.5f" % self.get_pre_fraction_along() + + ")" + if self.get_pre_segment_id() != 0 or self.get_pre_fraction_along() != 0.5 + else "" + ) def get_post_info(self): """Get post-synaptic information summary""" - return str(self.get_post_cell_id())+(':'+str(self.get_post_segment_id())+'('+ '%.5f'%self.get_post_fraction_along()+')' if self.get_post_segment_id()!=0 or self.get_post_fraction_along()!=0.5 else '') - + return str(self.get_post_cell_id()) + ( + ":" + + str(self.get_post_segment_id()) + + "(" + + "%.5f" % self.get_post_fraction_along() + + ")" + if self.get_post_segment_id() != 0 or self.get_post_fraction_along() != 0.5 + else "" + ) def __str__(self): - return "Electrical Connection "+str(self.id)+": "+str(self.get_pre_info())+" -> "+str(self.get_post_info())+ ", synapse: "+str(self.synapse) - + return ( + "Electrical Connection " + + str(self.id) + + ": " + + str(self.get_pre_info()) + + " -> " + + str(self.get_post_info()) + + ", synapse: " + + str(self.synapse) + ) # end class ElectricalConnection class ConnectionWD(BaseConnectionOldFormat): """Individual synaptic connection with weight and delay""" + member_data_items_ = [ - MemberSpec_('weight', 'xs:float', 0, 0, {'use': u'required'}), - MemberSpec_('delay', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), + MemberSpec_("weight", "xs:float", 0, 0, {"use": u"required"}), + MemberSpec_("delay", "Nml2Quantity_time", 0, 0, {"use": u"required"}), ] subclass = None superclass = BaseConnectionOldFormat - def __init__(self, neuro_lex_id=None, id=None, pre_cell_id=None, pre_segment_id='0', pre_fraction_along='0.5', post_cell_id=None, post_segment_id='0', post_fraction_along='0.5', weight=None, delay=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + pre_cell_id=None, + pre_segment_id="0", + pre_fraction_along="0.5", + post_cell_id=None, + post_segment_id="0", + post_fraction_along="0.5", + weight=None, + delay=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(ConnectionWD, self).__init__(neuro_lex_id, id, pre_cell_id, pre_segment_id, pre_fraction_along, post_cell_id, post_segment_id, post_fraction_along, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(ConnectionWD, self).__init__( + neuro_lex_id, + id, + pre_cell_id, + pre_segment_id, + pre_fraction_along, + post_cell_id, + post_segment_id, + post_fraction_along, + **kwargs_ + ) self.weight = _cast(float, weight) self.delay = _cast(None, delay) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, ConnectionWD) + subclass = getSubclassFromModule_(CurrentSubclassModule_, ConnectionWD) if subclass is not None: return subclass(*args_, **kwargs_) if ConnectionWD.subclass: return ConnectionWD.subclass(*args_, **kwargs_) else: return ConnectionWD(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. 
if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] + self.validate_Nml2Quantity_time_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_time_patterns_, + ) + ) + + validate_Nml2Quantity_time_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$"] + ] + def hasContent_(self): - if ( - super(ConnectionWD, self).hasContent_() - ): + if super(ConnectionWD, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ConnectionWD', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('ConnectionWD') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ConnectionWD", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ConnectionWD") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ConnectionWD') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="ConnectionWD" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ConnectionWD', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ConnectionWD'): - super(ConnectionWD, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ConnectionWD') - if self.weight is not None and 'weight' not in already_processed: - already_processed.add('weight') - outfile.write(' weight="%s"' % self.gds_format_float(self.weight, input_name='weight')) - if self.delay is not None and 'delay' not in already_processed: - already_processed.add('delay') - outfile.write(' delay=%s' % (quote_attrib(self.delay), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ConnectionWD', fromsubclass_=False, pretty_print=True): - super(ConnectionWD, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ConnectionWD", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ConnectionWD", + ): + super(ConnectionWD, self).exportAttributes( + outfile, level, 
already_processed, namespaceprefix_, name_="ConnectionWD" + ) + if self.weight is not None and "weight" not in already_processed: + already_processed.add("weight") + outfile.write( + ' weight="%s"' % self.gds_format_float(self.weight, input_name="weight") + ) + if self.delay is not None and "delay" not in already_processed: + already_processed.add("delay") + outfile.write(" delay=%s" % (quote_attrib(self.delay),)) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ConnectionWD", + fromsubclass_=False, + pretty_print=True, + ): + super(ConnectionWD, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) pass + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -21083,29 +38925,33 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('weight', node) - if value is not None and 'weight' not in already_processed: - already_processed.add('weight') + value = find_attr_value_("weight", node) + if value is not None and "weight" not in already_processed: + already_processed.add("weight") try: self.weight = float(value) except ValueError as exp: - raise ValueError('Bad float/double attribute (weight): %s' % exp) - value = find_attr_value_('delay', node) - if value is not None and 'delay' not in already_processed: - already_processed.add('delay') + raise ValueError("Bad float/double attribute (weight): %s" % exp) + value = find_attr_value_("delay", node) + if value is not None and "delay" not in already_processed: + already_processed.add("delay") self.delay = value - self.validate_Nml2Quantity_time(self.delay) # validate type Nml2Quantity_time + self.validate_Nml2Quantity_time( + self.delay + ) # validate type Nml2Quantity_time super(ConnectionWD, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(ConnectionWD, self).buildChildren(child_, node, nodeName_, True) pass def _get_cell_id(self, id_string): - if '[' in id_string: - return int(id_string.split('[')[1].split(']')[0]) + if "[" in id_string: + return int(id_string.split("[")[1].split("]")[0]) else: - return int(id_string.split('/')[2]) + return int(id_string.split("/")[2]) def get_pre_cell_id(self): """Get the ID of the pre-synaptic cell @@ -21153,25 +38999,58 @@ def get_post_fraction_along(self): return float(self.post_fraction_along) - def get_pre_info(self): """Get pre-synaptic information summary""" - return str(self.get_pre_cell_id())+(':'+str(self.get_pre_segment_id())+'('+ '%.5f'%self.get_pre_fraction_along()+')' if self.get_pre_segment_id()!=0 or self.get_pre_fraction_along()!=0.5 else '') + return str(self.get_pre_cell_id()) + ( + ":" + + str(self.get_pre_segment_id()) + + "(" + + "%.5f" % self.get_pre_fraction_along() + + ")" + if self.get_pre_segment_id() != 0 or self.get_pre_fraction_along() != 0.5 + else "" + ) def get_post_info(self): """Get post-synaptic information summary""" - return str(self.get_post_cell_id())+(':'+str(self.get_post_segment_id())+'('+ '%.5f'%self.get_post_fraction_along()+')' if self.get_post_segment_id()!=0 or self.get_post_fraction_along()!=0.5 else '') + return str(self.get_post_cell_id()) + ( + ":" + + str(self.get_post_segment_id()) + + "(" + + "%.5f" % self.get_post_fraction_along() + + ")" + if 
self.get_post_segment_id() != 0 or self.get_post_fraction_along() != 0.5 + else "" + ) def __str__(self): - return "Connection "+str(self.id)+": "+str(self.get_pre_info())+" -> "+str(self.get_post_info()) + return ( + "Connection " + + str(self.id) + + ": " + + str(self.get_pre_info()) + + " -> " + + str(self.get_post_info()) + ) - def __str__(self): - return "Connection "+str(self.id)+": "+str(self.get_pre_info())+" -> "+str(self.get_post_info())+ ", weight: "+'%f' % (float(self.weight))+", delay: "+'%.5f' % (self.get_delay_in_ms())+" ms" + return ( + "Connection " + + str(self.id) + + ": " + + str(self.get_pre_info()) + + " -> " + + str(self.get_post_info()) + + ", weight: " + + "%f" % (float(self.weight)) + + ", delay: " + + "%.5f" % (self.get_delay_in_ms()) + + " ms" + ) def get_delay_in_ms(self): """Get connection delay in milli seconds @@ -21179,10 +39058,10 @@ def get_delay_in_ms(self): :returns: connection delay in milli seconds :rtype: float """ - if 'ms' in self.delay: + if "ms" in self.delay: return float(self.delay[:-2].strip()) - elif 's' in self.delay: - return float(self.delay[:-1].strip())*1000.0 + elif "s" in self.delay: + return float(self.delay[:-1].strip()) * 1000.0 # end class ConnectionWD @@ -21190,57 +39069,122 @@ def get_delay_in_ms(self): class Connection(BaseConnectionOldFormat): """Individual chemical (event based) synaptic connection, weight==1 and no delay""" - member_data_items_ = [ - ] + + member_data_items_ = [] subclass = None superclass = BaseConnectionOldFormat - def __init__(self, neuro_lex_id=None, id=None, pre_cell_id=None, pre_segment_id='0', pre_fraction_along='0.5', post_cell_id=None, post_segment_id='0', post_fraction_along='0.5', **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + pre_cell_id=None, + pre_segment_id="0", + pre_fraction_along="0.5", + post_cell_id=None, + post_segment_id="0", + post_fraction_along="0.5", + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(Connection, self).__init__(neuro_lex_id, id, pre_cell_id, pre_segment_id, pre_fraction_along, post_cell_id, post_segment_id, post_fraction_along, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(Connection, self).__init__( + neuro_lex_id, + id, + pre_cell_id, + pre_segment_id, + pre_fraction_along, + post_cell_id, + post_segment_id, + post_fraction_along, + **kwargs_ + ) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, Connection) + subclass = getSubclassFromModule_(CurrentSubclassModule_, Connection) if subclass is not None: return subclass(*args_, **kwargs_) if Connection.subclass: return Connection.subclass(*args_, **kwargs_) else: return Connection(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): - if ( - super(Connection, self).hasContent_() - ): + if super(Connection, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Connection', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('Connection') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Connection", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("Connection") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not 
None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Connection') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Connection" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Connection', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Connection'): - super(Connection, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Connection') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Connection', fromsubclass_=False, pretty_print=True): - super(Connection, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="Connection", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="Connection" + ): + super(Connection, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Connection" + ) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Connection", + fromsubclass_=False, + pretty_print=True, + ): + super(Connection, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) pass + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -21248,17 +39192,19 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): super(Connection, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(Connection, self).buildChildren(child_, node, nodeName_, True) pass def _get_cell_id(self, id_string): - if '[' in id_string: - return int(id_string.split('[')[1].split(']')[0]) + if "[" in id_string: + return int(id_string.split("[")[1].split("]")[0]) else: - return int(id_string.split('/')[2]) + return int(id_string.split("/")[2]) def get_pre_cell_id(self): """Get the ID of the pre-synaptic cell @@ -21306,85 +39252,202 @@ def get_post_fraction_along(self): return float(self.post_fraction_along) - def get_pre_info(self): """Get pre-synaptic information summary""" - return str(self.get_pre_cell_id())+(':'+str(self.get_pre_segment_id())+'('+ '%.5f'%self.get_pre_fraction_along()+')' if self.get_pre_segment_id()!=0 or self.get_pre_fraction_along()!=0.5 else '') + return str(self.get_pre_cell_id()) + ( + ":" + + str(self.get_pre_segment_id()) + + "(" + + "%.5f" % self.get_pre_fraction_along() + + ")" + if self.get_pre_segment_id() != 0 or self.get_pre_fraction_along() != 0.5 + else 
"" + ) def get_post_info(self): """Get post-synaptic information summary""" - return str(self.get_post_cell_id())+(':'+str(self.get_post_segment_id())+'('+ '%.5f'%self.get_post_fraction_along()+')' if self.get_post_segment_id()!=0 or self.get_post_fraction_along()!=0.5 else '') + return str(self.get_post_cell_id()) + ( + ":" + + str(self.get_post_segment_id()) + + "(" + + "%.5f" % self.get_post_fraction_along() + + ")" + if self.get_post_segment_id() != 0 or self.get_post_fraction_along() != 0.5 + else "" + ) def __str__(self): - return "Connection "+str(self.id)+": "+str(self.get_pre_info())+" -> "+str(self.get_post_info()) + return ( + "Connection " + + str(self.id) + + ": " + + str(self.get_pre_info()) + + " -> " + + str(self.get_post_info()) + ) # end class Connection class Cell2CaPools(Cell): member_data_items_ = [ - MemberSpec_('biophysical_properties2_ca_pools', 'BiophysicalProperties2CaPools', 0, 1, {u'type': u'BiophysicalProperties2CaPools', u'name': u'biophysicalProperties2CaPools', u'minOccurs': u'0'}, None), + MemberSpec_( + "biophysical_properties2_ca_pools", + "BiophysicalProperties2CaPools", + 0, + 1, + { + u"type": u"BiophysicalProperties2CaPools", + u"name": u"biophysicalProperties2CaPools", + u"minOccurs": u"0", + }, + None, + ), ] subclass = None superclass = Cell - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, morphology_attr=None, biophysical_properties_attr=None, morphology=None, biophysical_properties=None, biophysical_properties2_ca_pools=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + morphology_attr=None, + biophysical_properties_attr=None, + morphology=None, + biophysical_properties=None, + biophysical_properties2_ca_pools=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(Cell2CaPools, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, morphology_attr, biophysical_properties_attr, morphology, biophysical_properties, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(Cell2CaPools, self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + morphology_attr, + biophysical_properties_attr, + morphology, + biophysical_properties, + **kwargs_ + ) self.biophysical_properties2_ca_pools = biophysical_properties2_ca_pools + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, Cell2CaPools) + subclass = getSubclassFromModule_(CurrentSubclassModule_, Cell2CaPools) if subclass is not None: return subclass(*args_, **kwargs_) if Cell2CaPools.subclass: return Cell2CaPools.subclass(*args_, **kwargs_) else: return Cell2CaPools(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): if ( - self.biophysical_properties2_ca_pools is not None or - super(Cell2CaPools, self).hasContent_() + self.biophysical_properties2_ca_pools is not None + or super(Cell2CaPools, self).hasContent_() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Cell2CaPools', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('Cell2CaPools') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Cell2CaPools", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("Cell2CaPools") if 
imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Cell2CaPools') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Cell2CaPools" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Cell2CaPools', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="Cell2CaPools", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Cell2CaPools'): - super(Cell2CaPools, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Cell2CaPools') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Cell2CaPools', fromsubclass_=False, pretty_print=True): - super(Cell2CaPools, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="Cell2CaPools", + ): + super(Cell2CaPools, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Cell2CaPools" + ) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Cell2CaPools", + fromsubclass_=False, + pretty_print=True, + ): + super(Cell2CaPools, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.biophysical_properties2_ca_pools is not None: - self.biophysical_properties2_ca_pools.export(outfile, level, namespaceprefix_, namespacedef_='', name_='biophysicalProperties2CaPools', pretty_print=pretty_print) + self.biophysical_properties2_ca_pools.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="biophysicalProperties2CaPools", + pretty_print=pretty_print, + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -21392,37 +39455,64 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): super(Cell2CaPools, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'biophysicalProperties2CaPools': + if nodeName_ == "biophysicalProperties2CaPools": obj_ = BiophysicalProperties2CaPools.factory(parent_object_=self) obj_.build(child_) self.biophysical_properties2_ca_pools = obj_ - obj_.original_tagname_ = 
'biophysicalProperties2CaPools' + obj_.original_tagname_ = "biophysicalProperties2CaPools" super(Cell2CaPools, self).buildChildren(child_, node, nodeName_, True) + + # end class Cell2CaPools class AdExIaFCell(BaseCellMembPotCap): member_data_items_ = [ - MemberSpec_('g_l', 'Nml2Quantity_conductance', 0, 0, {'use': u'required'}), - MemberSpec_('EL', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('reset', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('VT', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('thresh', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('del_t', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('tauw', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('refract', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('a', 'Nml2Quantity_conductance', 0, 0, {'use': u'required'}), - MemberSpec_('b', 'Nml2Quantity_current', 0, 0, {'use': u'required'}), + MemberSpec_("g_l", "Nml2Quantity_conductance", 0, 0, {"use": u"required"}), + MemberSpec_("EL", "Nml2Quantity_voltage", 0, 0, {"use": u"required"}), + MemberSpec_("reset", "Nml2Quantity_voltage", 0, 0, {"use": u"required"}), + MemberSpec_("VT", "Nml2Quantity_voltage", 0, 0, {"use": u"required"}), + MemberSpec_("thresh", "Nml2Quantity_voltage", 0, 0, {"use": u"required"}), + MemberSpec_("del_t", "Nml2Quantity_voltage", 0, 0, {"use": u"required"}), + MemberSpec_("tauw", "Nml2Quantity_time", 0, 0, {"use": u"required"}), + MemberSpec_("refract", "Nml2Quantity_time", 0, 0, {"use": u"required"}), + MemberSpec_("a", "Nml2Quantity_conductance", 0, 0, {"use": u"required"}), + MemberSpec_("b", "Nml2Quantity_current", 0, 0, {"use": u"required"}), ] subclass = None superclass = BaseCellMembPotCap - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, C=None, g_l=None, EL=None, reset=None, VT=None, thresh=None, del_t=None, tauw=None, refract=None, a=None, b=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + C=None, + g_l=None, + EL=None, + reset=None, + VT=None, + thresh=None, + del_t=None, + tauw=None, + refract=None, + a=None, + b=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(AdExIaFCell, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, C, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(AdExIaFCell, self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, C, **kwargs_ + ) self.g_l = _cast(None, g_l) self.EL = _cast(None, EL) self.reset = _cast(None, reset) @@ -21433,107 +39523,199 @@ def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properti self.refract = _cast(None, refract) self.a = _cast(None, a) self.b = _cast(None, b) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, AdExIaFCell) + subclass = getSubclassFromModule_(CurrentSubclassModule_, AdExIaFCell) if subclass is not None: return subclass(*args_, **kwargs_) if AdExIaFCell.subclass: return AdExIaFCell.subclass(*args_, **kwargs_) else: return AdExIaFCell(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_conductance(self, value): # Validate type Nml2Quantity_conductance, a restriction on xs:string. 
if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_conductance_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_conductance_patterns_, )) - validate_Nml2Quantity_conductance_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S|mS|uS|nS|pS)$']] + self.validate_Nml2Quantity_conductance_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_conductance_patterns_, + ) + ) + + validate_Nml2Quantity_conductance_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S|mS|uS|nS|pS)$"] + ] + def validate_Nml2Quantity_voltage(self, value): # Validate type Nml2Quantity_voltage, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_voltage_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_voltage_patterns_, )) - validate_Nml2Quantity_voltage_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$']] + self.validate_Nml2Quantity_voltage_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_voltage_patterns_, + ) + ) + + validate_Nml2Quantity_voltage_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$"] + ] + def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] + self.validate_Nml2Quantity_time_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_time_patterns_, + ) + ) + + validate_Nml2Quantity_time_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$"] + ] + def validate_Nml2Quantity_current(self, value): # Validate type Nml2Quantity_current, a restriction on xs:string. 
if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_current_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_current_patterns_, )) - validate_Nml2Quantity_current_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(A|uA|nA|pA)$']] + self.validate_Nml2Quantity_current_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_current_patterns_, + ) + ) + + validate_Nml2Quantity_current_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(A|uA|nA|pA)$"] + ] + def hasContent_(self): - if ( - super(AdExIaFCell, self).hasContent_() - ): + if super(AdExIaFCell, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='AdExIaFCell', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('AdExIaFCell') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="AdExIaFCell", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("AdExIaFCell") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='AdExIaFCell') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="AdExIaFCell" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='AdExIaFCell', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="AdExIaFCell", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='AdExIaFCell'): - super(AdExIaFCell, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='AdExIaFCell') - if self.g_l is not None and 'g_l' not in already_processed: - already_processed.add('g_l') - outfile.write(' gL=%s' % (quote_attrib(self.g_l), )) - if self.EL is not None and 'EL' not in already_processed: - already_processed.add('EL') - outfile.write(' EL=%s' % (quote_attrib(self.EL), )) - if self.reset is not None and 'reset' not in already_processed: - already_processed.add('reset') - outfile.write(' reset=%s' % (quote_attrib(self.reset), )) - if self.VT is not None and 'VT' not in already_processed: - already_processed.add('VT') - outfile.write(' VT=%s' % (quote_attrib(self.VT), )) - if self.thresh is not None and 'thresh' not in already_processed: - already_processed.add('thresh') - outfile.write(' thresh=%s' % (quote_attrib(self.thresh), )) - if self.del_t is not None and 'del_t' not in already_processed: - 
already_processed.add('del_t') - outfile.write(' delT=%s' % (quote_attrib(self.del_t), )) - if self.tauw is not None and 'tauw' not in already_processed: - already_processed.add('tauw') - outfile.write(' tauw=%s' % (quote_attrib(self.tauw), )) - if self.refract is not None and 'refract' not in already_processed: - already_processed.add('refract') - outfile.write(' refract=%s' % (quote_attrib(self.refract), )) - if self.a is not None and 'a' not in already_processed: - already_processed.add('a') - outfile.write(' a=%s' % (quote_attrib(self.a), )) - if self.b is not None and 'b' not in already_processed: - already_processed.add('b') - outfile.write(' b=%s' % (quote_attrib(self.b), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='AdExIaFCell', fromsubclass_=False, pretty_print=True): - super(AdExIaFCell, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="AdExIaFCell", + ): + super(AdExIaFCell, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="AdExIaFCell" + ) + if self.g_l is not None and "g_l" not in already_processed: + already_processed.add("g_l") + outfile.write(" gL=%s" % (quote_attrib(self.g_l),)) + if self.EL is not None and "EL" not in already_processed: + already_processed.add("EL") + outfile.write(" EL=%s" % (quote_attrib(self.EL),)) + if self.reset is not None and "reset" not in already_processed: + already_processed.add("reset") + outfile.write(" reset=%s" % (quote_attrib(self.reset),)) + if self.VT is not None and "VT" not in already_processed: + already_processed.add("VT") + outfile.write(" VT=%s" % (quote_attrib(self.VT),)) + if self.thresh is not None and "thresh" not in already_processed: + already_processed.add("thresh") + outfile.write(" thresh=%s" % (quote_attrib(self.thresh),)) + if self.del_t is not None and "del_t" not in already_processed: + already_processed.add("del_t") + outfile.write(" delT=%s" % (quote_attrib(self.del_t),)) + if self.tauw is not None and "tauw" not in already_processed: + already_processed.add("tauw") + outfile.write(" tauw=%s" % (quote_attrib(self.tauw),)) + if self.refract is not None and "refract" not in already_processed: + already_processed.add("refract") + outfile.write(" refract=%s" % (quote_attrib(self.refract),)) + if self.a is not None and "a" not in already_processed: + already_processed.add("a") + outfile.write(" a=%s" % (quote_attrib(self.a),)) + if self.b is not None and "b" not in already_processed: + already_processed.add("b") + outfile.write(" b=%s" % (quote_attrib(self.b),)) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="AdExIaFCell", + fromsubclass_=False, + pretty_print=True, + ): + super(AdExIaFCell, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -21541,82 +39723,130 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('gL', node) - if value is not None and 'gL' not in already_processed: - already_processed.add('gL') 
+ value = find_attr_value_("gL", node) + if value is not None and "gL" not in already_processed: + already_processed.add("gL") self.g_l = value - self.validate_Nml2Quantity_conductance(self.g_l) # validate type Nml2Quantity_conductance - value = find_attr_value_('EL', node) - if value is not None and 'EL' not in already_processed: - already_processed.add('EL') + self.validate_Nml2Quantity_conductance( + self.g_l + ) # validate type Nml2Quantity_conductance + value = find_attr_value_("EL", node) + if value is not None and "EL" not in already_processed: + already_processed.add("EL") self.EL = value - self.validate_Nml2Quantity_voltage(self.EL) # validate type Nml2Quantity_voltage - value = find_attr_value_('reset', node) - if value is not None and 'reset' not in already_processed: - already_processed.add('reset') + self.validate_Nml2Quantity_voltage( + self.EL + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("reset", node) + if value is not None and "reset" not in already_processed: + already_processed.add("reset") self.reset = value - self.validate_Nml2Quantity_voltage(self.reset) # validate type Nml2Quantity_voltage - value = find_attr_value_('VT', node) - if value is not None and 'VT' not in already_processed: - already_processed.add('VT') + self.validate_Nml2Quantity_voltage( + self.reset + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("VT", node) + if value is not None and "VT" not in already_processed: + already_processed.add("VT") self.VT = value - self.validate_Nml2Quantity_voltage(self.VT) # validate type Nml2Quantity_voltage - value = find_attr_value_('thresh', node) - if value is not None and 'thresh' not in already_processed: - already_processed.add('thresh') + self.validate_Nml2Quantity_voltage( + self.VT + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("thresh", node) + if value is not None and "thresh" not in already_processed: + already_processed.add("thresh") self.thresh = value - self.validate_Nml2Quantity_voltage(self.thresh) # validate type Nml2Quantity_voltage - value = find_attr_value_('delT', node) - if value is not None and 'delT' not in already_processed: - already_processed.add('delT') + self.validate_Nml2Quantity_voltage( + self.thresh + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("delT", node) + if value is not None and "delT" not in already_processed: + already_processed.add("delT") self.del_t = value - self.validate_Nml2Quantity_voltage(self.del_t) # validate type Nml2Quantity_voltage - value = find_attr_value_('tauw', node) - if value is not None and 'tauw' not in already_processed: - already_processed.add('tauw') + self.validate_Nml2Quantity_voltage( + self.del_t + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("tauw", node) + if value is not None and "tauw" not in already_processed: + already_processed.add("tauw") self.tauw = value - self.validate_Nml2Quantity_time(self.tauw) # validate type Nml2Quantity_time - value = find_attr_value_('refract', node) - if value is not None and 'refract' not in already_processed: - already_processed.add('refract') + self.validate_Nml2Quantity_time( + self.tauw + ) # validate type Nml2Quantity_time + value = find_attr_value_("refract", node) + if value is not None and "refract" not in already_processed: + already_processed.add("refract") self.refract = value - self.validate_Nml2Quantity_time(self.refract) # validate type Nml2Quantity_time - value = find_attr_value_('a', node) - if value is not None and 'a' not in 
already_processed: - already_processed.add('a') + self.validate_Nml2Quantity_time( + self.refract + ) # validate type Nml2Quantity_time + value = find_attr_value_("a", node) + if value is not None and "a" not in already_processed: + already_processed.add("a") self.a = value - self.validate_Nml2Quantity_conductance(self.a) # validate type Nml2Quantity_conductance - value = find_attr_value_('b', node) - if value is not None and 'b' not in already_processed: - already_processed.add('b') + self.validate_Nml2Quantity_conductance( + self.a + ) # validate type Nml2Quantity_conductance + value = find_attr_value_("b", node) + if value is not None and "b" not in already_processed: + already_processed.add("b") self.b = value - self.validate_Nml2Quantity_current(self.b) # validate type Nml2Quantity_current + self.validate_Nml2Quantity_current( + self.b + ) # validate type Nml2Quantity_current super(AdExIaFCell, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(AdExIaFCell, self).buildChildren(child_, node, nodeName_, True) pass + + # end class AdExIaFCell class Izhikevich2007Cell(BaseCellMembPotCap): member_data_items_ = [ - MemberSpec_('v0', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('k', 'Nml2Quantity_conductancePerVoltage', 0, 0, {'use': u'required'}), - MemberSpec_('vr', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('vt', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('vpeak', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('a', 'Nml2Quantity_pertime', 0, 0, {'use': u'required'}), - MemberSpec_('b', 'Nml2Quantity_conductance', 0, 0, {'use': u'required'}), - MemberSpec_('c', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('d', 'Nml2Quantity_current', 0, 0, {'use': u'required'}), + MemberSpec_("v0", "Nml2Quantity_voltage", 0, 0, {"use": u"required"}), + MemberSpec_( + "k", "Nml2Quantity_conductancePerVoltage", 0, 0, {"use": u"required"} + ), + MemberSpec_("vr", "Nml2Quantity_voltage", 0, 0, {"use": u"required"}), + MemberSpec_("vt", "Nml2Quantity_voltage", 0, 0, {"use": u"required"}), + MemberSpec_("vpeak", "Nml2Quantity_voltage", 0, 0, {"use": u"required"}), + MemberSpec_("a", "Nml2Quantity_pertime", 0, 0, {"use": u"required"}), + MemberSpec_("b", "Nml2Quantity_conductance", 0, 0, {"use": u"required"}), + MemberSpec_("c", "Nml2Quantity_voltage", 0, 0, {"use": u"required"}), + MemberSpec_("d", "Nml2Quantity_current", 0, 0, {"use": u"required"}), ] subclass = None superclass = BaseCellMembPotCap - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, C=None, v0=None, k=None, vr=None, vt=None, vpeak=None, a=None, b=None, c=None, d=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + C=None, + v0=None, + k=None, + vr=None, + vt=None, + vpeak=None, + a=None, + b=None, + c=None, + d=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(Izhikevich2007Cell, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, C, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(Izhikevich2007Cell, self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, C, **kwargs_ + ) self.v0 = _cast(None, v0) self.k = _cast(None, k) self.vr = _cast(None, vr) @@ -21626,111 +39856,224 @@ def 
__init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properti self.b = _cast(None, b) self.c = _cast(None, c) self.d = _cast(None, d) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, Izhikevich2007Cell) + CurrentSubclassModule_, Izhikevich2007Cell + ) if subclass is not None: return subclass(*args_, **kwargs_) if Izhikevich2007Cell.subclass: return Izhikevich2007Cell.subclass(*args_, **kwargs_) else: return Izhikevich2007Cell(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_voltage(self, value): # Validate type Nml2Quantity_voltage, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_voltage_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_voltage_patterns_, )) - validate_Nml2Quantity_voltage_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$']] + self.validate_Nml2Quantity_voltage_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_voltage_patterns_, + ) + ) + + validate_Nml2Quantity_voltage_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$"] + ] + def validate_Nml2Quantity_conductancePerVoltage(self, value): # Validate type Nml2Quantity_conductancePerVoltage, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_conductancePerVoltage_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_conductancePerVoltage_patterns_, )) - validate_Nml2Quantity_conductancePerVoltage_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S_per_V|nS_per_mV)$']] + self.validate_Nml2Quantity_conductancePerVoltage_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_conductancePerVoltage_patterns_, + ) + ) + + validate_Nml2Quantity_conductancePerVoltage_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S_per_V|nS_per_mV)$"] + ] + def validate_Nml2Quantity_pertime(self, value): # Validate type Nml2Quantity_pertime, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_pertime_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_pertime_patterns_, )) - validate_Nml2Quantity_pertime_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(per_s|per_ms|Hz)$']] + self.validate_Nml2Quantity_pertime_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_pertime_patterns_, + ) + ) + + validate_Nml2Quantity_pertime_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(per_s|per_ms|Hz)$"] + ] + def validate_Nml2Quantity_conductance(self, value): # Validate type Nml2Quantity_conductance, a restriction on xs:string. 
if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_conductance_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_conductance_patterns_, )) - validate_Nml2Quantity_conductance_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S|mS|uS|nS|pS)$']] + self.validate_Nml2Quantity_conductance_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_conductance_patterns_, + ) + ) + + validate_Nml2Quantity_conductance_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S|mS|uS|nS|pS)$"] + ] + def validate_Nml2Quantity_current(self, value): # Validate type Nml2Quantity_current, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_current_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_current_patterns_, )) - validate_Nml2Quantity_current_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(A|uA|nA|pA)$']] + self.validate_Nml2Quantity_current_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_current_patterns_, + ) + ) + + validate_Nml2Quantity_current_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(A|uA|nA|pA)$"] + ] + def hasContent_(self): - if ( - super(Izhikevich2007Cell, self).hasContent_() - ): + if super(Izhikevich2007Cell, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Izhikevich2007Cell', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('Izhikevich2007Cell') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Izhikevich2007Cell", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("Izhikevich2007Cell") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Izhikevich2007Cell') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="Izhikevich2007Cell", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Izhikevich2007Cell', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="Izhikevich2007Cell", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, 
namespaceprefix_='', name_='Izhikevich2007Cell'): - super(Izhikevich2007Cell, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Izhikevich2007Cell') - if self.v0 is not None and 'v0' not in already_processed: - already_processed.add('v0') - outfile.write(' v0=%s' % (quote_attrib(self.v0), )) - if self.k is not None and 'k' not in already_processed: - already_processed.add('k') - outfile.write(' k=%s' % (quote_attrib(self.k), )) - if self.vr is not None and 'vr' not in already_processed: - already_processed.add('vr') - outfile.write(' vr=%s' % (quote_attrib(self.vr), )) - if self.vt is not None and 'vt' not in already_processed: - already_processed.add('vt') - outfile.write(' vt=%s' % (quote_attrib(self.vt), )) - if self.vpeak is not None and 'vpeak' not in already_processed: - already_processed.add('vpeak') - outfile.write(' vpeak=%s' % (quote_attrib(self.vpeak), )) - if self.a is not None and 'a' not in already_processed: - already_processed.add('a') - outfile.write(' a=%s' % (quote_attrib(self.a), )) - if self.b is not None and 'b' not in already_processed: - already_processed.add('b') - outfile.write(' b=%s' % (quote_attrib(self.b), )) - if self.c is not None and 'c' not in already_processed: - already_processed.add('c') - outfile.write(' c=%s' % (quote_attrib(self.c), )) - if self.d is not None and 'd' not in already_processed: - already_processed.add('d') - outfile.write(' d=%s' % (quote_attrib(self.d), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Izhikevich2007Cell', fromsubclass_=False, pretty_print=True): - super(Izhikevich2007Cell, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="Izhikevich2007Cell", + ): + super(Izhikevich2007Cell, self).exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="Izhikevich2007Cell", + ) + if self.v0 is not None and "v0" not in already_processed: + already_processed.add("v0") + outfile.write(" v0=%s" % (quote_attrib(self.v0),)) + if self.k is not None and "k" not in already_processed: + already_processed.add("k") + outfile.write(" k=%s" % (quote_attrib(self.k),)) + if self.vr is not None and "vr" not in already_processed: + already_processed.add("vr") + outfile.write(" vr=%s" % (quote_attrib(self.vr),)) + if self.vt is not None and "vt" not in already_processed: + already_processed.add("vt") + outfile.write(" vt=%s" % (quote_attrib(self.vt),)) + if self.vpeak is not None and "vpeak" not in already_processed: + already_processed.add("vpeak") + outfile.write(" vpeak=%s" % (quote_attrib(self.vpeak),)) + if self.a is not None and "a" not in already_processed: + already_processed.add("a") + outfile.write(" a=%s" % (quote_attrib(self.a),)) + if self.b is not None and "b" not in already_processed: + already_processed.add("b") + outfile.write(" b=%s" % (quote_attrib(self.b),)) + if self.c is not None and "c" not in already_processed: + already_processed.add("c") + outfile.write(" c=%s" % (quote_attrib(self.c),)) + if self.d is not None and "d" not in already_processed: + already_processed.add("d") + outfile.write(" d=%s" % (quote_attrib(self.d),)) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Izhikevich2007Cell", + fromsubclass_=False, + 
pretty_print=True, + ): + super(Izhikevich2007Cell, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -21738,123 +40081,228 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('v0', node) - if value is not None and 'v0' not in already_processed: - already_processed.add('v0') + value = find_attr_value_("v0", node) + if value is not None and "v0" not in already_processed: + already_processed.add("v0") self.v0 = value - self.validate_Nml2Quantity_voltage(self.v0) # validate type Nml2Quantity_voltage - value = find_attr_value_('k', node) - if value is not None and 'k' not in already_processed: - already_processed.add('k') + self.validate_Nml2Quantity_voltage( + self.v0 + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("k", node) + if value is not None and "k" not in already_processed: + already_processed.add("k") self.k = value - self.validate_Nml2Quantity_conductancePerVoltage(self.k) # validate type Nml2Quantity_conductancePerVoltage - value = find_attr_value_('vr', node) - if value is not None and 'vr' not in already_processed: - already_processed.add('vr') + self.validate_Nml2Quantity_conductancePerVoltage( + self.k + ) # validate type Nml2Quantity_conductancePerVoltage + value = find_attr_value_("vr", node) + if value is not None and "vr" not in already_processed: + already_processed.add("vr") self.vr = value - self.validate_Nml2Quantity_voltage(self.vr) # validate type Nml2Quantity_voltage - value = find_attr_value_('vt', node) - if value is not None and 'vt' not in already_processed: - already_processed.add('vt') + self.validate_Nml2Quantity_voltage( + self.vr + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("vt", node) + if value is not None and "vt" not in already_processed: + already_processed.add("vt") self.vt = value - self.validate_Nml2Quantity_voltage(self.vt) # validate type Nml2Quantity_voltage - value = find_attr_value_('vpeak', node) - if value is not None and 'vpeak' not in already_processed: - already_processed.add('vpeak') + self.validate_Nml2Quantity_voltage( + self.vt + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("vpeak", node) + if value is not None and "vpeak" not in already_processed: + already_processed.add("vpeak") self.vpeak = value - self.validate_Nml2Quantity_voltage(self.vpeak) # validate type Nml2Quantity_voltage - value = find_attr_value_('a', node) - if value is not None and 'a' not in already_processed: - already_processed.add('a') + self.validate_Nml2Quantity_voltage( + self.vpeak + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("a", node) + if value is not None and "a" not in already_processed: + already_processed.add("a") self.a = value - self.validate_Nml2Quantity_pertime(self.a) # validate type Nml2Quantity_pertime - value = find_attr_value_('b', node) - if value is not None and 'b' not in already_processed: - already_processed.add('b') + self.validate_Nml2Quantity_pertime( + self.a + ) # validate type Nml2Quantity_pertime + value = find_attr_value_("b", node) + if value is not None and "b" not in already_processed: + already_processed.add("b") self.b = value - self.validate_Nml2Quantity_conductance(self.b) # validate type Nml2Quantity_conductance 
- value = find_attr_value_('c', node) - if value is not None and 'c' not in already_processed: - already_processed.add('c') + self.validate_Nml2Quantity_conductance( + self.b + ) # validate type Nml2Quantity_conductance + value = find_attr_value_("c", node) + if value is not None and "c" not in already_processed: + already_processed.add("c") self.c = value - self.validate_Nml2Quantity_voltage(self.c) # validate type Nml2Quantity_voltage - value = find_attr_value_('d', node) - if value is not None and 'd' not in already_processed: - already_processed.add('d') + self.validate_Nml2Quantity_voltage( + self.c + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("d", node) + if value is not None and "d" not in already_processed: + already_processed.add("d") self.d = value - self.validate_Nml2Quantity_current(self.d) # validate type Nml2Quantity_current + self.validate_Nml2Quantity_current( + self.d + ) # validate type Nml2Quantity_current super(Izhikevich2007Cell, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(Izhikevich2007Cell, self).buildChildren(child_, node, nodeName_, True) pass + + # end class Izhikevich2007Cell class IafRefCell(IafCell): member_data_items_ = [ - MemberSpec_('refract', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), + MemberSpec_("refract", "Nml2Quantity_time", 0, 0, {"use": u"required"}), ] subclass = None superclass = IafCell - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, leak_reversal=None, thresh=None, reset=None, C=None, leak_conductance=None, refract=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + leak_reversal=None, + thresh=None, + reset=None, + C=None, + leak_conductance=None, + refract=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(IafRefCell, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, leak_reversal, thresh, reset, C, leak_conductance, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(IafRefCell, self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + leak_reversal, + thresh, + reset, + C, + leak_conductance, + **kwargs_ + ) self.refract = _cast(None, refract) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, IafRefCell) + subclass = getSubclassFromModule_(CurrentSubclassModule_, IafRefCell) if subclass is not None: return subclass(*args_, **kwargs_) if IafRefCell.subclass: return IafRefCell.subclass(*args_, **kwargs_) else: return IafRefCell(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. 
if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] + self.validate_Nml2Quantity_time_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_time_patterns_, + ) + ) + + validate_Nml2Quantity_time_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$"] + ] + def hasContent_(self): - if ( - super(IafRefCell, self).hasContent_() - ): + if super(IafRefCell, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IafRefCell', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('IafRefCell') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="IafRefCell", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("IafRefCell") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IafRefCell') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="IafRefCell" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='IafRefCell', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="IafRefCell", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='IafRefCell'): - super(IafRefCell, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IafRefCell') - if self.refract is not None and 'refract' not in already_processed: - already_processed.add('refract') - outfile.write(' refract=%s' % (quote_attrib(self.refract), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IafRefCell', fromsubclass_=False, pretty_print=True): - super(IafRefCell, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="IafRefCell" + ): + super(IafRefCell, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="IafRefCell" + ) + if self.refract is not None and "refract" not in already_processed: + already_processed.add("refract") + outfile.write(" refract=%s" % 
(quote_attrib(self.refract),)) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="IafRefCell", + fromsubclass_=False, + pretty_print=True, + ): + super(IafRefCell, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -21862,83 +40310,175 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('refract', node) - if value is not None and 'refract' not in already_processed: - already_processed.add('refract') + value = find_attr_value_("refract", node) + if value is not None and "refract" not in already_processed: + already_processed.add("refract") self.refract = value - self.validate_Nml2Quantity_time(self.refract) # validate type Nml2Quantity_time + self.validate_Nml2Quantity_time( + self.refract + ) # validate type Nml2Quantity_time super(IafRefCell, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(IafRefCell, self).buildChildren(child_, node, nodeName_, True) pass + + # end class IafRefCell class IafTauRefCell(IafTauCell): member_data_items_ = [ - MemberSpec_('refract', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), + MemberSpec_("refract", "Nml2Quantity_time", 0, 0, {"use": u"required"}), ] subclass = None superclass = IafTauCell - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, leak_reversal=None, thresh=None, reset=None, tau=None, refract=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + leak_reversal=None, + thresh=None, + reset=None, + tau=None, + refract=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(IafTauRefCell, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, leak_reversal, thresh, reset, tau, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(IafTauRefCell, self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + leak_reversal, + thresh, + reset, + tau, + **kwargs_ + ) self.refract = _cast(None, refract) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, IafTauRefCell) + subclass = getSubclassFromModule_(CurrentSubclassModule_, IafTauRefCell) if subclass is not None: return subclass(*args_, **kwargs_) if IafTauRefCell.subclass: return IafTauRefCell.subclass(*args_, **kwargs_) else: return IafTauRefCell(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. 
if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] + self.validate_Nml2Quantity_time_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_time_patterns_, + ) + ) + + validate_Nml2Quantity_time_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$"] + ] + def hasContent_(self): - if ( - super(IafTauRefCell, self).hasContent_() - ): + if super(IafTauRefCell, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IafTauRefCell', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('IafTauRefCell') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="IafTauRefCell", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("IafTauRefCell") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IafTauRefCell') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="IafTauRefCell" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='IafTauRefCell', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="IafTauRefCell", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='IafTauRefCell'): - super(IafTauRefCell, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IafTauRefCell') - if self.refract is not None and 'refract' not in already_processed: - already_processed.add('refract') - outfile.write(' refract=%s' % (quote_attrib(self.refract), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IafTauRefCell', fromsubclass_=False, pretty_print=True): - super(IafTauRefCell, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="IafTauRefCell", + ): + super(IafTauRefCell, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="IafTauRefCell" + ) + if self.refract is not None and "refract" not in already_processed: + 
already_processed.add("refract") + outfile.write(" refract=%s" % (quote_attrib(self.refract),)) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="IafTauRefCell", + fromsubclass_=False, + pretty_print=True, + ): + super(IafTauRefCell, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -21946,98 +40486,195 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('refract', node) - if value is not None and 'refract' not in already_processed: - already_processed.add('refract') + value = find_attr_value_("refract", node) + if value is not None and "refract" not in already_processed: + already_processed.add("refract") self.refract = value - self.validate_Nml2Quantity_time(self.refract) # validate type Nml2Quantity_time + self.validate_Nml2Quantity_time( + self.refract + ) # validate type Nml2Quantity_time super(IafTauRefCell, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(IafTauRefCell, self).buildChildren(child_, node, nodeName_, True) pass + + # end class IafTauRefCell class DoubleSynapse(BaseVoltageDepSynapse): member_data_items_ = [ - MemberSpec_('synapse1', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('synapse2', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('synapse1_path', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('synapse2_path', 'xs:string', 0, 0, {'use': u'required'}), + MemberSpec_("synapse1", "NmlId", 0, 0, {"use": u"required"}), + MemberSpec_("synapse2", "NmlId", 0, 0, {"use": u"required"}), + MemberSpec_("synapse1_path", "xs:string", 0, 0, {"use": u"required"}), + MemberSpec_("synapse2_path", "xs:string", 0, 0, {"use": u"required"}), ] subclass = None superclass = BaseVoltageDepSynapse - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, synapse1=None, synapse2=None, synapse1_path=None, synapse2_path=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + synapse1=None, + synapse2=None, + synapse1_path=None, + synapse2_path=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(DoubleSynapse, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(DoubleSynapse, self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_ + ) self.synapse1 = _cast(None, synapse1) self.synapse2 = _cast(None, synapse2) self.synapse1_path = _cast(None, synapse1_path) self.synapse2_path = _cast(None, synapse2_path) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, DoubleSynapse) + subclass = getSubclassFromModule_(CurrentSubclassModule_, DoubleSynapse) if subclass is not None: return subclass(*args_, **kwargs_) if DoubleSynapse.subclass: return DoubleSynapse.subclass(*args_, **kwargs_) else: return DoubleSynapse(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NmlId(self, value): # Validate type 
NmlId, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] + self.validate_NmlId_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [[u"^[a-zA-Z_][a-zA-Z0-9_]*$"]] + def hasContent_(self): - if ( - super(DoubleSynapse, self).hasContent_() - ): + if super(DoubleSynapse, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='DoubleSynapse', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('DoubleSynapse') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="DoubleSynapse", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("DoubleSynapse") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='DoubleSynapse') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="DoubleSynapse" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='DoubleSynapse', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="DoubleSynapse", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='DoubleSynapse'): - super(DoubleSynapse, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='DoubleSynapse') - if self.synapse1 is not None and 'synapse1' not in already_processed: - already_processed.add('synapse1') - outfile.write(' synapse1=%s' % (quote_attrib(self.synapse1), )) - if self.synapse2 is not None and 'synapse2' not in already_processed: - already_processed.add('synapse2') - outfile.write(' synapse2=%s' % (quote_attrib(self.synapse2), )) - if self.synapse1_path is not None and 'synapse1_path' not in already_processed: - already_processed.add('synapse1_path') - outfile.write(' synapse1Path=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.synapse1_path), input_name='synapse1Path')), )) - if self.synapse2_path is not None and 'synapse2_path' not in already_processed: - already_processed.add('synapse2_path') - outfile.write(' synapse2Path=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.synapse2_path), input_name='synapse2Path')), )) - def exportChildren(self, outfile, level, namespaceprefix_='', 
namespacedef_='', name_='DoubleSynapse', fromsubclass_=False, pretty_print=True): - super(DoubleSynapse, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="DoubleSynapse", + ): + super(DoubleSynapse, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="DoubleSynapse" + ) + if self.synapse1 is not None and "synapse1" not in already_processed: + already_processed.add("synapse1") + outfile.write(" synapse1=%s" % (quote_attrib(self.synapse1),)) + if self.synapse2 is not None and "synapse2" not in already_processed: + already_processed.add("synapse2") + outfile.write(" synapse2=%s" % (quote_attrib(self.synapse2),)) + if self.synapse1_path is not None and "synapse1_path" not in already_processed: + already_processed.add("synapse1_path") + outfile.write( + " synapse1Path=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.synapse1_path), input_name="synapse1Path" + ) + ), + ) + ) + if self.synapse2_path is not None and "synapse2_path" not in already_processed: + already_processed.add("synapse2_path") + outfile.write( + " synapse2Path=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.synapse2_path), input_name="synapse2Path" + ) + ), + ) + ) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="DoubleSynapse", + fromsubclass_=False, + pretty_print=True, + ): + super(DoubleSynapse, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -22045,108 +40682,206 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('synapse1', node) - if value is not None and 'synapse1' not in already_processed: - already_processed.add('synapse1') + value = find_attr_value_("synapse1", node) + if value is not None and "synapse1" not in already_processed: + already_processed.add("synapse1") self.synapse1 = value - self.validate_NmlId(self.synapse1) # validate type NmlId - value = find_attr_value_('synapse2', node) - if value is not None and 'synapse2' not in already_processed: - already_processed.add('synapse2') + self.validate_NmlId(self.synapse1) # validate type NmlId + value = find_attr_value_("synapse2", node) + if value is not None and "synapse2" not in already_processed: + already_processed.add("synapse2") self.synapse2 = value - self.validate_NmlId(self.synapse2) # validate type NmlId - value = find_attr_value_('synapse1Path', node) - if value is not None and 'synapse1Path' not in already_processed: - already_processed.add('synapse1Path') + self.validate_NmlId(self.synapse2) # validate type NmlId + value = find_attr_value_("synapse1Path", node) + if value is not None and "synapse1Path" not in already_processed: + already_processed.add("synapse1Path") self.synapse1_path = value - value = find_attr_value_('synapse2Path', node) - if value is not None and 'synapse2Path' not in already_processed: - already_processed.add('synapse2Path') + value = find_attr_value_("synapse2Path", node) + if value is not None and 
"synapse2Path" not in already_processed: + already_processed.add("synapse2Path") self.synapse2_path = value super(DoubleSynapse, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(DoubleSynapse, self).buildChildren(child_, node, nodeName_, True) pass + + # end class DoubleSynapse class AlphaCurrentSynapse(BaseCurrentBasedSynapse): member_data_items_ = [ - MemberSpec_('tau', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('ibase', 'Nml2Quantity_current', 0, 0, {'use': u'required'}), + MemberSpec_("tau", "Nml2Quantity_time", 0, 0, {"use": u"required"}), + MemberSpec_("ibase", "Nml2Quantity_current", 0, 0, {"use": u"required"}), ] subclass = None superclass = BaseCurrentBasedSynapse - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, tau=None, ibase=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + tau=None, + ibase=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(AlphaCurrentSynapse, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(AlphaCurrentSynapse, self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_ + ) self.tau = _cast(None, tau) self.ibase = _cast(None, ibase) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, AlphaCurrentSynapse) + CurrentSubclassModule_, AlphaCurrentSynapse + ) if subclass is not None: return subclass(*args_, **kwargs_) if AlphaCurrentSynapse.subclass: return AlphaCurrentSynapse.subclass(*args_, **kwargs_) else: return AlphaCurrentSynapse(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] + self.validate_Nml2Quantity_time_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_time_patterns_, + ) + ) + + validate_Nml2Quantity_time_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$"] + ] + def validate_Nml2Quantity_current(self, value): # Validate type Nml2Quantity_current, a restriction on xs:string. 
if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_current_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_current_patterns_, )) - validate_Nml2Quantity_current_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(A|uA|nA|pA)$']] + self.validate_Nml2Quantity_current_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_current_patterns_, + ) + ) + + validate_Nml2Quantity_current_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(A|uA|nA|pA)$"] + ] + def hasContent_(self): - if ( - super(AlphaCurrentSynapse, self).hasContent_() - ): + if super(AlphaCurrentSynapse, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='AlphaCurrentSynapse', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('AlphaCurrentSynapse') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="AlphaCurrentSynapse", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("AlphaCurrentSynapse") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='AlphaCurrentSynapse') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="AlphaCurrentSynapse", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='AlphaCurrentSynapse', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="AlphaCurrentSynapse", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='AlphaCurrentSynapse'): - super(AlphaCurrentSynapse, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='AlphaCurrentSynapse') - if self.tau is not None and 'tau' not in already_processed: - already_processed.add('tau') - outfile.write(' tau=%s' % (quote_attrib(self.tau), )) - if self.ibase is not None and 'ibase' not in already_processed: - already_processed.add('ibase') - outfile.write(' ibase=%s' % (quote_attrib(self.ibase), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='AlphaCurrentSynapse', fromsubclass_=False, pretty_print=True): - super(AlphaCurrentSynapse, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + 
self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="AlphaCurrentSynapse", + ): + super(AlphaCurrentSynapse, self).exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="AlphaCurrentSynapse", + ) + if self.tau is not None and "tau" not in already_processed: + already_processed.add("tau") + outfile.write(" tau=%s" % (quote_attrib(self.tau),)) + if self.ibase is not None and "ibase" not in already_processed: + already_processed.add("ibase") + outfile.write(" ibase=%s" % (quote_attrib(self.ibase),)) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="AlphaCurrentSynapse", + fromsubclass_=False, + pretty_print=True, + ): + super(AlphaCurrentSynapse, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -22154,110 +40889,221 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('tau', node) - if value is not None and 'tau' not in already_processed: - already_processed.add('tau') + value = find_attr_value_("tau", node) + if value is not None and "tau" not in already_processed: + already_processed.add("tau") self.tau = value - self.validate_Nml2Quantity_time(self.tau) # validate type Nml2Quantity_time - value = find_attr_value_('ibase', node) - if value is not None and 'ibase' not in already_processed: - already_processed.add('ibase') + self.validate_Nml2Quantity_time(self.tau) # validate type Nml2Quantity_time + value = find_attr_value_("ibase", node) + if value is not None and "ibase" not in already_processed: + already_processed.add("ibase") self.ibase = value - self.validate_Nml2Quantity_current(self.ibase) # validate type Nml2Quantity_current + self.validate_Nml2Quantity_current( + self.ibase + ) # validate type Nml2Quantity_current super(AlphaCurrentSynapse, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(AlphaCurrentSynapse, self).buildChildren(child_, node, nodeName_, True) pass + + # end class AlphaCurrentSynapse class BaseConductanceBasedSynapseTwo(BaseVoltageDepSynapse): member_data_items_ = [ - MemberSpec_('gbase1', 'Nml2Quantity_conductance', 0, 0, {'use': u'required'}), - MemberSpec_('gbase2', 'Nml2Quantity_conductance', 0, 0, {'use': u'required'}), - MemberSpec_('erev', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), + MemberSpec_("gbase1", "Nml2Quantity_conductance", 0, 0, {"use": u"required"}), + MemberSpec_("gbase2", "Nml2Quantity_conductance", 0, 0, {"use": u"required"}), + MemberSpec_("erev", "Nml2Quantity_voltage", 0, 0, {"use": u"required"}), ] subclass = None superclass = BaseVoltageDepSynapse - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, gbase1=None, gbase2=None, erev=None, extensiontype_=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + gbase1=None, + gbase2=None, + erev=None, + extensiontype_=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(BaseConductanceBasedSynapseTwo, self).__init__(neuro_lex_id, id, metaid, notes, properties, 
annotation, extensiontype_, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(BaseConductanceBasedSynapseTwo, self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + extensiontype_, + **kwargs_ + ) self.gbase1 = _cast(None, gbase1) self.gbase2 = _cast(None, gbase2) self.erev = _cast(None, erev) self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, BaseConductanceBasedSynapseTwo) + CurrentSubclassModule_, BaseConductanceBasedSynapseTwo + ) if subclass is not None: return subclass(*args_, **kwargs_) if BaseConductanceBasedSynapseTwo.subclass: return BaseConductanceBasedSynapseTwo.subclass(*args_, **kwargs_) else: return BaseConductanceBasedSynapseTwo(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_conductance(self, value): # Validate type Nml2Quantity_conductance, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_conductance_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_conductance_patterns_, )) - validate_Nml2Quantity_conductance_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S|mS|uS|nS|pS)$']] + self.validate_Nml2Quantity_conductance_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_conductance_patterns_, + ) + ) + + validate_Nml2Quantity_conductance_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S|mS|uS|nS|pS)$"] + ] + def validate_Nml2Quantity_voltage(self, value): # Validate type Nml2Quantity_voltage, a restriction on xs:string. 
if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_voltage_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_voltage_patterns_, )) - validate_Nml2Quantity_voltage_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$']] + self.validate_Nml2Quantity_voltage_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_voltage_patterns_, + ) + ) + + validate_Nml2Quantity_voltage_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$"] + ] + def hasContent_(self): - if ( - super(BaseConductanceBasedSynapseTwo, self).hasContent_() - ): + if super(BaseConductanceBasedSynapseTwo, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseConductanceBasedSynapseTwo', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('BaseConductanceBasedSynapseTwo') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="BaseConductanceBasedSynapseTwo", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get( + "BaseConductanceBasedSynapseTwo" + ) if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseConductanceBasedSynapseTwo') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="BaseConductanceBasedSynapseTwo", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BaseConductanceBasedSynapseTwo', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="BaseConductanceBasedSynapseTwo", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BaseConductanceBasedSynapseTwo'): - super(BaseConductanceBasedSynapseTwo, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseConductanceBasedSynapseTwo') - if self.gbase1 is not None and 'gbase1' not in already_processed: - already_processed.add('gbase1') - outfile.write(' gbase1=%s' % (quote_attrib(self.gbase1), )) - if self.gbase2 is not None and 'gbase2' not in already_processed: - already_processed.add('gbase2') - outfile.write(' gbase2=%s' % (quote_attrib(self.gbase2), )) - if self.erev is not None and 'erev' not in already_processed: - already_processed.add('erev') - outfile.write(' erev=%s' % (quote_attrib(self.erev), )) - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - 
already_processed.add('xsi:type') + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="BaseConductanceBasedSynapseTwo", + ): + super(BaseConductanceBasedSynapseTwo, self).exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="BaseConductanceBasedSynapseTwo", + ) + if self.gbase1 is not None and "gbase1" not in already_processed: + already_processed.add("gbase1") + outfile.write(" gbase1=%s" % (quote_attrib(self.gbase1),)) + if self.gbase2 is not None and "gbase2" not in already_processed: + already_processed.add("gbase2") + outfile.write(" gbase2=%s" % (quote_attrib(self.gbase2),)) + if self.erev is not None and "erev" not in already_processed: + already_processed.add("erev") + outfile.write(" erev=%s" % (quote_attrib(self.erev),)) + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseConductanceBasedSynapseTwo', fromsubclass_=False, pretty_print=True): - super(BaseConductanceBasedSynapseTwo, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="BaseConductanceBasedSynapseTwo", + fromsubclass_=False, + pretty_print=True, + ): + super(BaseConductanceBasedSynapseTwo, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -22265,114 +41111,230 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('gbase1', node) - if value is not None and 'gbase1' not in already_processed: - already_processed.add('gbase1') + value = find_attr_value_("gbase1", node) + if value is not None and "gbase1" not in already_processed: + already_processed.add("gbase1") self.gbase1 = value - self.validate_Nml2Quantity_conductance(self.gbase1) # validate type Nml2Quantity_conductance - value = find_attr_value_('gbase2', node) - if value is not None and 'gbase2' not in already_processed: - already_processed.add('gbase2') + self.validate_Nml2Quantity_conductance( + self.gbase1 + ) # validate type Nml2Quantity_conductance + value = find_attr_value_("gbase2", node) + if value is not None and "gbase2" not in already_processed: + already_processed.add("gbase2") self.gbase2 = value - self.validate_Nml2Quantity_conductance(self.gbase2) # validate type Nml2Quantity_conductance - value = find_attr_value_('erev', node) - if value is not None and 'erev' not in already_processed: - already_processed.add('erev') + self.validate_Nml2Quantity_conductance( + self.gbase2 + ) # validate type Nml2Quantity_conductance + value = find_attr_value_("erev", node) + if value is not None and "erev" not in already_processed: + already_processed.add("erev") self.erev = value - self.validate_Nml2Quantity_voltage(self.erev) # validate type Nml2Quantity_voltage - value = find_attr_value_('xsi:type', node) - if value is not None and 
'xsi:type' not in already_processed: - already_processed.add('xsi:type') + self.validate_Nml2Quantity_voltage( + self.erev + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value - super(BaseConductanceBasedSynapseTwo, self).buildAttributes(node, attrs, already_processed) + super(BaseConductanceBasedSynapseTwo, self).buildAttributes( + node, attrs, already_processed + ) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(BaseConductanceBasedSynapseTwo, self).buildChildren(child_, node, nodeName_, True) + super(BaseConductanceBasedSynapseTwo, self).buildChildren( + child_, node, nodeName_, True + ) pass + + # end class BaseConductanceBasedSynapseTwo class BaseConductanceBasedSynapse(BaseVoltageDepSynapse): member_data_items_ = [ - MemberSpec_('gbase', 'Nml2Quantity_conductance', 0, 0, {'use': u'required'}), - MemberSpec_('erev', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), + MemberSpec_("gbase", "Nml2Quantity_conductance", 0, 0, {"use": u"required"}), + MemberSpec_("erev", "Nml2Quantity_voltage", 0, 0, {"use": u"required"}), ] subclass = None superclass = BaseVoltageDepSynapse - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, gbase=None, erev=None, extensiontype_=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + gbase=None, + erev=None, + extensiontype_=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(BaseConductanceBasedSynapse, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, extensiontype_, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(BaseConductanceBasedSynapse, self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + extensiontype_, + **kwargs_ + ) self.gbase = _cast(None, gbase) self.erev = _cast(None, erev) self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, BaseConductanceBasedSynapse) + CurrentSubclassModule_, BaseConductanceBasedSynapse + ) if subclass is not None: return subclass(*args_, **kwargs_) if BaseConductanceBasedSynapse.subclass: return BaseConductanceBasedSynapse.subclass(*args_, **kwargs_) else: return BaseConductanceBasedSynapse(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_conductance(self, value): # Validate type Nml2Quantity_conductance, a restriction on xs:string. 
if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_conductance_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_conductance_patterns_, )) - validate_Nml2Quantity_conductance_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S|mS|uS|nS|pS)$']] + self.validate_Nml2Quantity_conductance_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_conductance_patterns_, + ) + ) + + validate_Nml2Quantity_conductance_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S|mS|uS|nS|pS)$"] + ] + def validate_Nml2Quantity_voltage(self, value): # Validate type Nml2Quantity_voltage, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_voltage_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_voltage_patterns_, )) - validate_Nml2Quantity_voltage_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$']] + self.validate_Nml2Quantity_voltage_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_voltage_patterns_, + ) + ) + + validate_Nml2Quantity_voltage_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$"] + ] + def hasContent_(self): - if ( - super(BaseConductanceBasedSynapse, self).hasContent_() - ): + if super(BaseConductanceBasedSynapse, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseConductanceBasedSynapse', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('BaseConductanceBasedSynapse') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="BaseConductanceBasedSynapse", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("BaseConductanceBasedSynapse") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseConductanceBasedSynapse') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="BaseConductanceBasedSynapse", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BaseConductanceBasedSynapse', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="BaseConductanceBasedSynapse", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - 
def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BaseConductanceBasedSynapse'): - super(BaseConductanceBasedSynapse, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseConductanceBasedSynapse') - if self.gbase is not None and 'gbase' not in already_processed: - already_processed.add('gbase') - outfile.write(' gbase=%s' % (quote_attrib(self.gbase), )) - if self.erev is not None and 'erev' not in already_processed: - already_processed.add('erev') - outfile.write(' erev=%s' % (quote_attrib(self.erev), )) - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="BaseConductanceBasedSynapse", + ): + super(BaseConductanceBasedSynapse, self).exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="BaseConductanceBasedSynapse", + ) + if self.gbase is not None and "gbase" not in already_processed: + already_processed.add("gbase") + outfile.write(" gbase=%s" % (quote_attrib(self.gbase),)) + if self.erev is not None and "erev" not in already_processed: + already_processed.add("erev") + outfile.write(" erev=%s" % (quote_attrib(self.erev),)) + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseConductanceBasedSynapse', fromsubclass_=False, pretty_print=True): - super(BaseConductanceBasedSynapse, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="BaseConductanceBasedSynapse", + fromsubclass_=False, + pretty_print=True, + ): + super(BaseConductanceBasedSynapse, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -22380,25 +41342,37 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('gbase', node) - if value is not None and 'gbase' not in already_processed: - already_processed.add('gbase') + value = find_attr_value_("gbase", node) + if value is not None and "gbase" not in already_processed: + already_processed.add("gbase") self.gbase = value - self.validate_Nml2Quantity_conductance(self.gbase) # validate type Nml2Quantity_conductance - value = find_attr_value_('erev', node) - if value is not None and 'erev' not in already_processed: - already_processed.add('erev') + self.validate_Nml2Quantity_conductance( + self.gbase + ) # validate type Nml2Quantity_conductance + value = find_attr_value_("erev", node) + if value is not None and "erev" not in already_processed: + already_processed.add("erev") self.erev = value - self.validate_Nml2Quantity_voltage(self.erev) # validate type Nml2Quantity_voltage - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in 
already_processed: - already_processed.add('xsi:type') + self.validate_Nml2Quantity_voltage( + self.erev + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value - super(BaseConductanceBasedSynapse, self).buildAttributes(node, attrs, already_processed) + super(BaseConductanceBasedSynapse, self).buildAttributes( + node, attrs, already_processed + ) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(BaseConductanceBasedSynapse, self).buildChildren(child_, node, nodeName_, True) + super(BaseConductanceBasedSynapse, self).buildChildren( + child_, node, nodeName_, True + ) pass + + # end class BaseConductanceBasedSynapse @@ -22406,69 +41380,180 @@ class IonChannelVShift(IonChannel): """Same as ionChannel, but with a vShift parameter to change voltage activation of gates. The exact usage of vShift in expressions for rates is determined by the individual gates.""" + member_data_items_ = [ - MemberSpec_('v_shift', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), + MemberSpec_("v_shift", "Nml2Quantity_voltage", 0, 0, {"use": u"required"}), ] subclass = None superclass = IonChannel - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, q10_conductance_scalings=None, species=None, type=None, conductance=None, gates=None, gate_hh_rates=None, gate_h_hrates_taus=None, gate_hh_tau_infs=None, gate_h_hrates_infs=None, gate_h_hrates_tau_infs=None, gate_hh_instantaneouses=None, gate_fractionals=None, v_shift=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + q10_conductance_scalings=None, + species=None, + type=None, + conductance=None, + gates=None, + gate_hh_rates=None, + gate_h_hrates_taus=None, + gate_hh_tau_infs=None, + gate_h_hrates_infs=None, + gate_h_hrates_tau_infs=None, + gate_hh_instantaneouses=None, + gate_fractionals=None, + v_shift=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(IonChannelVShift, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, q10_conductance_scalings, species, type, conductance, gates, gate_hh_rates, gate_h_hrates_taus, gate_hh_tau_infs, gate_h_hrates_infs, gate_h_hrates_tau_infs, gate_hh_instantaneouses, gate_fractionals, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(IonChannelVShift, self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + q10_conductance_scalings, + species, + type, + conductance, + gates, + gate_hh_rates, + gate_h_hrates_taus, + gate_hh_tau_infs, + gate_h_hrates_infs, + gate_h_hrates_tau_infs, + gate_hh_instantaneouses, + gate_fractionals, + **kwargs_ + ) self.v_shift = _cast(None, v_shift) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, IonChannelVShift) + subclass = getSubclassFromModule_(CurrentSubclassModule_, IonChannelVShift) if subclass is not None: return subclass(*args_, **kwargs_) if IonChannelVShift.subclass: return IonChannelVShift.subclass(*args_, **kwargs_) else: return IonChannelVShift(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_voltage(self, value): # Validate type Nml2Quantity_voltage, a restriction on xs:string. 
if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_voltage_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_voltage_patterns_, )) - validate_Nml2Quantity_voltage_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$']] + self.validate_Nml2Quantity_voltage_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_voltage_patterns_, + ) + ) + + validate_Nml2Quantity_voltage_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$"] + ] + def hasContent_(self): - if ( - super(IonChannelVShift, self).hasContent_() - ): + if super(IonChannelVShift, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IonChannelVShift', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('IonChannelVShift') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="IonChannelVShift", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("IonChannelVShift") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IonChannelVShift') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="IonChannelVShift", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='IonChannelVShift', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="IonChannelVShift", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='IonChannelVShift'): - super(IonChannelVShift, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IonChannelVShift') - if self.v_shift is not None and 'v_shift' not in already_processed: - already_processed.add('v_shift') - outfile.write(' vShift=%s' % (quote_attrib(self.v_shift), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IonChannelVShift', fromsubclass_=False, pretty_print=True): - super(IonChannelVShift, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="IonChannelVShift", + ): + super(IonChannelVShift, self).exportAttributes( + outfile, + level, + already_processed, + 
namespaceprefix_, + name_="IonChannelVShift", + ) + if self.v_shift is not None and "v_shift" not in already_processed: + already_processed.add("v_shift") + outfile.write(" vShift=%s" % (quote_attrib(self.v_shift),)) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="IonChannelVShift", + fromsubclass_=False, + pretty_print=True, + ): + super(IonChannelVShift, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -22476,16 +41561,22 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('vShift', node) - if value is not None and 'vShift' not in already_processed: - already_processed.add('vShift') + value = find_attr_value_("vShift", node) + if value is not None and "vShift" not in already_processed: + already_processed.add("vShift") self.v_shift = value - self.validate_Nml2Quantity_voltage(self.v_shift) # validate type Nml2Quantity_voltage + self.validate_Nml2Quantity_voltage( + self.v_shift + ) # validate type Nml2Quantity_voltage super(IonChannelVShift, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(IonChannelVShift, self).buildChildren(child_, node, nodeName_, True) pass + + # end class IonChannelVShift @@ -22494,57 +41585,147 @@ class IonChannelHH(IonChannel): identical. This is needed since many existing examples use ionChannel, some use ionChannelHH. One of these should be removed, probably ionChannelHH.""" - member_data_items_ = [ - ] + + member_data_items_ = [] subclass = None superclass = IonChannel - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, q10_conductance_scalings=None, species=None, type=None, conductance=None, gates=None, gate_hh_rates=None, gate_h_hrates_taus=None, gate_hh_tau_infs=None, gate_h_hrates_infs=None, gate_h_hrates_tau_infs=None, gate_hh_instantaneouses=None, gate_fractionals=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + q10_conductance_scalings=None, + species=None, + type=None, + conductance=None, + gates=None, + gate_hh_rates=None, + gate_h_hrates_taus=None, + gate_hh_tau_infs=None, + gate_h_hrates_infs=None, + gate_h_hrates_tau_infs=None, + gate_hh_instantaneouses=None, + gate_fractionals=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(IonChannelHH, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, q10_conductance_scalings, species, type, conductance, gates, gate_hh_rates, gate_h_hrates_taus, gate_hh_tau_infs, gate_h_hrates_infs, gate_h_hrates_tau_infs, gate_hh_instantaneouses, gate_fractionals, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(IonChannelHH, self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + q10_conductance_scalings, + species, + type, + conductance, + gates, + gate_hh_rates, + gate_h_hrates_taus, + gate_hh_tau_infs, + gate_h_hrates_infs, + gate_h_hrates_tau_infs, + gate_hh_instantaneouses, + gate_fractionals, + **kwargs_ + ) + def factory(*args_, **kwargs_): if 
CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, IonChannelHH) + subclass = getSubclassFromModule_(CurrentSubclassModule_, IonChannelHH) if subclass is not None: return subclass(*args_, **kwargs_) if IonChannelHH.subclass: return IonChannelHH.subclass(*args_, **kwargs_) else: return IonChannelHH(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): - if ( - super(IonChannelHH, self).hasContent_() - ): + if super(IonChannelHH, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IonChannelHH', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('IonChannelHH') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="IonChannelHH", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("IonChannelHH") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IonChannelHH') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="IonChannelHH" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='IonChannelHH', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="IonChannelHH", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='IonChannelHH'): - super(IonChannelHH, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IonChannelHH') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IonChannelHH', fromsubclass_=False, pretty_print=True): - super(IonChannelHH, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="IonChannelHH", + ): + super(IonChannelHH, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="IonChannelHH" + ) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="IonChannelHH", + fromsubclass_=False, + pretty_print=True, + ): + super(IonChannelHH, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -22552,66 +41733,155 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def 
buildAttributes(self, node, attrs, already_processed): super(IonChannelHH, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(IonChannelHH, self).buildChildren(child_, node, nodeName_, True) pass + + # end class IonChannelHH class IF_curr_exp(basePyNNIaFCell): - member_data_items_ = [ - ] + member_data_items_ = [] subclass = None superclass = basePyNNIaFCell - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, cm=None, i_offset=None, tau_syn_E=None, tau_syn_I=None, v_init=None, tau_m=None, tau_refrac=None, v_reset=None, v_rest=None, v_thresh=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + cm=None, + i_offset=None, + tau_syn_E=None, + tau_syn_I=None, + v_init=None, + tau_m=None, + tau_refrac=None, + v_reset=None, + v_rest=None, + v_thresh=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(IF_curr_exp, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, cm, i_offset, tau_syn_E, tau_syn_I, v_init, tau_m, tau_refrac, v_reset, v_rest, v_thresh, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(IF_curr_exp, self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + cm, + i_offset, + tau_syn_E, + tau_syn_I, + v_init, + tau_m, + tau_refrac, + v_reset, + v_rest, + v_thresh, + **kwargs_ + ) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, IF_curr_exp) + subclass = getSubclassFromModule_(CurrentSubclassModule_, IF_curr_exp) if subclass is not None: return subclass(*args_, **kwargs_) if IF_curr_exp.subclass: return IF_curr_exp.subclass(*args_, **kwargs_) else: return IF_curr_exp(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): - if ( - super(IF_curr_exp, self).hasContent_() - ): + if super(IF_curr_exp, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IF_curr_exp', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('IF_curr_exp') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="IF_curr_exp", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("IF_curr_exp") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IF_curr_exp') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="IF_curr_exp" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='IF_curr_exp', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + 
name_="IF_curr_exp", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='IF_curr_exp'): - super(IF_curr_exp, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IF_curr_exp') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IF_curr_exp', fromsubclass_=False, pretty_print=True): - super(IF_curr_exp, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="IF_curr_exp", + ): + super(IF_curr_exp, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="IF_curr_exp" + ) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="IF_curr_exp", + fromsubclass_=False, + pretty_print=True, + ): + super(IF_curr_exp, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -22619,66 +41889,155 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): super(IF_curr_exp, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(IF_curr_exp, self).buildChildren(child_, node, nodeName_, True) pass + + # end class IF_curr_exp class IF_curr_alpha(basePyNNIaFCell): - member_data_items_ = [ - ] + member_data_items_ = [] subclass = None superclass = basePyNNIaFCell - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, cm=None, i_offset=None, tau_syn_E=None, tau_syn_I=None, v_init=None, tau_m=None, tau_refrac=None, v_reset=None, v_rest=None, v_thresh=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + cm=None, + i_offset=None, + tau_syn_E=None, + tau_syn_I=None, + v_init=None, + tau_m=None, + tau_refrac=None, + v_reset=None, + v_rest=None, + v_thresh=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(IF_curr_alpha, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, cm, i_offset, tau_syn_E, tau_syn_I, v_init, tau_m, tau_refrac, v_reset, v_rest, v_thresh, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(IF_curr_alpha, self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + cm, + i_offset, + tau_syn_E, + tau_syn_I, + v_init, + tau_m, + tau_refrac, + v_reset, + v_rest, + v_thresh, + **kwargs_ + ) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, IF_curr_alpha) + subclass = getSubclassFromModule_(CurrentSubclassModule_, IF_curr_alpha) if subclass is not None: return subclass(*args_, **kwargs_) if IF_curr_alpha.subclass: return IF_curr_alpha.subclass(*args_, **kwargs_) else: return 
IF_curr_alpha(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): - if ( - super(IF_curr_alpha, self).hasContent_() - ): + if super(IF_curr_alpha, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IF_curr_alpha', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('IF_curr_alpha') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="IF_curr_alpha", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("IF_curr_alpha") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IF_curr_alpha') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="IF_curr_alpha" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='IF_curr_alpha', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="IF_curr_alpha", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='IF_curr_alpha'): - super(IF_curr_alpha, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IF_curr_alpha') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IF_curr_alpha', fromsubclass_=False, pretty_print=True): - super(IF_curr_alpha, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="IF_curr_alpha", + ): + super(IF_curr_alpha, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="IF_curr_alpha" + ) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="IF_curr_alpha", + fromsubclass_=False, + pretty_print=True, + ): + super(IF_curr_alpha, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -22686,81 +42045,191 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): super(IF_curr_alpha, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(IF_curr_alpha, self).buildChildren(child_, node, nodeName_, True) pass + + # end class IF_curr_alpha class 
basePyNNIaFCondCell(basePyNNIaFCell): member_data_items_ = [ - MemberSpec_('e_rev_E', 'xs:float', 0, 0, {'use': u'required'}), - MemberSpec_('e_rev_I', 'xs:float', 0, 0, {'use': u'required'}), + MemberSpec_("e_rev_E", "xs:float", 0, 0, {"use": u"required"}), + MemberSpec_("e_rev_I", "xs:float", 0, 0, {"use": u"required"}), ] subclass = None superclass = basePyNNIaFCell - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, cm=None, i_offset=None, tau_syn_E=None, tau_syn_I=None, v_init=None, tau_m=None, tau_refrac=None, v_reset=None, v_rest=None, v_thresh=None, e_rev_E=None, e_rev_I=None, extensiontype_=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + cm=None, + i_offset=None, + tau_syn_E=None, + tau_syn_I=None, + v_init=None, + tau_m=None, + tau_refrac=None, + v_reset=None, + v_rest=None, + v_thresh=None, + e_rev_E=None, + e_rev_I=None, + extensiontype_=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(basePyNNIaFCondCell, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, cm, i_offset, tau_syn_E, tau_syn_I, v_init, tau_m, tau_refrac, v_reset, v_rest, v_thresh, extensiontype_, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(basePyNNIaFCondCell, self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + cm, + i_offset, + tau_syn_E, + tau_syn_I, + v_init, + tau_m, + tau_refrac, + v_reset, + v_rest, + v_thresh, + extensiontype_, + **kwargs_ + ) self.e_rev_E = _cast(float, e_rev_E) self.e_rev_I = _cast(float, e_rev_I) self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, basePyNNIaFCondCell) + CurrentSubclassModule_, basePyNNIaFCondCell + ) if subclass is not None: return subclass(*args_, **kwargs_) if basePyNNIaFCondCell.subclass: return basePyNNIaFCondCell.subclass(*args_, **kwargs_) else: return basePyNNIaFCondCell(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): - if ( - super(basePyNNIaFCondCell, self).hasContent_() - ): + if super(basePyNNIaFCondCell, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='basePyNNIaFCondCell', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('basePyNNIaFCondCell') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="basePyNNIaFCondCell", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("basePyNNIaFCondCell") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='basePyNNIaFCondCell') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="basePyNNIaFCondCell", + ) if 
self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='basePyNNIaFCondCell', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="basePyNNIaFCondCell", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='basePyNNIaFCondCell'): - super(basePyNNIaFCondCell, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='basePyNNIaFCondCell') - if self.e_rev_E is not None and 'e_rev_E' not in already_processed: - already_processed.add('e_rev_E') - outfile.write(' e_rev_E="%s"' % self.gds_format_float(self.e_rev_E, input_name='e_rev_E')) - if self.e_rev_I is not None and 'e_rev_I' not in already_processed: - already_processed.add('e_rev_I') - outfile.write(' e_rev_I="%s"' % self.gds_format_float(self.e_rev_I, input_name='e_rev_I')) - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="basePyNNIaFCondCell", + ): + super(basePyNNIaFCondCell, self).exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="basePyNNIaFCondCell", + ) + if self.e_rev_E is not None and "e_rev_E" not in already_processed: + already_processed.add("e_rev_E") + outfile.write( + ' e_rev_E="%s"' + % self.gds_format_float(self.e_rev_E, input_name="e_rev_E") + ) + if self.e_rev_I is not None and "e_rev_I" not in already_processed: + already_processed.add("e_rev_I") + outfile.write( + ' e_rev_I="%s"' + % self.gds_format_float(self.e_rev_I, input_name="e_rev_I") + ) + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='basePyNNIaFCondCell', fromsubclass_=False, pretty_print=True): - super(basePyNNIaFCondCell, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="basePyNNIaFCondCell", + fromsubclass_=False, + pretty_print=True, + ): + super(basePyNNIaFCondCell, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -22768,90 +42237,180 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('e_rev_E', node) - if value is not None and 'e_rev_E' not in already_processed: - already_processed.add('e_rev_E') + value = find_attr_value_("e_rev_E", node) + if value is not None and "e_rev_E" not in already_processed: + already_processed.add("e_rev_E") try: self.e_rev_E = float(value) except ValueError as exp: - 
raise ValueError('Bad float/double attribute (e_rev_E): %s' % exp) - value = find_attr_value_('e_rev_I', node) - if value is not None and 'e_rev_I' not in already_processed: - already_processed.add('e_rev_I') + raise ValueError("Bad float/double attribute (e_rev_E): %s" % exp) + value = find_attr_value_("e_rev_I", node) + if value is not None and "e_rev_I" not in already_processed: + already_processed.add("e_rev_I") try: self.e_rev_I = float(value) except ValueError as exp: - raise ValueError('Bad float/double attribute (e_rev_I): %s' % exp) - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + raise ValueError("Bad float/double attribute (e_rev_I): %s" % exp) + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value super(basePyNNIaFCondCell, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(basePyNNIaFCondCell, self).buildChildren(child_, node, nodeName_, True) pass + + # end class basePyNNIaFCondCell class ContinuousConnectionInstance(ContinuousConnection): """Individual continuous/analog synaptic connection - instance based""" - member_data_items_ = [ - ] + + member_data_items_ = [] subclass = None superclass = ContinuousConnection - def __init__(self, neuro_lex_id=None, id=None, pre_cell=None, pre_segment='0', pre_fraction_along='0.5', post_cell=None, post_segment='0', post_fraction_along='0.5', pre_component=None, post_component=None, extensiontype_=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + pre_cell=None, + pre_segment="0", + pre_fraction_along="0.5", + post_cell=None, + post_segment="0", + post_fraction_along="0.5", + pre_component=None, + post_component=None, + extensiontype_=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(ContinuousConnectionInstance, self).__init__(neuro_lex_id, id, pre_cell, pre_segment, pre_fraction_along, post_cell, post_segment, post_fraction_along, pre_component, post_component, extensiontype_, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(ContinuousConnectionInstance, self).__init__( + neuro_lex_id, + id, + pre_cell, + pre_segment, + pre_fraction_along, + post_cell, + post_segment, + post_fraction_along, + pre_component, + post_component, + extensiontype_, + **kwargs_ + ) self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, ContinuousConnectionInstance) + CurrentSubclassModule_, ContinuousConnectionInstance + ) if subclass is not None: return subclass(*args_, **kwargs_) if ContinuousConnectionInstance.subclass: return ContinuousConnectionInstance.subclass(*args_, **kwargs_) else: return ContinuousConnectionInstance(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): - if ( - super(ContinuousConnectionInstance, self).hasContent_() - ): + if super(ContinuousConnectionInstance, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ContinuousConnectionInstance', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('ContinuousConnectionInstance') + + def export( + self, + outfile, + level, + namespaceprefix_="", + 
namespacedef_="", + name_="ContinuousConnectionInstance", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ContinuousConnectionInstance") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ContinuousConnectionInstance') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ContinuousConnectionInstance", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ContinuousConnectionInstance', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ContinuousConnectionInstance'): - super(ContinuousConnectionInstance, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ContinuousConnectionInstance') - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ContinuousConnectionInstance", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ContinuousConnectionInstance", + ): + super(ContinuousConnectionInstance, self).exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ContinuousConnectionInstance", + ) + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ContinuousConnectionInstance', fromsubclass_=False, pretty_print=True): - super(ContinuousConnectionInstance, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ContinuousConnectionInstance", + fromsubclass_=False, + pretty_print=True, + ): + super(ContinuousConnectionInstance, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) pass + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -22859,27 +42418,42 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - 
already_processed.add('xsi:type') + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value - super(ContinuousConnectionInstance, self).buildAttributes(node, attrs, already_processed) + super(ContinuousConnectionInstance, self).buildAttributes( + node, attrs, already_processed + ) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(ContinuousConnectionInstance, self).buildChildren(child_, node, nodeName_, True) + super(ContinuousConnectionInstance, self).buildChildren( + child_, node, nodeName_, True + ) pass def _get_cell_id(self, id_string): - if '[' in id_string: - return int(id_string.split('[')[1].split(']')[0]) + if "[" in id_string: + return int(id_string.split("[")[1].split("]")[0]) else: - return int(id_string.split('/')[2]) - + return int(id_string.split("/")[2]) def __str__(self): - return "Continuous Connection (Instance based) "+str(self.id)+": "+str(self.get_pre_info())+" -> "+str(self.get_post_info())+ ", pre comp: "+str(self.pre_component)+", post comp: "+str(self.post_component) - + return ( + "Continuous Connection (Instance based) " + + str(self.id) + + ": " + + str(self.get_pre_info()) + + " -> " + + str(self.get_post_info()) + + ", pre comp: " + + str(self.pre_component) + + ", post comp: " + + str(self.post_component) + ) # end class ContinuousConnectionInstance @@ -22887,62 +42461,146 @@ def __str__(self): class ElectricalConnectionInstance(ElectricalConnection): """Projection between two populations consisting of analog connections (e.g. graded synapses)""" - member_data_items_ = [ - ] + + member_data_items_ = [] subclass = None superclass = ElectricalConnection - def __init__(self, neuro_lex_id=None, id=None, pre_cell=None, pre_segment='0', pre_fraction_along='0.5', post_cell=None, post_segment='0', post_fraction_along='0.5', synapse=None, extensiontype_=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + pre_cell=None, + pre_segment="0", + pre_fraction_along="0.5", + post_cell=None, + post_segment="0", + post_fraction_along="0.5", + synapse=None, + extensiontype_=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(ElectricalConnectionInstance, self).__init__(neuro_lex_id, id, pre_cell, pre_segment, pre_fraction_along, post_cell, post_segment, post_fraction_along, synapse, extensiontype_, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(ElectricalConnectionInstance, self).__init__( + neuro_lex_id, + id, + pre_cell, + pre_segment, + pre_fraction_along, + post_cell, + post_segment, + post_fraction_along, + synapse, + extensiontype_, + **kwargs_ + ) self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, ElectricalConnectionInstance) + CurrentSubclassModule_, ElectricalConnectionInstance + ) if subclass is not None: return subclass(*args_, **kwargs_) if ElectricalConnectionInstance.subclass: return ElectricalConnectionInstance.subclass(*args_, **kwargs_) else: return ElectricalConnectionInstance(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): - if ( - super(ElectricalConnectionInstance, self).hasContent_() - ): + if super(ElectricalConnectionInstance, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', 
namespacedef_='', name_='ElectricalConnectionInstance', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('ElectricalConnectionInstance') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ElectricalConnectionInstance", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ElectricalConnectionInstance") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ElectricalConnectionInstance') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ElectricalConnectionInstance", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ElectricalConnectionInstance', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ElectricalConnectionInstance'): - super(ElectricalConnectionInstance, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ElectricalConnectionInstance') - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ElectricalConnectionInstance", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ElectricalConnectionInstance", + ): + super(ElectricalConnectionInstance, self).exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ElectricalConnectionInstance", + ) + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ElectricalConnectionInstance', fromsubclass_=False, pretty_print=True): - super(ElectricalConnectionInstance, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ElectricalConnectionInstance", + fromsubclass_=False, + pretty_print=True, + ): + super(ElectricalConnectionInstance, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) pass + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -22950,104 +42608,204 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, 
node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value - super(ElectricalConnectionInstance, self).buildAttributes(node, attrs, already_processed) + super(ElectricalConnectionInstance, self).buildAttributes( + node, attrs, already_processed + ) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(ElectricalConnectionInstance, self).buildChildren(child_, node, nodeName_, True) + super(ElectricalConnectionInstance, self).buildChildren( + child_, node, nodeName_, True + ) pass def _get_cell_id(self, id_string): - if '[' in id_string: - return int(id_string.split('[')[1].split(']')[0]) + if "[" in id_string: + return int(id_string.split("[")[1].split("]")[0]) else: - return int(id_string.split('/')[2]) + return int(id_string.split("/")[2]) def __str__(self): - return "Electrical Connection (Instance based) "+str(self.id)+": "+str(self.get_pre_info())+" -> "+str(self.get_post_info())+ ", synapse: "+str(self.synapse) - + return ( + "Electrical Connection (Instance based) " + + str(self.id) + + ": " + + str(self.get_pre_info()) + + " -> " + + str(self.get_post_info()) + + ", synapse: " + + str(self.synapse) + ) # end class ElectricalConnectionInstance class ExpThreeSynapse(BaseConductanceBasedSynapseTwo): member_data_items_ = [ - MemberSpec_('tau_decay1', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('tau_decay2', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('tau_rise', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), + MemberSpec_("tau_decay1", "Nml2Quantity_time", 0, 0, {"use": u"required"}), + MemberSpec_("tau_decay2", "Nml2Quantity_time", 0, 0, {"use": u"required"}), + MemberSpec_("tau_rise", "Nml2Quantity_time", 0, 0, {"use": u"required"}), ] subclass = None superclass = BaseConductanceBasedSynapseTwo - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, gbase1=None, gbase2=None, erev=None, tau_decay1=None, tau_decay2=None, tau_rise=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + gbase1=None, + gbase2=None, + erev=None, + tau_decay1=None, + tau_decay2=None, + tau_rise=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(ExpThreeSynapse, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, gbase1, gbase2, erev, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(ExpThreeSynapse, self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + gbase1, + gbase2, + erev, + **kwargs_ + ) self.tau_decay1 = _cast(None, tau_decay1) self.tau_decay2 = _cast(None, tau_decay2) self.tau_rise = _cast(None, tau_rise) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, ExpThreeSynapse) + subclass = getSubclassFromModule_(CurrentSubclassModule_, ExpThreeSynapse) if subclass is not None: return subclass(*args_, **kwargs_) if ExpThreeSynapse.subclass: return ExpThreeSynapse.subclass(*args_, **kwargs_) else: return ExpThreeSynapse(*args_, 
**kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] + self.validate_Nml2Quantity_time_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_time_patterns_, + ) + ) + + validate_Nml2Quantity_time_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$"] + ] + def hasContent_(self): - if ( - super(ExpThreeSynapse, self).hasContent_() - ): + if super(ExpThreeSynapse, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExpThreeSynapse', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('ExpThreeSynapse') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ExpThreeSynapse", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ExpThreeSynapse") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExpThreeSynapse') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="ExpThreeSynapse" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ExpThreeSynapse', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ExpThreeSynapse", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ExpThreeSynapse'): - super(ExpThreeSynapse, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExpThreeSynapse') - if self.tau_decay1 is not None and 'tau_decay1' not in already_processed: - already_processed.add('tau_decay1') - outfile.write(' tauDecay1=%s' % (quote_attrib(self.tau_decay1), )) - if self.tau_decay2 is not None and 'tau_decay2' not in already_processed: - already_processed.add('tau_decay2') - outfile.write(' tauDecay2=%s' % (quote_attrib(self.tau_decay2), )) - if self.tau_rise is not None and 'tau_rise' not in already_processed: - already_processed.add('tau_rise') - outfile.write(' tauRise=%s' % (quote_attrib(self.tau_rise), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExpThreeSynapse', fromsubclass_=False, 
pretty_print=True): - super(ExpThreeSynapse, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ExpThreeSynapse", + ): + super(ExpThreeSynapse, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="ExpThreeSynapse" + ) + if self.tau_decay1 is not None and "tau_decay1" not in already_processed: + already_processed.add("tau_decay1") + outfile.write(" tauDecay1=%s" % (quote_attrib(self.tau_decay1),)) + if self.tau_decay2 is not None and "tau_decay2" not in already_processed: + already_processed.add("tau_decay2") + outfile.write(" tauDecay2=%s" % (quote_attrib(self.tau_decay2),)) + if self.tau_rise is not None and "tau_rise" not in already_processed: + already_processed.add("tau_rise") + outfile.write(" tauRise=%s" % (quote_attrib(self.tau_rise),)) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ExpThreeSynapse", + fromsubclass_=False, + pretty_print=True, + ): + super(ExpThreeSynapse, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -23055,103 +42813,198 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('tauDecay1', node) - if value is not None and 'tauDecay1' not in already_processed: - already_processed.add('tauDecay1') + value = find_attr_value_("tauDecay1", node) + if value is not None and "tauDecay1" not in already_processed: + already_processed.add("tauDecay1") self.tau_decay1 = value - self.validate_Nml2Quantity_time(self.tau_decay1) # validate type Nml2Quantity_time - value = find_attr_value_('tauDecay2', node) - if value is not None and 'tauDecay2' not in already_processed: - already_processed.add('tauDecay2') + self.validate_Nml2Quantity_time( + self.tau_decay1 + ) # validate type Nml2Quantity_time + value = find_attr_value_("tauDecay2", node) + if value is not None and "tauDecay2" not in already_processed: + already_processed.add("tauDecay2") self.tau_decay2 = value - self.validate_Nml2Quantity_time(self.tau_decay2) # validate type Nml2Quantity_time - value = find_attr_value_('tauRise', node) - if value is not None and 'tauRise' not in already_processed: - already_processed.add('tauRise') + self.validate_Nml2Quantity_time( + self.tau_decay2 + ) # validate type Nml2Quantity_time + value = find_attr_value_("tauRise", node) + if value is not None and "tauRise" not in already_processed: + already_processed.add("tauRise") self.tau_rise = value - self.validate_Nml2Quantity_time(self.tau_rise) # validate type Nml2Quantity_time + self.validate_Nml2Quantity_time( + self.tau_rise + ) # validate type Nml2Quantity_time super(ExpThreeSynapse, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(ExpThreeSynapse, self).buildChildren(child_, node, nodeName_, True) pass + + # end class ExpThreeSynapse class ExpTwoSynapse(BaseConductanceBasedSynapse): member_data_items_ = [ - MemberSpec_('tau_decay', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - 
MemberSpec_('tau_rise', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), + MemberSpec_("tau_decay", "Nml2Quantity_time", 0, 0, {"use": u"required"}), + MemberSpec_("tau_rise", "Nml2Quantity_time", 0, 0, {"use": u"required"}), ] subclass = None superclass = BaseConductanceBasedSynapse - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, gbase=None, erev=None, tau_decay=None, tau_rise=None, extensiontype_=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + gbase=None, + erev=None, + tau_decay=None, + tau_rise=None, + extensiontype_=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(ExpTwoSynapse, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, gbase, erev, extensiontype_, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(ExpTwoSynapse, self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + gbase, + erev, + extensiontype_, + **kwargs_ + ) self.tau_decay = _cast(None, tau_decay) self.tau_rise = _cast(None, tau_rise) self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, ExpTwoSynapse) + subclass = getSubclassFromModule_(CurrentSubclassModule_, ExpTwoSynapse) if subclass is not None: return subclass(*args_, **kwargs_) if ExpTwoSynapse.subclass: return ExpTwoSynapse.subclass(*args_, **kwargs_) else: return ExpTwoSynapse(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. 
if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] + self.validate_Nml2Quantity_time_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_time_patterns_, + ) + ) + + validate_Nml2Quantity_time_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$"] + ] + def hasContent_(self): - if ( - super(ExpTwoSynapse, self).hasContent_() - ): + if super(ExpTwoSynapse, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExpTwoSynapse', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('ExpTwoSynapse') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ExpTwoSynapse", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ExpTwoSynapse") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExpTwoSynapse') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="ExpTwoSynapse" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ExpTwoSynapse', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ExpTwoSynapse", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ExpTwoSynapse'): - super(ExpTwoSynapse, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExpTwoSynapse') - if self.tau_decay is not None and 'tau_decay' not in already_processed: - already_processed.add('tau_decay') - outfile.write(' tauDecay=%s' % (quote_attrib(self.tau_decay), )) - if self.tau_rise is not None and 'tau_rise' not in already_processed: - already_processed.add('tau_rise') - outfile.write(' tauRise=%s' % (quote_attrib(self.tau_rise), )) - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ExpTwoSynapse", + ): + super(ExpTwoSynapse, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="ExpTwoSynapse" + ) + if self.tau_decay is not None and 
"tau_decay" not in already_processed: + already_processed.add("tau_decay") + outfile.write(" tauDecay=%s" % (quote_attrib(self.tau_decay),)) + if self.tau_rise is not None and "tau_rise" not in already_processed: + already_processed.add("tau_rise") + outfile.write(" tauRise=%s" % (quote_attrib(self.tau_rise),)) + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExpTwoSynapse', fromsubclass_=False, pretty_print=True): - super(ExpTwoSynapse, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ExpTwoSynapse", + fromsubclass_=False, + pretty_print=True, + ): + super(ExpTwoSynapse, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -23159,92 +43012,182 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('tauDecay', node) - if value is not None and 'tauDecay' not in already_processed: - already_processed.add('tauDecay') + value = find_attr_value_("tauDecay", node) + if value is not None and "tauDecay" not in already_processed: + already_processed.add("tauDecay") self.tau_decay = value - self.validate_Nml2Quantity_time(self.tau_decay) # validate type Nml2Quantity_time - value = find_attr_value_('tauRise', node) - if value is not None and 'tauRise' not in already_processed: - already_processed.add('tauRise') + self.validate_Nml2Quantity_time( + self.tau_decay + ) # validate type Nml2Quantity_time + value = find_attr_value_("tauRise", node) + if value is not None and "tauRise" not in already_processed: + already_processed.add("tauRise") self.tau_rise = value - self.validate_Nml2Quantity_time(self.tau_rise) # validate type Nml2Quantity_time - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + self.validate_Nml2Quantity_time( + self.tau_rise + ) # validate type Nml2Quantity_time + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value super(ExpTwoSynapse, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(ExpTwoSynapse, self).buildChildren(child_, node, nodeName_, True) pass + + # end class ExpTwoSynapse class ExpOneSynapse(BaseConductanceBasedSynapse): member_data_items_ = [ - MemberSpec_('tau_decay', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), + MemberSpec_("tau_decay", "Nml2Quantity_time", 0, 0, {"use": u"required"}), ] subclass = None superclass = BaseConductanceBasedSynapse - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, gbase=None, erev=None, tau_decay=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + 
gbase=None, + erev=None, + tau_decay=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(ExpOneSynapse, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, gbase, erev, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(ExpOneSynapse, self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + gbase, + erev, + **kwargs_ + ) self.tau_decay = _cast(None, tau_decay) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, ExpOneSynapse) + subclass = getSubclassFromModule_(CurrentSubclassModule_, ExpOneSynapse) if subclass is not None: return subclass(*args_, **kwargs_) if ExpOneSynapse.subclass: return ExpOneSynapse.subclass(*args_, **kwargs_) else: return ExpOneSynapse(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] + self.validate_Nml2Quantity_time_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_time_patterns_, + ) + ) + + validate_Nml2Quantity_time_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$"] + ] + def hasContent_(self): - if ( - super(ExpOneSynapse, self).hasContent_() - ): + if super(ExpOneSynapse, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExpOneSynapse', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('ExpOneSynapse') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ExpOneSynapse", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ExpOneSynapse") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExpOneSynapse') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="ExpOneSynapse" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ExpOneSynapse', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ExpOneSynapse", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, 
level, already_processed, namespaceprefix_='', name_='ExpOneSynapse'): - super(ExpOneSynapse, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExpOneSynapse') - if self.tau_decay is not None and 'tau_decay' not in already_processed: - already_processed.add('tau_decay') - outfile.write(' tauDecay=%s' % (quote_attrib(self.tau_decay), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExpOneSynapse', fromsubclass_=False, pretty_print=True): - super(ExpOneSynapse, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ExpOneSynapse", + ): + super(ExpOneSynapse, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="ExpOneSynapse" + ) + if self.tau_decay is not None and "tau_decay" not in already_processed: + already_processed.add("tau_decay") + outfile.write(" tauDecay=%s" % (quote_attrib(self.tau_decay),)) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ExpOneSynapse", + fromsubclass_=False, + pretty_print=True, + ): + super(ExpOneSynapse, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -23252,83 +43195,171 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('tauDecay', node) - if value is not None and 'tauDecay' not in already_processed: - already_processed.add('tauDecay') + value = find_attr_value_("tauDecay", node) + if value is not None and "tauDecay" not in already_processed: + already_processed.add("tauDecay") self.tau_decay = value - self.validate_Nml2Quantity_time(self.tau_decay) # validate type Nml2Quantity_time + self.validate_Nml2Quantity_time( + self.tau_decay + ) # validate type Nml2Quantity_time super(ExpOneSynapse, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(ExpOneSynapse, self).buildChildren(child_, node, nodeName_, True) pass + + # end class ExpOneSynapse class AlphaSynapse(BaseConductanceBasedSynapse): member_data_items_ = [ - MemberSpec_('tau', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), + MemberSpec_("tau", "Nml2Quantity_time", 0, 0, {"use": u"required"}), ] subclass = None superclass = BaseConductanceBasedSynapse - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, gbase=None, erev=None, tau=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + gbase=None, + erev=None, + tau=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(AlphaSynapse, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, gbase, erev, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(AlphaSynapse, self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + gbase, + erev, + **kwargs_ + ) self.tau = 
_cast(None, tau) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, AlphaSynapse) + subclass = getSubclassFromModule_(CurrentSubclassModule_, AlphaSynapse) if subclass is not None: return subclass(*args_, **kwargs_) if AlphaSynapse.subclass: return AlphaSynapse.subclass(*args_, **kwargs_) else: return AlphaSynapse(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. if value is not None and Validate_simpletypes_: if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] + self.validate_Nml2Quantity_time_patterns_, value + ): + warnings_.warn( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + value.encode("utf-8"), + self.validate_Nml2Quantity_time_patterns_, + ) + ) + + validate_Nml2Quantity_time_patterns_ = [ + [u"^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$"] + ] + def hasContent_(self): - if ( - super(AlphaSynapse, self).hasContent_() - ): + if super(AlphaSynapse, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='AlphaSynapse', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('AlphaSynapse') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="AlphaSynapse", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("AlphaSynapse") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='AlphaSynapse') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="AlphaSynapse" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='AlphaSynapse', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="AlphaSynapse", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='AlphaSynapse'): - super(AlphaSynapse, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='AlphaSynapse') - if self.tau is not None and 'tau' not in already_processed: - already_processed.add('tau') - outfile.write(' tau=%s' % (quote_attrib(self.tau), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='AlphaSynapse', fromsubclass_=False, pretty_print=True): - 
super(AlphaSynapse, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="AlphaSynapse", + ): + super(AlphaSynapse, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="AlphaSynapse" + ) + if self.tau is not None and "tau" not in already_processed: + already_processed.add("tau") + outfile.write(" tau=%s" % (quote_attrib(self.tau),)) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="AlphaSynapse", + fromsubclass_=False, + pretty_print=True, + ): + super(AlphaSynapse, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -23336,101 +43367,220 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('tau', node) - if value is not None and 'tau' not in already_processed: - already_processed.add('tau') + value = find_attr_value_("tau", node) + if value is not None and "tau" not in already_processed: + already_processed.add("tau") self.tau = value - self.validate_Nml2Quantity_time(self.tau) # validate type Nml2Quantity_time + self.validate_Nml2Quantity_time(self.tau) # validate type Nml2Quantity_time super(AlphaSynapse, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(AlphaSynapse, self).buildChildren(child_, node, nodeName_, True) pass + + # end class AlphaSynapse class EIF_cond_exp_isfa_ista(basePyNNIaFCondCell): member_data_items_ = [ - MemberSpec_('a', 'xs:float', 0, 0, {'use': u'required'}), - MemberSpec_('b', 'xs:float', 0, 0, {'use': u'required'}), - MemberSpec_('delta_T', 'xs:float', 0, 0, {'use': u'required'}), - MemberSpec_('tau_w', 'xs:float', 0, 0, {'use': u'required'}), - MemberSpec_('v_spike', 'xs:float', 0, 0, {'use': u'required'}), + MemberSpec_("a", "xs:float", 0, 0, {"use": u"required"}), + MemberSpec_("b", "xs:float", 0, 0, {"use": u"required"}), + MemberSpec_("delta_T", "xs:float", 0, 0, {"use": u"required"}), + MemberSpec_("tau_w", "xs:float", 0, 0, {"use": u"required"}), + MemberSpec_("v_spike", "xs:float", 0, 0, {"use": u"required"}), ] subclass = None superclass = basePyNNIaFCondCell - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, cm=None, i_offset=None, tau_syn_E=None, tau_syn_I=None, v_init=None, tau_m=None, tau_refrac=None, v_reset=None, v_rest=None, v_thresh=None, e_rev_E=None, e_rev_I=None, a=None, b=None, delta_T=None, tau_w=None, v_spike=None, extensiontype_=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + cm=None, + i_offset=None, + tau_syn_E=None, + tau_syn_I=None, + v_init=None, + tau_m=None, + tau_refrac=None, + v_reset=None, + v_rest=None, + v_thresh=None, + e_rev_E=None, + e_rev_I=None, + a=None, + b=None, + delta_T=None, + tau_w=None, + v_spike=None, + extensiontype_=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - 
super(EIF_cond_exp_isfa_ista, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, cm, i_offset, tau_syn_E, tau_syn_I, v_init, tau_m, tau_refrac, v_reset, v_rest, v_thresh, e_rev_E, e_rev_I, extensiontype_, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(EIF_cond_exp_isfa_ista, self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + cm, + i_offset, + tau_syn_E, + tau_syn_I, + v_init, + tau_m, + tau_refrac, + v_reset, + v_rest, + v_thresh, + e_rev_E, + e_rev_I, + extensiontype_, + **kwargs_ + ) self.a = _cast(float, a) self.b = _cast(float, b) self.delta_T = _cast(float, delta_T) self.tau_w = _cast(float, tau_w) self.v_spike = _cast(float, v_spike) self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, EIF_cond_exp_isfa_ista) + CurrentSubclassModule_, EIF_cond_exp_isfa_ista + ) if subclass is not None: return subclass(*args_, **kwargs_) if EIF_cond_exp_isfa_ista.subclass: return EIF_cond_exp_isfa_ista.subclass(*args_, **kwargs_) else: return EIF_cond_exp_isfa_ista(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): - if ( - super(EIF_cond_exp_isfa_ista, self).hasContent_() - ): + if super(EIF_cond_exp_isfa_ista, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='EIF_cond_exp_isfa_ista', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('EIF_cond_exp_isfa_ista') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="EIF_cond_exp_isfa_ista", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("EIF_cond_exp_isfa_ista") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='EIF_cond_exp_isfa_ista') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="EIF_cond_exp_isfa_ista", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='EIF_cond_exp_isfa_ista', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="EIF_cond_exp_isfa_ista", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='EIF_cond_exp_isfa_ista'): - super(EIF_cond_exp_isfa_ista, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='EIF_cond_exp_isfa_ista') - if self.a is not None and 'a' not in already_processed: - already_processed.add('a') - outfile.write(' a="%s"' % self.gds_format_float(self.a, input_name='a')) - if self.b is not None and 'b' 
not in already_processed: - already_processed.add('b') - outfile.write(' b="%s"' % self.gds_format_float(self.b, input_name='b')) - if self.delta_T is not None and 'delta_T' not in already_processed: - already_processed.add('delta_T') - outfile.write(' delta_T="%s"' % self.gds_format_float(self.delta_T, input_name='delta_T')) - if self.tau_w is not None and 'tau_w' not in already_processed: - already_processed.add('tau_w') - outfile.write(' tau_w="%s"' % self.gds_format_float(self.tau_w, input_name='tau_w')) - if self.v_spike is not None and 'v_spike' not in already_processed: - already_processed.add('v_spike') - outfile.write(' v_spike="%s"' % self.gds_format_float(self.v_spike, input_name='v_spike')) - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="EIF_cond_exp_isfa_ista", + ): + super(EIF_cond_exp_isfa_ista, self).exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="EIF_cond_exp_isfa_ista", + ) + if self.a is not None and "a" not in already_processed: + already_processed.add("a") + outfile.write(' a="%s"' % self.gds_format_float(self.a, input_name="a")) + if self.b is not None and "b" not in already_processed: + already_processed.add("b") + outfile.write(' b="%s"' % self.gds_format_float(self.b, input_name="b")) + if self.delta_T is not None and "delta_T" not in already_processed: + already_processed.add("delta_T") + outfile.write( + ' delta_T="%s"' + % self.gds_format_float(self.delta_T, input_name="delta_T") + ) + if self.tau_w is not None and "tau_w" not in already_processed: + already_processed.add("tau_w") + outfile.write( + ' tau_w="%s"' % self.gds_format_float(self.tau_w, input_name="tau_w") + ) + if self.v_spike is not None and "v_spike" not in already_processed: + already_processed.add("v_spike") + outfile.write( + ' v_spike="%s"' + % self.gds_format_float(self.v_spike, input_name="v_spike") + ) + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='EIF_cond_exp_isfa_ista', fromsubclass_=False, pretty_print=True): - super(EIF_cond_exp_isfa_ista, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="EIF_cond_exp_isfa_ista", + fromsubclass_=False, + pretty_print=True, + ): + super(EIF_cond_exp_isfa_ista, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -23438,105 +43588,200 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('a', node) - if value is not None and 'a' not in already_processed: - already_processed.add('a') + value = find_attr_value_("a", node) + if value is not None and "a" not in already_processed: + 
already_processed.add("a") try: self.a = float(value) except ValueError as exp: - raise ValueError('Bad float/double attribute (a): %s' % exp) - value = find_attr_value_('b', node) - if value is not None and 'b' not in already_processed: - already_processed.add('b') + raise ValueError("Bad float/double attribute (a): %s" % exp) + value = find_attr_value_("b", node) + if value is not None and "b" not in already_processed: + already_processed.add("b") try: self.b = float(value) except ValueError as exp: - raise ValueError('Bad float/double attribute (b): %s' % exp) - value = find_attr_value_('delta_T', node) - if value is not None and 'delta_T' not in already_processed: - already_processed.add('delta_T') + raise ValueError("Bad float/double attribute (b): %s" % exp) + value = find_attr_value_("delta_T", node) + if value is not None and "delta_T" not in already_processed: + already_processed.add("delta_T") try: self.delta_T = float(value) except ValueError as exp: - raise ValueError('Bad float/double attribute (delta_T): %s' % exp) - value = find_attr_value_('tau_w', node) - if value is not None and 'tau_w' not in already_processed: - already_processed.add('tau_w') + raise ValueError("Bad float/double attribute (delta_T): %s" % exp) + value = find_attr_value_("tau_w", node) + if value is not None and "tau_w" not in already_processed: + already_processed.add("tau_w") try: self.tau_w = float(value) except ValueError as exp: - raise ValueError('Bad float/double attribute (tau_w): %s' % exp) - value = find_attr_value_('v_spike', node) - if value is not None and 'v_spike' not in already_processed: - already_processed.add('v_spike') + raise ValueError("Bad float/double attribute (tau_w): %s" % exp) + value = find_attr_value_("v_spike", node) + if value is not None and "v_spike" not in already_processed: + already_processed.add("v_spike") try: self.v_spike = float(value) except ValueError as exp: - raise ValueError('Bad float/double attribute (v_spike): %s' % exp) - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + raise ValueError("Bad float/double attribute (v_spike): %s" % exp) + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value - super(EIF_cond_exp_isfa_ista, self).buildAttributes(node, attrs, already_processed) + super(EIF_cond_exp_isfa_ista, self).buildAttributes( + node, attrs, already_processed + ) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(EIF_cond_exp_isfa_ista, self).buildChildren(child_, node, nodeName_, True) pass + + # end class EIF_cond_exp_isfa_ista class IF_cond_exp(basePyNNIaFCondCell): - member_data_items_ = [ - ] + member_data_items_ = [] subclass = None superclass = basePyNNIaFCondCell - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, cm=None, i_offset=None, tau_syn_E=None, tau_syn_I=None, v_init=None, tau_m=None, tau_refrac=None, v_reset=None, v_rest=None, v_thresh=None, e_rev_E=None, e_rev_I=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + cm=None, + i_offset=None, + tau_syn_E=None, + tau_syn_I=None, + v_init=None, + tau_m=None, + tau_refrac=None, + v_reset=None, + v_rest=None, + v_thresh=None, + e_rev_E=None, + e_rev_I=None, + **kwargs_ + ): self.original_tagname_ = None - 
self.parent_object_ = kwargs_.get('parent_object_') - super(IF_cond_exp, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, cm, i_offset, tau_syn_E, tau_syn_I, v_init, tau_m, tau_refrac, v_reset, v_rest, v_thresh, e_rev_E, e_rev_I, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(IF_cond_exp, self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + cm, + i_offset, + tau_syn_E, + tau_syn_I, + v_init, + tau_m, + tau_refrac, + v_reset, + v_rest, + v_thresh, + e_rev_E, + e_rev_I, + **kwargs_ + ) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, IF_cond_exp) + subclass = getSubclassFromModule_(CurrentSubclassModule_, IF_cond_exp) if subclass is not None: return subclass(*args_, **kwargs_) if IF_cond_exp.subclass: return IF_cond_exp.subclass(*args_, **kwargs_) else: return IF_cond_exp(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): - if ( - super(IF_cond_exp, self).hasContent_() - ): + if super(IF_cond_exp, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IF_cond_exp', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('IF_cond_exp') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="IF_cond_exp", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("IF_cond_exp") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IF_cond_exp') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="IF_cond_exp" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='IF_cond_exp', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="IF_cond_exp", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='IF_cond_exp'): - super(IF_cond_exp, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IF_cond_exp') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IF_cond_exp', fromsubclass_=False, pretty_print=True): - super(IF_cond_exp, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="IF_cond_exp", + ): + super(IF_cond_exp, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, 
name_="IF_cond_exp" + ) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="IF_cond_exp", + fromsubclass_=False, + pretty_print=True, + ): + super(IF_cond_exp, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -23544,66 +43789,159 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): super(IF_cond_exp, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(IF_cond_exp, self).buildChildren(child_, node, nodeName_, True) pass + + # end class IF_cond_exp class IF_cond_alpha(basePyNNIaFCondCell): - member_data_items_ = [ - ] + member_data_items_ = [] subclass = None superclass = basePyNNIaFCondCell - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, cm=None, i_offset=None, tau_syn_E=None, tau_syn_I=None, v_init=None, tau_m=None, tau_refrac=None, v_reset=None, v_rest=None, v_thresh=None, e_rev_E=None, e_rev_I=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + cm=None, + i_offset=None, + tau_syn_E=None, + tau_syn_I=None, + v_init=None, + tau_m=None, + tau_refrac=None, + v_reset=None, + v_rest=None, + v_thresh=None, + e_rev_E=None, + e_rev_I=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(IF_cond_alpha, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, cm, i_offset, tau_syn_E, tau_syn_I, v_init, tau_m, tau_refrac, v_reset, v_rest, v_thresh, e_rev_E, e_rev_I, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(IF_cond_alpha, self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + cm, + i_offset, + tau_syn_E, + tau_syn_I, + v_init, + tau_m, + tau_refrac, + v_reset, + v_rest, + v_thresh, + e_rev_E, + e_rev_I, + **kwargs_ + ) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, IF_cond_alpha) + subclass = getSubclassFromModule_(CurrentSubclassModule_, IF_cond_alpha) if subclass is not None: return subclass(*args_, **kwargs_) if IF_cond_alpha.subclass: return IF_cond_alpha.subclass(*args_, **kwargs_) else: return IF_cond_alpha(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): - if ( - super(IF_cond_alpha, self).hasContent_() - ): + if super(IF_cond_alpha, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IF_cond_alpha', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('IF_cond_alpha') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="IF_cond_alpha", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("IF_cond_alpha") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % 
(namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IF_cond_alpha') + self.exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="IF_cond_alpha" + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='IF_cond_alpha', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="IF_cond_alpha", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='IF_cond_alpha'): - super(IF_cond_alpha, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IF_cond_alpha') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IF_cond_alpha', fromsubclass_=False, pretty_print=True): - super(IF_cond_alpha, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="IF_cond_alpha", + ): + super(IF_cond_alpha, self).exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="IF_cond_alpha" + ) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="IF_cond_alpha", + fromsubclass_=False, + pretty_print=True, + ): + super(IF_cond_alpha, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -23611,73 +43949,165 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): super(IF_cond_alpha, self).buildAttributes(node, attrs, already_processed) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): super(IF_cond_alpha, self).buildChildren(child_, node, nodeName_, True) pass + + # end class IF_cond_alpha class ContinuousConnectionInstanceW(ContinuousConnectionInstance): """Individual continuous/analog synaptic connection - instance based. 
Includes setting of _weight for the connection""" + member_data_items_ = [ - MemberSpec_('weight', 'xs:float', 0, 0, {'use': u'required'}), + MemberSpec_("weight", "xs:float", 0, 0, {"use": u"required"}), ] subclass = None superclass = ContinuousConnectionInstance - def __init__(self, neuro_lex_id=None, id=None, pre_cell=None, pre_segment='0', pre_fraction_along='0.5', post_cell=None, post_segment='0', post_fraction_along='0.5', pre_component=None, post_component=None, weight=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + pre_cell=None, + pre_segment="0", + pre_fraction_along="0.5", + post_cell=None, + post_segment="0", + post_fraction_along="0.5", + pre_component=None, + post_component=None, + weight=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(ContinuousConnectionInstanceW, self).__init__(neuro_lex_id, id, pre_cell, pre_segment, pre_fraction_along, post_cell, post_segment, post_fraction_along, pre_component, post_component, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(ContinuousConnectionInstanceW, self).__init__( + neuro_lex_id, + id, + pre_cell, + pre_segment, + pre_fraction_along, + post_cell, + post_segment, + post_fraction_along, + pre_component, + post_component, + **kwargs_ + ) self.weight = _cast(float, weight) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, ContinuousConnectionInstanceW) + CurrentSubclassModule_, ContinuousConnectionInstanceW + ) if subclass is not None: return subclass(*args_, **kwargs_) if ContinuousConnectionInstanceW.subclass: return ContinuousConnectionInstanceW.subclass(*args_, **kwargs_) else: return ContinuousConnectionInstanceW(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): - if ( - super(ContinuousConnectionInstanceW, self).hasContent_() - ): + if super(ContinuousConnectionInstanceW, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ContinuousConnectionInstanceW', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('ContinuousConnectionInstanceW') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ContinuousConnectionInstanceW", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ContinuousConnectionInstanceW") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ContinuousConnectionInstanceW') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ContinuousConnectionInstanceW", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ContinuousConnectionInstanceW', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - 
outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ContinuousConnectionInstanceW'): - super(ContinuousConnectionInstanceW, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ContinuousConnectionInstanceW') - if self.weight is not None and 'weight' not in already_processed: - already_processed.add('weight') - outfile.write(' weight="%s"' % self.gds_format_float(self.weight, input_name='weight')) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ContinuousConnectionInstanceW', fromsubclass_=False, pretty_print=True): - super(ContinuousConnectionInstanceW, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ContinuousConnectionInstanceW", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ContinuousConnectionInstanceW", + ): + super(ContinuousConnectionInstanceW, self).exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ContinuousConnectionInstanceW", + ) + if self.weight is not None and "weight" not in already_processed: + already_processed.add("weight") + outfile.write( + ' weight="%s"' % self.gds_format_float(self.weight, input_name="weight") + ) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ContinuousConnectionInstanceW", + fromsubclass_=False, + pretty_print=True, + ): + super(ContinuousConnectionInstanceW, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) pass + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -23685,17 +44115,23 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('weight', node) - if value is not None and 'weight' not in already_processed: - already_processed.add('weight') + value = find_attr_value_("weight", node) + if value is not None and "weight" not in already_processed: + already_processed.add("weight") try: self.weight = float(value) except ValueError as exp: - raise ValueError('Bad float/double attribute (weight): %s' % exp) - super(ContinuousConnectionInstanceW, self).buildAttributes(node, attrs, already_processed) + raise ValueError("Bad float/double attribute (weight): %s" % exp) + super(ContinuousConnectionInstanceW, self).buildAttributes( + node, attrs, already_processed + ) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(ContinuousConnectionInstanceW, self).buildChildren(child_, node, nodeName_, True) + super(ContinuousConnectionInstanceW, self).buildChildren( + child_, node, nodeName_, True + ) pass def get_weight(self): @@ -23704,12 +44140,24 @@ def get_weight(self): If weight is not set, the default value of 1.0 is returned. 
""" - return float(self.weight) if self.weight!=None else 1.0 + return float(self.weight) if self.weight != None else 1.0 def __str__(self): - return "Continuous Connection (Instance based & weight) "+str(self.id)+": "+str(self.get_pre_info())+" -> "+str(self.get_post_info())+ ", pre comp: "+str(self.pre_component)+", post comp: "+str(self.post_component)+", weight: "+'%.6f'%self.get_weight() - + return ( + "Continuous Connection (Instance based & weight) " + + str(self.id) + + ": " + + str(self.get_pre_info()) + + " -> " + + str(self.get_post_info()) + + ", pre comp: " + + str(self.pre_component) + + ", post comp: " + + str(self.post_component) + + ", weight: " + + "%.6f" % self.get_weight() + ) # end class ContinuousConnectionInstanceW @@ -23718,62 +44166,148 @@ class ElectricalConnectionInstanceW(ElectricalConnectionInstance): """Projection between two populations consisting of analog connections (e.g. graded synapses). Includes setting of weight for the connection""" + member_data_items_ = [ - MemberSpec_('weight', 'xs:float', 0, 0, {'use': u'required'}), + MemberSpec_("weight", "xs:float", 0, 0, {"use": u"required"}), ] subclass = None superclass = ElectricalConnectionInstance - def __init__(self, neuro_lex_id=None, id=None, pre_cell=None, pre_segment='0', pre_fraction_along='0.5', post_cell=None, post_segment='0', post_fraction_along='0.5', synapse=None, weight=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + pre_cell=None, + pre_segment="0", + pre_fraction_along="0.5", + post_cell=None, + post_segment="0", + post_fraction_along="0.5", + synapse=None, + weight=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(ElectricalConnectionInstanceW, self).__init__(neuro_lex_id, id, pre_cell, pre_segment, pre_fraction_along, post_cell, post_segment, post_fraction_along, synapse, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(ElectricalConnectionInstanceW, self).__init__( + neuro_lex_id, + id, + pre_cell, + pre_segment, + pre_fraction_along, + post_cell, + post_segment, + post_fraction_along, + synapse, + **kwargs_ + ) self.weight = _cast(float, weight) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, ElectricalConnectionInstanceW) + CurrentSubclassModule_, ElectricalConnectionInstanceW + ) if subclass is not None: return subclass(*args_, **kwargs_) if ElectricalConnectionInstanceW.subclass: return ElectricalConnectionInstanceW.subclass(*args_, **kwargs_) else: return ElectricalConnectionInstanceW(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): - if ( - super(ElectricalConnectionInstanceW, self).hasContent_() - ): + if super(ElectricalConnectionInstanceW, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ElectricalConnectionInstanceW', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('ElectricalConnectionInstanceW') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ElectricalConnectionInstanceW", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ElectricalConnectionInstanceW") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ 
showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ElectricalConnectionInstanceW') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ElectricalConnectionInstanceW", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ElectricalConnectionInstanceW', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ElectricalConnectionInstanceW'): - super(ElectricalConnectionInstanceW, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ElectricalConnectionInstanceW') - if self.weight is not None and 'weight' not in already_processed: - already_processed.add('weight') - outfile.write(' weight="%s"' % self.gds_format_float(self.weight, input_name='weight')) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ElectricalConnectionInstanceW', fromsubclass_=False, pretty_print=True): - super(ElectricalConnectionInstanceW, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ElectricalConnectionInstanceW", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ElectricalConnectionInstanceW", + ): + super(ElectricalConnectionInstanceW, self).exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ElectricalConnectionInstanceW", + ) + if self.weight is not None and "weight" not in already_processed: + already_processed.add("weight") + outfile.write( + ' weight="%s"' % self.gds_format_float(self.weight, input_name="weight") + ) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ElectricalConnectionInstanceW", + fromsubclass_=False, + pretty_print=True, + ): + super(ElectricalConnectionInstanceW, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) pass + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -23781,17 +44315,23 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('weight', node) - if value is not None and 'weight' not in already_processed: - already_processed.add('weight') + value = find_attr_value_("weight", node) + if value is not None and "weight" not in already_processed: + already_processed.add("weight") try: self.weight = float(value) except ValueError as exp: - raise ValueError('Bad float/double attribute (weight): %s' % exp) - super(ElectricalConnectionInstanceW, 
self).buildAttributes(node, attrs, already_processed) + raise ValueError("Bad float/double attribute (weight): %s" % exp) + super(ElectricalConnectionInstanceW, self).buildAttributes( + node, attrs, already_processed + ) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(ElectricalConnectionInstanceW, self).buildChildren(child_, node, nodeName_, True) + super(ElectricalConnectionInstanceW, self).buildChildren( + child_, node, nodeName_, True + ) pass def get_weight(self): @@ -23804,81 +44344,216 @@ def get_weight(self): :rtype: float """ - return float(self.weight) if self.weight!=None else 1.0 + return float(self.weight) if self.weight != None else 1.0 def __str__(self): - return "Electrical Connection (Instance based & weight) "+str(self.id)+": "+str(self.get_pre_info())+" -> "+str(self.get_post_info())+ ", synapse: "+str(self.synapse) + ", weight: "+'%.6f'%self.get_weight() + return ( + "Electrical Connection (Instance based & weight) " + + str(self.id) + + ": " + + str(self.get_pre_info()) + + " -> " + + str(self.get_post_info()) + + ", synapse: " + + str(self.synapse) + + ", weight: " + + "%.6f" % self.get_weight() + ) # end class ElectricalConnectionInstanceW class BlockingPlasticSynapse(ExpTwoSynapse): member_data_items_ = [ - MemberSpec_('plasticity_mechanism', 'PlasticityMechanism', 0, 1, {u'type': u'PlasticityMechanism', u'name': u'plasticityMechanism', u'minOccurs': u'0'}, None), - MemberSpec_('block_mechanism', 'BlockMechanism', 0, 1, {u'type': u'BlockMechanism', u'name': u'blockMechanism', u'minOccurs': u'0'}, None), + MemberSpec_( + "plasticity_mechanism", + "PlasticityMechanism", + 0, + 1, + { + u"type": u"PlasticityMechanism", + u"name": u"plasticityMechanism", + u"minOccurs": u"0", + }, + None, + ), + MemberSpec_( + "block_mechanism", + "BlockMechanism", + 0, + 1, + { + u"type": u"BlockMechanism", + u"name": u"blockMechanism", + u"minOccurs": u"0", + }, + None, + ), ] subclass = None superclass = ExpTwoSynapse - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, gbase=None, erev=None, tau_decay=None, tau_rise=None, plasticity_mechanism=None, block_mechanism=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + gbase=None, + erev=None, + tau_decay=None, + tau_rise=None, + plasticity_mechanism=None, + block_mechanism=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(BlockingPlasticSynapse, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, gbase, erev, tau_decay, tau_rise, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(BlockingPlasticSynapse, self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + gbase, + erev, + tau_decay, + tau_rise, + **kwargs_ + ) self.plasticity_mechanism = plasticity_mechanism self.block_mechanism = block_mechanism + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, BlockingPlasticSynapse) + CurrentSubclassModule_, BlockingPlasticSynapse + ) if subclass is not None: return subclass(*args_, **kwargs_) if BlockingPlasticSynapse.subclass: return BlockingPlasticSynapse.subclass(*args_, **kwargs_) else: return BlockingPlasticSynapse(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): if ( - self.plasticity_mechanism is not None or - 
self.block_mechanism is not None or - super(BlockingPlasticSynapse, self).hasContent_() + self.plasticity_mechanism is not None + or self.block_mechanism is not None + or super(BlockingPlasticSynapse, self).hasContent_() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BlockingPlasticSynapse', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('BlockingPlasticSynapse') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="BlockingPlasticSynapse", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("BlockingPlasticSynapse") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BlockingPlasticSynapse') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="BlockingPlasticSynapse", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BlockingPlasticSynapse', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="BlockingPlasticSynapse", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BlockingPlasticSynapse'): - super(BlockingPlasticSynapse, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BlockingPlasticSynapse') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BlockingPlasticSynapse', fromsubclass_=False, pretty_print=True): - super(BlockingPlasticSynapse, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="BlockingPlasticSynapse", + ): + super(BlockingPlasticSynapse, self).exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="BlockingPlasticSynapse", + ) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="BlockingPlasticSynapse", + fromsubclass_=False, + pretty_print=True, + ): + super(BlockingPlasticSynapse, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.plasticity_mechanism is not None: - self.plasticity_mechanism.export(outfile, level, namespaceprefix_, namespacedef_='', name_='plasticityMechanism', pretty_print=pretty_print) + self.plasticity_mechanism.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", 
+ name_="plasticityMechanism", + pretty_print=pretty_print, + ) if self.block_mechanism is not None: - self.block_mechanism.export(outfile, level, namespaceprefix_, namespacedef_='', name_='blockMechanism', pretty_print=pretty_print) + self.block_mechanism.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="blockMechanism", + pretty_print=pretty_print, + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -23886,75 +44561,190 @@ def build(self, node): nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - super(BlockingPlasticSynapse, self).buildAttributes(node, attrs, already_processed) + super(BlockingPlasticSynapse, self).buildAttributes( + node, attrs, already_processed + ) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'plasticityMechanism': + if nodeName_ == "plasticityMechanism": obj_ = PlasticityMechanism.factory(parent_object_=self) obj_.build(child_) self.plasticity_mechanism = obj_ - obj_.original_tagname_ = 'plasticityMechanism' - elif nodeName_ == 'blockMechanism': + obj_.original_tagname_ = "plasticityMechanism" + elif nodeName_ == "blockMechanism": obj_ = BlockMechanism.factory(parent_object_=self) obj_.build(child_) self.block_mechanism = obj_ - obj_.original_tagname_ = 'blockMechanism' + obj_.original_tagname_ = "blockMechanism" super(BlockingPlasticSynapse, self).buildChildren(child_, node, nodeName_, True) + + # end class BlockingPlasticSynapse class EIF_cond_alpha_isfa_ista(EIF_cond_exp_isfa_ista): - member_data_items_ = [ - ] + member_data_items_ = [] subclass = None superclass = EIF_cond_exp_isfa_ista - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, cm=None, i_offset=None, tau_syn_E=None, tau_syn_I=None, v_init=None, tau_m=None, tau_refrac=None, v_reset=None, v_rest=None, v_thresh=None, e_rev_E=None, e_rev_I=None, a=None, b=None, delta_T=None, tau_w=None, v_spike=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + cm=None, + i_offset=None, + tau_syn_E=None, + tau_syn_I=None, + v_init=None, + tau_m=None, + tau_refrac=None, + v_reset=None, + v_rest=None, + v_thresh=None, + e_rev_E=None, + e_rev_I=None, + a=None, + b=None, + delta_T=None, + tau_w=None, + v_spike=None, + **kwargs_ + ): self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(EIF_cond_alpha_isfa_ista, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, cm, i_offset, tau_syn_E, tau_syn_I, v_init, tau_m, tau_refrac, v_reset, v_rest, v_thresh, e_rev_E, e_rev_I, a, b, delta_T, tau_w, v_spike, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + super(EIF_cond_alpha_isfa_ista, self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + cm, + i_offset, + tau_syn_E, + tau_syn_I, + v_init, + tau_m, + tau_refrac, + v_reset, + v_rest, + v_thresh, + e_rev_E, + e_rev_I, + a, + b, + delta_T, + tau_w, + v_spike, + **kwargs_ + ) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, EIF_cond_alpha_isfa_ista) + CurrentSubclassModule_, EIF_cond_alpha_isfa_ista + ) if subclass is not None: return subclass(*args_, **kwargs_) if 
EIF_cond_alpha_isfa_ista.subclass: return EIF_cond_alpha_isfa_ista.subclass(*args_, **kwargs_) else: return EIF_cond_alpha_isfa_ista(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): - if ( - super(EIF_cond_alpha_isfa_ista, self).hasContent_() - ): + if super(EIF_cond_alpha_isfa_ista, self).hasContent_(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='EIF_cond_alpha_isfa_ista', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('EIF_cond_alpha_isfa_ista') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="EIF_cond_alpha_isfa_ista", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("EIF_cond_alpha_isfa_ista") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='EIF_cond_alpha_isfa_ista') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self.exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="EIF_cond_alpha_isfa_ista", + ) if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='EIF_cond_alpha_isfa_ista', pretty_print=pretty_print) + outfile.write(">%s" % (eol_,)) + self.exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="EIF_cond_alpha_isfa_ista", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='EIF_cond_alpha_isfa_ista'): - super(EIF_cond_alpha_isfa_ista, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='EIF_cond_alpha_isfa_ista') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='EIF_cond_alpha_isfa_ista', fromsubclass_=False, pretty_print=True): - super(EIF_cond_alpha_isfa_ista, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="EIF_cond_alpha_isfa_ista", + ): + super(EIF_cond_alpha_isfa_ista, self).exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="EIF_cond_alpha_isfa_ista", + ) + + def exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="EIF_cond_alpha_isfa_ista", + fromsubclass_=False, + pretty_print=True, + ): + super(EIF_cond_alpha_isfa_ista, self).exportChildren( + outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print + ) + def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -23962,16 +44752,24 @@ def build(self, node): nodeName_ = 
Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self + def buildAttributes(self, node, attrs, already_processed): - super(EIF_cond_alpha_isfa_ista, self).buildAttributes(node, attrs, already_processed) + super(EIF_cond_alpha_isfa_ista, self).buildAttributes( + node, attrs, already_processed + ) + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(EIF_cond_alpha_isfa_ista, self).buildChildren(child_, node, nodeName_, True) + super(EIF_cond_alpha_isfa_ista, self).buildChildren( + child_, node, nodeName_, True + ) pass + + # end class EIF_cond_alpha_isfa_ista GDSClassesMapping = { - 'neuroml': NeuroMLDocument, + "neuroml": NeuroMLDocument, } @@ -23999,18 +44797,18 @@ def parse(inFileName, silence=False): rootNode = doc.getroot() rootTag, rootClass = get_root_tag(rootNode) if rootClass is None: - rootTag = 'Property' + rootTag = "Property" rootClass = Property rootObj = rootClass.factory() rootObj.build(rootNode) # Enable Python to collect the space used by the DOM. doc = None -## if not silence: -## sys.stdout.write('\n') -## rootObj.export( -## sys.stdout, 0, name_=rootTag, -## namespacedef_='', -## pretty_print=True) + ## if not silence: + ## sys.stdout.write('\n') + ## rootObj.export( + ## sys.stdout, 0, name_=rootTag, + ## namespacedef_='', + ## pretty_print=True) return rootObj @@ -24020,7 +44818,7 @@ def parseEtree(inFileName, silence=False): rootNode = doc.getroot() rootTag, rootClass = get_root_tag(rootNode) if rootClass is None: - rootTag = 'Property' + rootTag = "Property" rootClass = Property rootObj = rootClass.factory() rootObj.build(rootNode) @@ -24029,38 +44827,38 @@ def parseEtree(inFileName, silence=False): mapping = {} rootElement = rootObj.to_etree(None, name_=rootTag, mapping_=mapping) reverse_mapping = rootObj.gds_reverse_node_mapping(mapping) -## if not silence: -## content = etree_.tostring( -## rootElement, pretty_print=True, -## xml_declaration=True, encoding="utf-8") -## sys.stdout.write(content) -## sys.stdout.write('\n') + ## if not silence: + ## content = etree_.tostring( + ## rootElement, pretty_print=True, + ## xml_declaration=True, encoding="utf-8") + ## sys.stdout.write(content) + ## sys.stdout.write('\n') return rootObj, rootElement, mapping, reverse_mapping def parseString(inString, silence=False): - '''Parse a string, create the object tree, and export it. + """Parse a string, create the object tree, and export it. Arguments: - inString -- A string. This XML fragment should not start with an XML declaration containing an encoding. - silence -- A boolean. If False, export the object. Returns -- The root object in the tree. - ''' + """ parser = None - rootNode= parsexmlstring_(inString, parser) + rootNode = parsexmlstring_(inString, parser) rootTag, rootClass = get_root_tag(rootNode) if rootClass is None: - rootTag = 'Property' + rootTag = "Property" rootClass = Property rootObj = rootClass.factory() rootObj.build(rootNode) # Enable Python to collect the space used by the DOM. 
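# ---- Editor's annotation (illustrative sketch, not part of the patch) ----
# parseString(), like parse()/parseEtree()/parseLiteral() in this hunk,
# resolves the root tag via get_root_tag()/GDSClassesMapping (here only
# "neuroml" -> NeuroMLDocument) and falls back to Property for unrecognised
# tags. A minimal sketch, assuming a direct import of this generated module;
# the XML snippet is a made-up example and, per the docstring above, must not
# start with an XML declaration containing an encoding.
#
#   from neuroml.nml.nml import parseString
#   xml = '<neuroml xmlns="http://www.neuroml.org/schema/neuroml2" id="doc0"/>'
#   doc = parseString(xml)   # builds and returns a NeuroMLDocument
#   print(doc.id)            # -> "doc0"
# ---- end editor's annotation ----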
-## if not silence: -## sys.stdout.write('\n') -## rootObj.export( -## sys.stdout, 0, name_=rootTag, -## namespacedef_='') + ## if not silence: + ## sys.stdout.write('\n') + ## rootObj.export( + ## sys.stdout, 0, name_=rootTag, + ## namespacedef_='') return rootObj @@ -24070,18 +44868,18 @@ def parseLiteral(inFileName, silence=False): rootNode = doc.getroot() rootTag, rootClass = get_root_tag(rootNode) if rootClass is None: - rootTag = 'Property' + rootTag = "Property" rootClass = Property rootObj = rootClass.factory() rootObj.build(rootNode) # Enable Python to collect the space used by the DOM. doc = None -## if not silence: -## sys.stdout.write('#from nml import *\n\n') -## sys.stdout.write('import nml as model_\n\n') -## sys.stdout.write('rootObj = model_.rootClass(\n') -## rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) -## sys.stdout.write(')\n') + ## if not silence: + ## sys.stdout.write('#from nml import *\n\n') + ## sys.stdout.write('import nml as model_\n\n') + ## sys.stdout.write('rootObj = model_.rootClass(\n') + ## rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + ## sys.stdout.write(')\n') return rootObj @@ -24093,8 +44891,8 @@ def main(): usage() -if __name__ == '__main__': - #import pdb; pdb.set_trace() +if __name__ == "__main__": + # import pdb; pdb.set_trace() main() @@ -24287,5 +45085,5 @@ def main(): "VoltageClampTriple", "basePyNNCell", "basePyNNIaFCell", - "basePyNNIaFCondCell" + "basePyNNIaFCondCell", ] diff --git a/neuroml/test/misc_tests.py b/neuroml/test/misc_tests.py index ac14a6d7..c11e498a 100644 --- a/neuroml/test/misc_tests.py +++ b/neuroml/test/misc_tests.py @@ -13,19 +13,19 @@ except ImportError: import unittest -class TestCommonProperties(unittest.TestCase): +class TestCommonProperties(unittest.TestCase): def test_instatiation(self): """ Since classes are auto-generated, need to test that they correctly instantiate """ - - for name,test_class in inspect.getmembers(sys.modules[neuroml.__name__]): + + for name, test_class in inspect.getmembers(sys.modules[neuroml.__name__]): if sys.version_info >= (2, 7): print(sys.version_info) if inspect.isclass(test_class): ob = test_class() - self.assertIsInstance(ob,test_class) + self.assertIsInstance(ob, test_class) else: print("Warning - Python<2.7 does not support this test") pass diff --git a/neuroml/test/test_arraymorph.py b/neuroml/test/test_arraymorph.py index e8fb887f..a81b7eec 100644 --- a/neuroml/test/test_arraymorph.py +++ b/neuroml/test/test_arraymorph.py @@ -10,50 +10,52 @@ except ImportError: import unittest -class TestObjectBuiltMorphology(unittest.TestCase): +class TestObjectBuiltMorphology(unittest.TestCase): def setUp(self): """ Testing a complex hand-built morphology (from neuroml objects rather than arrays) """ - p = neuroml.Point3DWithDiam(x=0,y=0,z=0,diameter=50) - d = neuroml.Point3DWithDiam(x=50,y=0,z=0,diameter=50) + p = neuroml.Point3DWithDiam(x=0, y=0, z=0, diameter=50) + d = neuroml.Point3DWithDiam(x=50, y=0, z=0, diameter=50) soma = neuroml.Segment(proximal=p, distal=d) - soma.name = 'Soma' + soma.name = "Soma" soma.id = 0 - - #now make an axon with 100 compartments: - + + # now make an axon with 100 compartments: + parent = neuroml.SegmentParent(segments=soma.id) parent_segment = soma axon_segments = [] seg_id = 1 for i in range(100): - p = neuroml.Point3DWithDiam(x=parent_segment.distal.x, - y=parent_segment.distal.y, - z=parent_segment.distal.z, - diameter=0.1) - - d = neuroml.Point3DWithDiam(x=parent_segment.distal.x+10, - y=parent_segment.distal.y, - z=parent_segment.distal.z, - 
diameter=0.1) - - axon_segment = neuroml.Segment(proximal = p, - distal = d, - parent = parent) - + p = neuroml.Point3DWithDiam( + x=parent_segment.distal.x, + y=parent_segment.distal.y, + z=parent_segment.distal.z, + diameter=0.1, + ) + + d = neuroml.Point3DWithDiam( + x=parent_segment.distal.x + 10, + y=parent_segment.distal.y, + z=parent_segment.distal.z, + diameter=0.1, + ) + + axon_segment = neuroml.Segment(proximal=p, distal=d, parent=parent) + axon_segment.id = seg_id - - axon_segment.name = 'axon_segment_' + str(axon_segment.id) - - #now reset everything: + + axon_segment.name = "axon_segment_" + str(axon_segment.id) + + # now reset everything: parent = neuroml.SegmentParent(segments=axon_segment.id) parent_segment = axon_segment - seg_id += 1 - + seg_id += 1 + axon_segments.append(axon_segment) test_morphology = am.ArrayMorphology() @@ -69,106 +71,118 @@ def test_valid_morphology_ids(self): def test_invalid_morphology_ids(self): morphology = self.test_morphology - morphology.segments[0].id = 5 + morphology.segments[0].id = 5 self.assertFalse(morphology.valid_ids) def test_num_segments(self): num_segments = len(self.test_morphology.segments) - self.assertEqual(num_segments,101) + self.assertEqual(num_segments, 101) def test_segments_ids_ok(self): - self.assertEqual(self.test_morphology.segments[30].id,30) + self.assertEqual(self.test_morphology.segments[30].id, 30) def test_soma_still_located_at_zero(self): - self.assertEqual(self.test_morphology.segments[0].name,'Soma') - self.assertEqual(self.test_morphology.segments[0].id,0) + self.assertEqual(self.test_morphology.segments[0].name, "Soma") + self.assertEqual(self.test_morphology.segments[0].id, 0) def test_segment_vertices_ok(self): - self.assertEqual(self.test_morphology.segments[1].proximal.x,50.0) - + self.assertEqual(self.test_morphology.segments[1].proximal.x, 50.0) + def test_axon_names_ok(self): - self.assertEqual(self.test_morphology.segments[32].name,'axon_segment_32') + self.assertEqual(self.test_morphology.segments[32].name, "axon_segment_32") def test_segment_instance(self): seg = self.test_morphology.segments[47] - self.assertIsInstance(seg,neuroml.nml.nml.Segment) + self.assertIsInstance(seg, neuroml.nml.nml.Segment) -class TestArrayMorphology(unittest.TestCase): +class TestArrayMorphology(unittest.TestCase): def setUp(self): num_segments = int(100) num_vertices = num_segments + 1 - x = np.linspace(0,10,num_vertices) + x = np.linspace(0, 10, num_vertices) y = np.zeros(num_vertices) z = np.zeros(num_vertices) - d = np.linspace(1,0.01,num_vertices) + d = np.linspace(1, 0.01, num_vertices) - connectivity = range(-1,num_segments) + connectivity = range(-1, num_segments) - vertices = np.array([x,y,z,d]).T + vertices = np.array([x, y, z, d]).T self.complex_vertices = vertices - + physical_mask = np.zeros(num_vertices) - #third segment is non-physical: + # third segment is non-physical: physical_mask[2] = 1 physical_mask[20] = 1 - - self.complex_morphology = am.ArrayMorphology(vertices=vertices, - connectivity=connectivity, - physical_mask=physical_mask, - id = 'test_arraymorph') - - self.valid_vertices = [[0,0,0,0.1], - [1,0,0,0.2], - [2,0,0,0.3], - [3,0,0,0.4]] - - self.valid_connectivity = [-1,0,1,2] - - self.optimized_morphology = am.ArrayMorphology(vertices=self.valid_vertices, - connectivity=self.valid_connectivity, - id = 'test_arraymorph') - - proximal_point = neuroml.Point3DWithDiam(x=0.1, - y=0.2, - z=0.3, - diameter=1.1,) - - distal_point = neuroml.Point3DWithDiam(x=0.0, - y=0.0, - z=0.0, - diameter=1.1,) - - 
soma = neuroml.Segment(proximal = proximal_point, - distal = distal_point,) + + self.complex_morphology = am.ArrayMorphology( + vertices=vertices, + connectivity=connectivity, + physical_mask=physical_mask, + id="test_arraymorph", + ) + + self.valid_vertices = [ + [0, 0, 0, 0.1], + [1, 0, 0, 0.2], + [2, 0, 0, 0.3], + [3, 0, 0, 0.4], + ] + + self.valid_connectivity = [-1, 0, 1, 2] + + self.optimized_morphology = am.ArrayMorphology( + vertices=self.valid_vertices, + connectivity=self.valid_connectivity, + id="test_arraymorph", + ) + + proximal_point = neuroml.Point3DWithDiam( + x=0.1, + y=0.2, + z=0.3, + diameter=1.1, + ) + + distal_point = neuroml.Point3DWithDiam( + x=0.0, + y=0.0, + z=0.0, + diameter=1.1, + ) + + soma = neuroml.Segment( + proximal=proximal_point, + distal=distal_point, + ) self.small_morphology = am.ArrayMorphology() self.small_morphology.segments.append(soma) def test_single_segment_morphology_instantiation(self): print(self.small_morphology.connectivity) seg = self.small_morphology.segments[0] - self.assertIsInstance(seg,neuroml.nml.nml.Segment) + self.assertIsInstance(seg, neuroml.nml.nml.Segment) def test_single_segment_morphology_length(self): - self.assertEqual(len(self.small_morphology.segments),1) + self.assertEqual(len(self.small_morphology.segments), 1) def test_index_error(self): """ There is no segments[1] for a one-segment morphology """ - self.assertRaises(IndexError,self.small_morphology.segments.__getitem__,1) - + self.assertRaises(IndexError, self.small_morphology.segments.__getitem__, 1) + def test_single_floating_segment(self): """ Because physical_mask[4] = 1 a segment should be skipped as it is floating. """ - + seg = self.complex_morphology.segments[3] seg_proximal_x = seg.proximal.x seg_distal_x = seg.distal.x @@ -176,15 +190,15 @@ def test_single_floating_segment(self): equivalent_proximal_vertex = self.complex_vertices[5][0] equivalent_distal_vertex = self.complex_vertices[4][0] - self.assertEqual(seg_proximal_x,equivalent_proximal_vertex) - self.assertEqual(seg_distal_x,equivalent_distal_vertex) + self.assertEqual(seg_proximal_x, equivalent_proximal_vertex) + self.assertEqual(seg_distal_x, equivalent_distal_vertex) def test_double_floating_segment(self): """ Because physical_mask[4] = 1 a segment should be skipped as it is floating. """ - + seg = self.complex_morphology.segments[3] seg_proximal_x = seg.proximal.x seg_distal_x = seg.distal.x @@ -192,14 +206,13 @@ def test_double_floating_segment(self): equivalent_proximal_vertex = self.complex_vertices[5][0] equivalent_distal_vertex = self.complex_vertices[4][0] - self.assertEqual(seg_proximal_x,equivalent_proximal_vertex) - self.assertEqual(seg_distal_x,equivalent_distal_vertex) - + self.assertEqual(seg_proximal_x, equivalent_proximal_vertex) + self.assertEqual(seg_distal_x, equivalent_distal_vertex) def test_segments_len(self): num_segments = 98 len_segment_list = len(self.complex_morphology.segments) - self.assertEqual(num_segments,len_segment_list) + self.assertEqual(num_segments, len_segment_list) def test_add_segment_len(self): """ @@ -207,19 +220,22 @@ def test_add_segment_len(self): and distal vertices should be used. The internal connectivity should be passed. 
""" - - proximal_point = neuroml.Point3DWithDiam(x=0.1, - y=0.2, - z=0.3, - diameter=1.1,) - distal_point = neuroml.Point3DWithDiam(x=0.0, - y=0.0, - z=0.0, - diameter=1.1,) + proximal_point = neuroml.Point3DWithDiam( + x=0.1, + y=0.2, + z=0.3, + diameter=1.1, + ) - seg = neuroml.Segment(proximal = proximal_point, - distal = distal_point) + distal_point = neuroml.Point3DWithDiam( + x=0.0, + y=0.0, + z=0.0, + diameter=1.1, + ) + + seg = neuroml.Segment(proximal=proximal_point, distal=distal_point) num_segments = len(self.complex_morphology.segments) @@ -227,40 +243,42 @@ def test_add_segment_len(self): len_segment_list = len(self.complex_morphology.segments) - self.assertEqual(num_segments+1, len_segment_list) + self.assertEqual(num_segments + 1, len_segment_list) self.setUp() def test_add_segment_vertices_added(self): - proximal_point = neuroml.Point3DWithDiam(x=0.1, - y=0.2, - z=0.3, - diameter=0.1,) + proximal_point = neuroml.Point3DWithDiam( + x=0.1, + y=0.2, + z=0.3, + diameter=0.1, + ) - distal_point = neuroml.Point3DWithDiam(x=0.0, - y=0.0, - z=0.0, - diameter=0.1) + distal_point = neuroml.Point3DWithDiam(x=0.0, y=0.0, z=0.0, diameter=0.1) - seg = neuroml.Segment(proximal = proximal_point, - distal = distal_point) + seg = neuroml.Segment(proximal=proximal_point, distal=distal_point) num_segments = len(self.complex_morphology.segments) self.optimized_morphology.segments.append(seg) true_vertices = self.optimized_morphology.vertices - expected_vertices = np.array([[0,0,0,0.1], - [1,0,0,0.2], - [2,0,0,0.3], - [3,0,0,0.4], - [0,0,0,0.1], - [0.1,0.2,0.3,0.1],]) - - arrays_equal = np.array_equal(true_vertices,expected_vertices) + expected_vertices = np.array( + [ + [0, 0, 0, 0.1], + [1, 0, 0, 0.2], + [2, 0, 0, 0.3], + [3, 0, 0, 0.4], + [0, 0, 0, 0.1], + [0.1, 0.2, 0.3, 0.1], + ] + ) + + arrays_equal = np.array_equal(true_vertices, expected_vertices) self.assertTrue(arrays_equal) self.setUp() - + def tes_add_segment_connectivity_valid(self): pass @@ -269,7 +287,7 @@ def test_num_vertices(self): Morphology with one segment """ - self.assertEqual(self.optimized_morphology.num_vertices,4) + self.assertEqual(self.optimized_morphology.num_vertices, 4) def test_valid_morphology(self): """ @@ -280,99 +298,89 @@ def test_valid_morphology(self): # does not like. # Ignore the VisibleDeprecationWarning that numpy throws. 
with warnings.catch_warnings(): - warnings.filterwarnings("ignore", "Creating an ndarray from ragged nested sequences") - vertices=[[0,0,0],[1,1]] - connectivity=[-1,0] - self.assertRaises(AssertionError,am.ArrayMorphology,vertices,connectivity) - - vertices=[[0,0,0],[1,1,1]] - connectivity=[-1,0,0] - self.assertRaises(AssertionError,am.ArrayMorphology,vertices,connectivity) - - vertices=[[0,0,0],[1,1,1]] - connectivity=[] - self.assertRaises(AssertionError,am.ArrayMorphology,vertices,connectivity) + warnings.filterwarnings( + "ignore", "Creating an ndarray from ragged nested sequences" + ) + vertices = [[0, 0, 0], [1, 1]] + connectivity = [-1, 0] + self.assertRaises( + AssertionError, am.ArrayMorphology, vertices, connectivity + ) + + vertices = [[0, 0, 0], [1, 1, 1]] + connectivity = [-1, 0, 0] + self.assertRaises(AssertionError, am.ArrayMorphology, vertices, connectivity) + + vertices = [[0, 0, 0], [1, 1, 1]] + connectivity = [] + self.assertRaises(AssertionError, am.ArrayMorphology, vertices, connectivity) def test_root_index(self): - self.assertEqual(self.optimized_morphology.root_index,0) - + self.assertEqual(self.optimized_morphology.root_index, 0) + def test_physical_indeces(self): physical_indices = self.optimized_morphology.physical_indices - self.assertTrue(np.array_equal(physical_indices,[0,1,2,3])) + self.assertTrue(np.array_equal(physical_indices, [0, 1, 2, 3])) def test_children(self): - self.assertTrue(self.optimized_morphology.children(1),2) + self.assertTrue(self.optimized_morphology.children(1), 2) def test_to_root(self): - new_morphology = am.ArrayMorphology(self.optimized_morphology.vertices, - self.optimized_morphology.connectivity) + new_morphology = am.ArrayMorphology( + self.optimized_morphology.vertices, self.optimized_morphology.connectivity + ) new_morphology.to_root(2) new_connectivity = new_morphology.connectivity - self.assertTrue(np.array_equal(new_connectivity,[1,2,-1,2])) + self.assertTrue(np.array_equal(new_connectivity, [1, 2, -1, 2])) def test_to_neuroml_morphology(self): neuroml_morphology = self.optimized_morphology.to_neuroml_morphology(id="Test") - self.assertEqual(neuroml_morphology.id,"Test") - self.assertEqual(len(neuroml_morphology.segments),3) - + self.assertEqual(neuroml_morphology.id, "Test") + self.assertEqual(len(neuroml_morphology.segments), 3) def test_pop(self): - new_morphology = am.ArrayMorphology(self.optimized_morphology.vertices, - self.optimized_morphology.connectivity)# - + new_morphology = am.ArrayMorphology( + self.optimized_morphology.vertices, self.optimized_morphology.connectivity + ) # + new_morphology.pop(1) new_connectivity = new_morphology.connectivity - self.assertTrue(np.array_equal(new_connectivity,[-1,0,1])) + self.assertTrue(np.array_equal(new_connectivity, [-1, 0, 1])) def test_segment_getter(self): segment = self.optimized_morphology.segments[0] - self.assertIsInstance(segment,neuroml.Segment) - self.assertEqual(segment.proximal.diameter,0.2) - self.assertEqual(segment.distal.diameter,0.1) + self.assertIsInstance(segment, neuroml.Segment) + self.assertEqual(segment.proximal.diameter, 0.2) + self.assertEqual(segment.distal.diameter, 0.1) def test_segmentlist_getter(self): segment = self.optimized_morphology.segments[1] segment_again = self.optimized_morphology.segments[1] - self.assertEqual(segment,segment_again) + self.assertEqual(segment, segment_again) def test_segmentlist_setter(self): - p = neuroml.Point3DWithDiam(x=0.9, - y=0.0, - z=0.0, - diameter=0.1) - - d = neuroml.Point3DWithDiam(x=0.0, - y=0.0, - z=0.0, 
- diameter=0.1) + p = neuroml.Point3DWithDiam(x=0.9, y=0.0, z=0.0, diameter=0.1) - new_segment = neuroml.Segment(proximal=p, - distal=d) + d = neuroml.Point3DWithDiam(x=0.0, y=0.0, z=0.0, diameter=0.1) + + new_segment = neuroml.Segment(proximal=p, distal=d) self.optimized_morphology.segments[2] = new_segment - self.assertEqual(self.optimized_morphology.segments[2],new_segment) + self.assertEqual(self.optimized_morphology.segments[2], new_segment) def test_segmentlist_setter_by_inference(self): - p = neuroml.Point3DWithDiam(x=0.9, - y=0.0, - z=0.0, - diameter=0.1) - - d = neuroml.Point3DWithDiam(x=0.0, - y=0.0, - z=0.0, - diameter=0.1) + p = neuroml.Point3DWithDiam(x=0.9, y=0.0, z=0.0, diameter=0.1) - new_segment = neuroml.Segment(proximal=p, - distal=d) + d = neuroml.Point3DWithDiam(x=0.0, y=0.0, z=0.0, diameter=0.1) - self.optimized_morphology.segments[2] = new_segment - self.assertEqual(self.optimized_morphology.segments[2].proximal.x,0.9) + new_segment = neuroml.Segment(proximal=p, distal=d) + self.optimized_morphology.segments[2] = new_segment + self.assertEqual(self.optimized_morphology.segments[2].proximal.x, 0.9) def test_instantiation(self): """ @@ -389,9 +397,9 @@ def test_parents(self): test_segment_1 = self.optimized_morphology.segments[0] test_segment_2 = self.optimized_morphology.segments[1] - self.assertEqual(test_segment_1.id,1) - self.assertEqual(test_segment_2.id,2) - self.assertEqual(test_segment_2.parent.segments,1) + self.assertEqual(test_segment_1.id, 1) + self.assertEqual(test_segment_2.id, 2) + self.assertEqual(test_segment_2.parent.segments, 1) self.assertIsNone(test_segment_1.parent) def test_valid_morphology_ids(self): @@ -400,7 +408,7 @@ def test_valid_morphology_ids(self): def test_invalid_morphology_ids(self): morphology = self.optimized_morphology - morphology.segments[0].id = 5 + morphology.segments[0].id = 5 self.assertFalse(morphology.valid_ids) def test_large_arraymorph(self): @@ -414,25 +422,28 @@ def test_large_arraymorph(self): num_segments = int(1e6) num_vertices = num_segments + 1 - x = np.linspace(0,10,num_vertices) + x = np.linspace(0, 10, num_vertices) y = np.zeros(num_vertices) z = np.zeros(num_vertices) - d = np.linspace(1,0.01,num_vertices) + d = np.linspace(1, 0.01, num_vertices) + + vertices = np.array([x, y, z, d]).T + + connectivity = range(-1, num_segments) - vertices = np.array([x,y,z,d]).T - - connectivity = range(-1,num_segments) + big_arraymorph = am.ArrayMorphology( + vertices=vertices, connectivity=connectivity + ) - big_arraymorph = am.ArrayMorphology(vertices = vertices, - connectivity = connectivity) + self.assertIsInstance(big_arraymorph.segments[3], neuroml.Segment) - self.assertIsInstance(big_arraymorph.segments[3],neuroml.Segment) + self.assertEqual(big_arraymorph.segments[0].distal.diameter, 1.0) + # following test not as obvious as it seems - first execution of getter does not have the same result as second + self.assertEqual(big_arraymorph.segments[2333], big_arraymorph.segments[2333]) - self.assertEqual(big_arraymorph.segments[0].distal.diameter,1.0) - #following test not as obvious as it seems - first execution of getter does not have the same result as second - self.assertEqual(big_arraymorph.segments[2333],big_arraymorph.segments[2333]) - - self.assertEqual(big_arraymorph.segments[0].distal.diameter,1.0) - self.assertEqual(big_arraymorph.segments[num_segments-1].proximal.x,10.0) - self.assertEqual(big_arraymorph.segments[0].distal.x,0.0) - self.assertEqual(big_arraymorph.segments[num_segments-1].proximal.diameter,0.01) + 
self.assertEqual(big_arraymorph.segments[0].distal.diameter, 1.0) + self.assertEqual(big_arraymorph.segments[num_segments - 1].proximal.x, 10.0) + self.assertEqual(big_arraymorph.segments[0].distal.x, 0.0) + self.assertEqual( + big_arraymorph.segments[num_segments - 1].proximal.diameter, 0.01 + ) diff --git a/neuroml/test/test_cell.py b/neuroml/test/test_cell.py index 89a24686..f0a239d3 100644 --- a/neuroml/test/test_cell.py +++ b/neuroml/test/test_cell.py @@ -24,84 +24,92 @@ class TestCell(unittest.TestCase): - def test_cell_methods(self): - cells = ['Purk2M9s','pyr_4_sym.cell'] + cells = ["Purk2M9s", "pyr_4_sym.cell"] - cells = ['pyr_4_sym'] + cells = ["pyr_4_sym"] for cell_name in cells: - local_path = '../examples/test_files/%s.cell.nml'%cell_name + local_path = "../examples/test_files/%s.cell.nml" % cell_name if os.path.isfile(local_path): test_file_path = local_path else: root_dir = os.path.dirname(neuroml.__file__) - test_file_path = os.path.join(root_dir,'examples/test_files/%s.cell.nml'%cell_name) - print('test file path is: '+test_file_path) + test_file_path = os.path.join( + root_dir, "examples/test_files/%s.cell.nml" % cell_name + ) + print("test file path is: " + test_file_path) doc = loaders.NeuroMLLoader.load(test_file_path) cell = doc.cells[0] - self.assertEqual(cell.id,cell_name.split('.')[0]) + self.assertEqual(cell.id, cell_name.split(".")[0]) exp_num_segs = 9 - self.assertEqual(cell.morphology.num_segments,exp_num_segs) - self.assertEqual(len(cell.get_segment_ids_vs_segments()),exp_num_segs) - self.assertRaises(Exception, lambda: cell.get_segment(-1)) # Seg -1 doesn't exist... + self.assertEqual(cell.morphology.num_segments, exp_num_segs) + self.assertEqual(len(cell.get_segment_ids_vs_segments()), exp_num_segs) + self.assertRaises( + Exception, lambda: cell.get_segment(-1) + ) # Seg -1 doesn't exist... cell.summary() - #cell.get_ordered_segments_in_groups = get_ordered_segments_in_groups + # cell.get_ordered_segments_in_groups = get_ordered_segments_in_groups - for grp in ['soma_group', ['soma_group','basal_dends'],['dendrite_group'],['all']]: + for grp in [ + "soma_group", + ["soma_group", "basal_dends"], + ["dendrite_group"], + ["all"], + ]: print("-----------------------------") - print(" Testing %s..."%grp) + print(" Testing %s..." 
% grp) segs, cuml, path_prox, path_dist = cell.get_ordered_segments_in_groups( grp, - check_parentage=(grp==['all']), + check_parentage=(grp == ["all"]), include_cumulative_lengths=True, - include_path_lengths=True + include_path_lengths=True, ) - print("Segs %s: %s"%(grp,segs)) - print("Cuml %s: %s"%(grp,cuml)) - print("Prox %s: %s"%(grp,path_prox)) - print("Dist %s: %s"%(grp,path_dist)) + print("Segs %s: %s" % (grp, segs)) + print("Cuml %s: %s" % (grp, cuml)) + print("Prox %s: %s" % (grp, path_prox)) + print("Dist %s: %s" % (grp, path_dist)) for s in segs: - assert len(segs[s])==len(cuml[s]) + assert len(segs[s]) == len(cuml[s]) ##assert len(segs[s])==len(path_prox[s]) ##assert len(segs[s])==len(path_dist[s]) - if grp=='soma_group': - assert len(segs['soma_group'])==1 - soma_len = cuml['soma_group'][-1] - print("soma_len: %s"%soma_len) + if grp == "soma_group": + assert len(segs["soma_group"]) == 1 + soma_len = cuml["soma_group"][-1] + print("soma_len: %s" % soma_len) - if grp==['all']: - assert len(segs['all'])==9 - all_len = cuml['all'][-1] + if grp == ["all"]: + assert len(segs["all"]) == 9 + all_len = cuml["all"][-1] - if grp==['dendrite_group']: - assert len(segs['dendrite_group'])==8 - dend_len = cuml['dendrite_group'][-1] - print("dend_len: %s"%dend_len) + if grp == ["dendrite_group"]: + assert len(segs["dendrite_group"]) == 8 + dend_len = cuml["dendrite_group"][-1] + print("dend_len: %s" % dend_len) - assert all_len == soma_len+dend_len + assert all_len == soma_len + dend_len def test_cell_methods2(self): - cell = Cell(id='cell0') + cell = Cell(id="cell0") - diam = 1. - d0=Point3DWithDiam(x=0, y=0, z=0, diameter=diam) - p=Point3DWithDiam(x=0, y=0, z=0, diameter=diam) + diam = 1.0 + d0 = Point3DWithDiam(x=0, y=0, z=0, diameter=diam) + p = Point3DWithDiam(x=0, y=0, z=0, diameter=diam) - seg0 = Segment(id=0, name='soma',proximal=p, distal=d0) + seg0 = Segment(id=0, name="soma", proximal=p, distal=d0) - d1=Point3DWithDiam(x=10, y=0, z=0, diameter=diam) + d1 = Point3DWithDiam(x=10, y=0, z=0, diameter=diam) cell.morphology = Morphology() cell.morphology.segments.append(seg0) @@ -109,67 +117,78 @@ def test_cell_methods2(self): seg1 = Segment(id=1, distal=d1, parent=SegmentParent(0)) cell.morphology.segments.append(seg1) - d2=Point3DWithDiam(x=20, y=0, z=0, diameter=diam) + d2 = Point3DWithDiam(x=20, y=0, z=0, diameter=diam) seg2 = Segment(id=2, proximal=d1, distal=d2, parent=SegmentParent(seg1.id)) cell.morphology.segments.append(seg2) - d3=Point3DWithDiam(x=20, y=10, z=0, diameter=diam) + d3 = Point3DWithDiam(x=20, y=10, z=0, diameter=diam) seg3 = Segment(id=3, distal=d3, parent=SegmentParent(seg2.id, fraction_along=1)) cell.morphology.segments.append(seg3) - sg1 = SegmentGroup(id='all') - for seg in [seg0,seg1,seg2,seg3]: + sg1 = SegmentGroup(id="all") + for seg in [seg0, seg1, seg2, seg3]: sg1.members.append(Member(seg.id)) cell.morphology.segment_groups.append(sg1) - sg2 = SegmentGroup(id='soma_group') + sg2 = SegmentGroup(id="soma_group") for seg in [seg0]: sg2.members.append(Member(seg.id)) cell.morphology.segment_groups.append(sg2) - sg3 = SegmentGroup(id='dend_group') - for seg in [seg1,seg2,seg3]: + sg3 = SegmentGroup(id="dend_group") + for seg in [seg1, seg2, seg3]: sg3.members.append(Member(seg.id)) cell.morphology.segment_groups.append(sg3) - sg4 = SegmentGroup(id='soma_dends') - for sg in [sg2,sg3]: + sg4 = SegmentGroup(id="soma_dends") + for sg in [sg2, sg3]: sg4.includes.append(Include(sg.id)) cell.morphology.segment_groups.append(sg4) - expected = 
{sg1.id:4,sg2.id:1,sg3.id:3,sg4.id:4} + expected = {sg1.id: 4, sg2.id: 1, sg3.id: 3, sg4.id: 4} - for sg in [sg1,sg2,sg3,sg4]: + for sg in [sg1, sg2, sg3, sg4]: segs = cell.get_all_segments_in_group(sg.id) - print('\nSeg group %s has segments: %s'%(sg,segs)) - self.assertEqual(expected[sg.id],len(segs)) + print("\nSeg group %s has segments: %s" % (sg, segs)) + self.assertEqual(expected[sg.id], len(segs)) osegs = cell.get_ordered_segments_in_groups(sg.id) - print('Seg group %s has ordered segments: %s'%(sg.id,osegs)) - self.assertEqual(expected[sg.id],len(osegs[sg.id])) - - ord_segs, cumulative_lengths, path_lengths_to_proximal, path_lengths_to_distal = cell.get_ordered_segments_in_groups( - sg.id, - include_cumulative_lengths=True, - include_path_lengths=True + print("Seg group %s has ordered segments: %s" % (sg.id, osegs)) + self.assertEqual(expected[sg.id], len(osegs[sg.id])) + + ( + ord_segs, + cumulative_lengths, + path_lengths_to_proximal, + path_lengths_to_distal, + ) = cell.get_ordered_segments_in_groups( + sg.id, include_cumulative_lengths=True, include_path_lengths=True ) - print('Seg group %s has cumulative_lengths: %s'%(sg.id,cumulative_lengths)) - self.assertEqual(expected[sg.id],len(cumulative_lengths[sg.id])) + print( + "Seg group %s has cumulative_lengths: %s" % (sg.id, cumulative_lengths) + ) + self.assertEqual(expected[sg.id], len(cumulative_lengths[sg.id])) - print('Seg group %s has path_lengths_to_proximal: %s'%(sg.id,path_lengths_to_proximal)) - self.assertEqual(expected[sg.id],len(path_lengths_to_proximal[sg.id])) + print( + "Seg group %s has path_lengths_to_proximal: %s" + % (sg.id, path_lengths_to_proximal) + ) + self.assertEqual(expected[sg.id], len(path_lengths_to_proximal[sg.id])) - print('Seg group %s has path_lengths_to_distal: %s'%(sg.id,path_lengths_to_distal)) - self.assertEqual(expected[sg.id],len(path_lengths_to_distal[sg.id])) + print( + "Seg group %s has path_lengths_to_distal: %s" + % (sg.id, path_lengths_to_distal) + ) + self.assertEqual(expected[sg.id], len(path_lengths_to_distal[sg.id])) def runTest(self): print("Running tests in TestCell") -if __name__ == '__main__': +if __name__ == "__main__": ta = TestCell() ta.test_cell_methods() diff --git a/neuroml/test/test_hdf5_optimized.py b/neuroml/test/test_hdf5_optimized.py index 300cfdaa..63837f6b 100644 --- a/neuroml/test/test_hdf5_optimized.py +++ b/neuroml/test/test_hdf5_optimized.py @@ -19,64 +19,66 @@ class TestNeuroMLHDF5Optimized(unittest.TestCase): base_dir = os.path.dirname(__file__) - #base_dir = '.' + # base_dir = '.' 
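# ---- Editor's annotation (illustrative sketch, not part of the patch) ----
# The test below round-trips full networks between XML and HDF5 and compares
# summary() output. A condensed sketch of the same write/load pattern on a
# throwaway in-memory document; the file path and ids are made-up examples,
# and an HDF5 backend (pytables) is assumed to be installed.
import neuroml
import neuroml.loaders as loaders
import neuroml.writers as writers

doc = neuroml.NeuroMLDocument(id="sketch_doc")
net = neuroml.Network(id="sketch_net")
net.populations.append(
    neuroml.Population(id="pop0", component="someCellId", size=1)
)
doc.networks.append(net)

writers.NeuroMLHdf5Writer.write(doc, "/tmp/sketch_doc.nml.h5")
reloaded = loaders.NeuroMLHdf5Loader.load("/tmp/sketch_doc.nml.h5")
print(reloaded.summary())
# ---- end editor's annotation ----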
def runTest(self): print("Running tests in TestNeuroMLHDF5Optimized") def test_write_load(self): - #for f in []: - #for f in ['complete.nml']: - #for f in ['simplenet.nml','testh5.nml','MediumNet.net.nml','complete.nml']: + # for f in []: + # for f in ['complete.nml']: + # for f in ['simplenet.nml','testh5.nml','MediumNet.net.nml','complete.nml']: - for f in ['simplenet.nml','MediumNet.net.nml']: - file_name = '%s/../examples/test_files/%s'%(self.base_dir,f) + for f in ["simplenet.nml", "MediumNet.net.nml"]: + file_name = "%s/../examples/test_files/%s" % (self.base_dir, f) - print("Loading %s"%file_name) + print("Loading %s" % file_name) - nml_doc0 = loaders.read_neuroml2_file(file_name,include_includes=True) + nml_doc0 = loaders.read_neuroml2_file(file_name, include_includes=True) summary0 = nml_doc0.summary() print(summary0) - nml_h5_file = '%s/../examples/tmp/%s__1.h5'%(self.base_dir,f) + nml_h5_file = "%s/../examples/tmp/%s__1.h5" % (self.base_dir, f) writers.NeuroMLHdf5Writer.write(nml_doc0, nml_h5_file) - print("Written to: %s"%nml_h5_file) + print("Written to: %s" % nml_h5_file) - nml_doc1 = loaders.read_neuroml2_file(nml_h5_file,include_includes=True,optimized=True) + nml_doc1 = loaders.read_neuroml2_file( + nml_h5_file, include_includes=True, optimized=True + ) - summary1 = nml_doc1.summary().replace(' (optimized)','') - print('\n'+summary1) + summary1 = nml_doc1.summary().replace(" (optimized)", "") + print("\n" + summary1) - compare(summary0,summary1) + compare(summary0, summary1) - nml_h5_file_2 = '%s/../examples/tmp/%s__2.h5'%(self.base_dir,f) + nml_h5_file_2 = "%s/../examples/tmp/%s__2.h5" % (self.base_dir, f) writers.NeuroMLHdf5Writer.write(nml_doc1, nml_h5_file_2) - print("Written to: %s"%nml_h5_file_2) - #exit() - nml_doc2 = loaders.read_neuroml2_file(nml_h5_file_2,include_includes=True) + print("Written to: %s" % nml_h5_file_2) + # exit() + nml_doc2 = loaders.read_neuroml2_file(nml_h5_file_2, include_includes=True) summary2 = nml_doc2.summary() - print("Reloaded: %s"%nml_h5_file_2) - print('\n'+summary2) + print("Reloaded: %s" % nml_h5_file_2) + print("\n" + summary2) - compare(summary0,summary2) + compare(summary0, summary2) - nml_h5_file_3 = '%s/../examples/tmp/%s__3.nml'%(self.base_dir,f) + nml_h5_file_3 = "%s/../examples/tmp/%s__3.nml" % (self.base_dir, f) writers.NeuroMLWriter.write(nml_doc1, nml_h5_file_3) - print("Written to: %s"%nml_h5_file_3) + print("Written to: %s" % nml_h5_file_3) - nml_doc3 = loaders.read_neuroml2_file(nml_h5_file_3,include_includes=True) + nml_doc3 = loaders.read_neuroml2_file(nml_h5_file_3, include_includes=True) summary3 = nml_doc3.summary() - print("Reloaded: %s"%nml_h5_file_3) - print('\n'+summary3) + print("Reloaded: %s" % nml_h5_file_3) + print("\n" + summary3) - compare(summary0,summary3) + compare(summary0, summary3) -if __name__ == '__main__': +if __name__ == "__main__": tnxp = TestNeuroMLHDF5Optimized() tnxp.test_write_load() diff --git a/neuroml/test/test_hdf5_parser.py b/neuroml/test/test_hdf5_parser.py index 1e0a3477..8b5b80f0 100644 --- a/neuroml/test/test_hdf5_parser.py +++ b/neuroml/test/test_hdf5_parser.py @@ -18,58 +18,58 @@ class TestNeuroMLHDF5Parser(unittest.TestCase): base_dir = os.path.dirname(__file__) - #base_dir = '.' + # base_dir = '.' 
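# ---- Editor's annotation (illustrative sketch, not part of the patch) ----
# These parser tests treat two documents as equivalent when their summary()
# digests match before and after a round trip. The same idiom for plain XML,
# on a throwaway document (path and id are made-up examples):
import neuroml
import neuroml.loaders as loaders
import neuroml.writers as writers

doc = neuroml.NeuroMLDocument(id="parser_sketch")
writers.NeuroMLWriter.write(doc, "/tmp/parser_sketch.nml")
reloaded = loaders.NeuroMLLoader.load("/tmp/parser_sketch.nml")
# show_includes/show_non_network are the flags used by test_parse below
print(reloaded.summary(show_includes=False, show_non_network=False))
# ---- end editor's annotation ----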
def test_write_load_hdf5(self): - #for f in []: - #for f in ['MediumNet.net.nml']: - for f in ['simplenet.nml','testh5.nml','MediumNet.net.nml','complete.nml']: - file_name = '%s/../examples/test_files/%s'%(self.base_dir,f) + # for f in []: + # for f in ['MediumNet.net.nml']: + for f in ["simplenet.nml", "testh5.nml", "MediumNet.net.nml", "complete.nml"]: + file_name = "%s/../examples/test_files/%s" % (self.base_dir, f) - print("Loading %s"%file_name) + print("Loading %s" % file_name) - nml_doc0 = loaders.read_neuroml2_file(file_name,include_includes=True) + nml_doc0 = loaders.read_neuroml2_file(file_name, include_includes=True) summary0 = nml_doc0.summary() print(summary0) - nml_h5_file = '%s/../examples/tmp/%s.h5'%(self.base_dir,f) + nml_h5_file = "%s/../examples/tmp/%s.h5" % (self.base_dir, f) writers.NeuroMLHdf5Writer.write(nml_doc0, nml_h5_file) - print("Written to: %s"%nml_h5_file) + print("Written to: %s" % nml_h5_file) nml_doc2 = loaders.NeuroMLHdf5Loader.load(nml_h5_file) summary1 = nml_doc2.summary() - print('\n'+summary1) + print("\n" + summary1) - compare(summary0,summary1) + compare(summary0, summary1) def test_parse(self): - file_name = self.base_dir+'/../examples/test_files/testh5.nml' + file_name = self.base_dir + "/../examples/test_files/testh5.nml" nml_doc0 = loaders.NeuroMLLoader.load(file_name) - summary0 = nml_doc0.summary(show_includes=False,show_non_network=False) - print('\n'+summary0) + summary0 = nml_doc0.summary(show_includes=False, show_non_network=False) + print("\n" + summary0) - print('-------------------------------\n\n') + print("-------------------------------\n\n") - nml_h5_file = self.base_dir+'/../examples/tmp/testh5a.nml.h5' + nml_h5_file = self.base_dir + "/../examples/tmp/testh5a.nml.h5" writers.NeuroMLHdf5Writer.write(nml_doc0, nml_h5_file) - print("Written H5 network file to: "+nml_h5_file) + print("Written H5 network file to: " + nml_h5_file) nml_doc2 = loaders.NeuroMLHdf5Loader.load(nml_h5_file) - summary1 = nml_doc2.summary(show_includes=False,show_non_network=False) - print('\n'+summary1) + summary1 = nml_doc2.summary(show_includes=False, show_non_network=False) + print("\n" + summary1) - compare(summary0,summary1) + compare(summary0, summary1) def runTest(self): print("Running tests in TestNeuroMLHDF5Parser") -if __name__ == '__main__': +if __name__ == "__main__": tnxp = TestNeuroMLHDF5Parser() tnxp.test_write_load_hdf5() diff --git a/neuroml/test/test_integration.py b/neuroml/test/test_integration.py index 75bc578c..e5ef4c3f 100644 --- a/neuroml/test/test_integration.py +++ b/neuroml/test/test_integration.py @@ -7,38 +7,38 @@ except ImportError: import unittest -class TestIntegration(unittest.TestCase): +class TestIntegration(unittest.TestCase): def setUp(self): """ Make an optimized morphology, add a couple of segments, save it and then load it back """ - - vertices = [[0,0,0,0.1],[1,0,0,0.2],[2,0,0,0.3],[3,0,0,0.4]] - connectivity = [-1,0,1,2] - - self.optimized_morphology = am.ArrayMorphology(vertices=vertices, - connectivity=connectivity, - id="arraymorph_test") + + vertices = [[0, 0, 0, 0.1], [1, 0, 0, 0.2], [2, 0, 0, 0.3], [3, 0, 0, 0.4]] + connectivity = [-1, 0, 1, 2] + + self.optimized_morphology = am.ArrayMorphology( + vertices=vertices, connectivity=connectivity, id="arraymorph_test" + ) seg = neuroml.Segment() - #TODO: - #self.optimized_morphology.segments.append(seg) + # TODO: + # self.optimized_morphology.segments.append(seg) doc = neuroml.NeuroMLDocument() cell = neuroml.Cell() def test_to_neuroml_morphology_and_write(self): - 
neuroml_morphology = self.optimized_morphology.to_neuroml_morphology(id="Test") - self.assertEqual(neuroml_morphology.id,"Test") - self.assertEqual(len(neuroml_morphology.segments),3) - self.assertIsNone(writers.NeuroMLWriter.write(neuroml_morphology,'/dev/null')) + neuroml_morphology = self.optimized_morphology.to_neuroml_morphology(id="Test") + self.assertEqual(neuroml_morphology.id, "Test") + self.assertEqual(len(neuroml_morphology.segments), 3) + self.assertIsNone(writers.NeuroMLWriter.write(neuroml_morphology, "/dev/null")) def test_arraymorph_properties(self): - self.assertEqual(self.optimized_morphology.id,"arraymorph_test") - + self.assertEqual(self.optimized_morphology.id, "arraymorph_test") + def test_arraymorph_write(self): - writers.NeuroMLWriter.write(self.optimized_morphology,'/dev/null') + writers.NeuroMLWriter.write(self.optimized_morphology, "/dev/null") diff --git a/neuroml/test/test_loaders.py b/neuroml/test/test_loaders.py index 08498062..b960a121 100644 --- a/neuroml/test/test_loaders.py +++ b/neuroml/test/test_loaders.py @@ -12,19 +12,21 @@ except ImportError: import unittest + class TestNeuroMLLoader(unittest.TestCase): def test_load_neuroml(self): root_dir = os.path.dirname(neuroml.__file__) - print('root dir is: '+root_dir) - test_file_path = os.path.join(root_dir,'examples/test_files/Purk2M9s.nml') - print('test fi le path is: ' + test_file_path) + print("root dir is: " + root_dir) + test_file_path = os.path.join(root_dir, "examples/test_files/Purk2M9s.nml") + print("test fi le path is: " + test_file_path) - f = open(test_file_path,'r') - #print(f.read()) + f = open(test_file_path, "r") + # print(f.read()) doc = loaders.NeuroMLLoader.load(test_file_path) - self.assertEqual(doc.id,'Purk2M9s') + self.assertEqual(doc.id, "Purk2M9s") f.close() - print('Finished test') + print("Finished test") + if __name__ == "__main__": t = TestNeuroMLLoader() diff --git a/neuroml/test/test_morphology.py b/neuroml/test/test_morphology.py index c0366611..69fd3e9a 100644 --- a/neuroml/test/test_morphology.py +++ b/neuroml/test/test_morphology.py @@ -9,22 +9,21 @@ import unittest2 as unittest except ImportError: import unittest - -class TestSingleMorphology(unittest.TestCase): +class TestSingleMorphology(unittest.TestCase): def setUp(self): self.test_morphology = neuroml.Morphology() self.test_morphology.id = "TestMorph" for i in range(10): self.test_morphology.segments.append(neuroml.Segment()) - + def test_id(self): """ Test if Morphology instantiation and id assignment is working """ - self.assertEqual(self.test_morphology.id,"TestMorph") + self.assertEqual(self.test_morphology.id, "TestMorph") def test_num_segments(self): - self.assertEqual(self.test_morphology.num_segments,10) + self.assertEqual(self.test_morphology.num_segments, 10) diff --git a/neuroml/test/test_segment.py b/neuroml/test/test_segment.py index 77342157..99925075 100644 --- a/neuroml/test/test_segment.py +++ b/neuroml/test/test_segment.py @@ -15,25 +15,24 @@ import unittest2 as unittest except ImportError: import unittest - -class TestSingleSegment(unittest.TestCase): +class TestSingleSegment(unittest.TestCase): def test_proximal_diameter(self): - proximal_point = Point3DWithDiam(diameter = 3.21) - seg = Segment(proximal = proximal_point) + proximal_point = Point3DWithDiam(diameter=3.21) + seg = Segment(proximal=proximal_point) self.assertEqual(seg.proximal.diameter, 3.21) def test_distal_diameter(self): - distal_point = Point3DWithDiam(diameter = 3.21) - seg = Segment(distal = distal_point) + distal_point = 
Point3DWithDiam(diameter=3.21) + seg = Segment(distal=distal_point) - self.assertEqual(seg.distal.diameter,3.21) + self.assertEqual(seg.distal.diameter, 3.21) def test_proximal_coordinates(self): proximal_point = Point3DWithDiam(x=0.1, y=0.2, z=0.3) - seg = Segment(proximal = proximal_point) + seg = Segment(proximal=proximal_point) self.assertEqual(seg.proximal.x, 0.1) self.assertEqual(seg.proximal.y, 0.2) @@ -41,248 +40,206 @@ def test_proximal_coordinates(self): def test_distal_coordinates(self): distal_point = Point3DWithDiam(x=0.1, y=0.2, z=0.3) - seg = Segment(distal = distal_point) + seg = Segment(distal=distal_point) self.assertEqual(seg.distal.x, 0.1) self.assertEqual(seg.distal.y, 0.2) self.assertEqual(seg.distal.z, 0.3) + class TestHelperProperties(unittest.TestCase): """ Tests for helper properties for volume, area etc These are not part of the neuroML schema itself """ + def test_length0(self): - diam = 1. - len = 1. - d=Point3DWithDiam(x=0, - y=0, - z=0, - diameter=diam) - p=Point3DWithDiam(x=0, - y=len, - z=0, - diameter=diam) - + diam = 1.0 + len = 1.0 + d = Point3DWithDiam(x=0, y=0, z=0, diameter=diam) + p = Point3DWithDiam(x=0, y=len, z=0, diameter=diam) + seg = Segment(proximal=p, distal=d) self.assertAlmostEqual(seg.length, 1, places=7) - + def test_length(self): - d=Point3DWithDiam(x=0.2, - y=2.4, - z=3.5, - diameter=0.6) - p=Point3DWithDiam(x=0.5, - y=2.0, - z=1.8, - diameter=0.9) - + d = Point3DWithDiam(x=0.2, y=2.4, z=3.5, diameter=0.6) + p = Point3DWithDiam(x=0.5, y=2.0, z=1.8, diameter=0.9) + seg = Segment(proximal=p, distal=d) self.assertAlmostEqual(seg.length, 1.772004514, places=7) def test_area0(self): - diam = 1. - len = 1. - d=Point3DWithDiam(x=0, - y=0, - z=0, - diameter=diam) - p=Point3DWithDiam(x=0, - y=len, - z=0, - diameter=diam) - + diam = 1.0 + len = 1.0 + d = Point3DWithDiam(x=0, y=0, z=0, diameter=diam) + p = Point3DWithDiam(x=0, y=len, z=0, diameter=diam) + seg = Segment(proximal=p, distal=d) - self.assertAlmostEqual(seg.surface_area, math.pi*diam*len, places=7) - + self.assertAlmostEqual(seg.surface_area, math.pi * diam * len, places=7) + def test_area1(self): - diam = 1. - len = 1. - d=Point3DWithDiam(x=0, - y=0, - z=0, - diameter=diam) - p=Point3DWithDiam(x=0, - y=len, - z=0, - diameter=diam*1.01) - + diam = 1.0 + len = 1.0 + d = Point3DWithDiam(x=0, y=0, z=0, diameter=diam) + p = Point3DWithDiam(x=0, y=len, z=0, diameter=diam * 1.01) + seg = Segment(proximal=p, distal=d) self.assertAlmostEqual(seg.surface_area, 3.15734008, places=7) - - + def test_area(self): - d=Point3DWithDiam(x=0.2, - y=2.4, - z=3.5, - diameter=0.6) - p=Point3DWithDiam(x=0.5, - y=2.0, - z=1.8, - diameter=0.9) - + d = Point3DWithDiam(x=0.2, y=2.4, z=3.5, diameter=0.6) + p = Point3DWithDiam(x=0.5, y=2.0, z=1.8, diameter=0.9) + seg = Segment(proximal=p, distal=d) self.assertAlmostEqual(seg.surface_area, 4.19011944, places=7) - def test_volume0(self): - - diam = 1. - len = 1. - d=Point3DWithDiam(x=0, - y=0, - z=0, - diameter=diam) - p=Point3DWithDiam(x=0, - y=len, - z=0, - diameter=diam) + + diam = 1.0 + len = 1.0 + d = Point3DWithDiam(x=0, y=0, z=0, diameter=diam) + p = Point3DWithDiam(x=0, y=len, z=0, diameter=diam) seg = Segment(proximal=p, distal=d) - self.assertAlmostEqual(seg.volume, math.pi*(diam/2)**2*len, places=7) - + self.assertAlmostEqual(seg.volume, math.pi * (diam / 2) ** 2 * len, places=7) + def test_volume1(self): - - diam = 1. - len = 1. 
- d=Point3DWithDiam(x=0, - y=0, - z=0, - diameter=diam) - p=Point3DWithDiam(x=0, - y=len, - z=0, - diameter=diam*1.01) + + diam = 1.0 + len = 1.0 + d = Point3DWithDiam(x=0, y=0, z=0, diameter=diam) + p = Point3DWithDiam(x=0, y=len, z=0, diameter=diam * 1.01) seg = Segment(proximal=p, distal=d) self.assertAlmostEqual(seg.volume, 0.793278324, places=7) - def test_volume(self): - d=Point3DWithDiam(x=0.2, - y=2.4, - z=3.5, - diameter=0.6) - p=Point3DWithDiam(x=0.5, - y=2.0, - z=1.8, - diameter=0.9) + d = Point3DWithDiam(x=0.2, y=2.4, z=3.5, diameter=0.6) + p = Point3DWithDiam(x=0.5, y=2.0, z=1.8, diameter=0.9) seg = Segment(proximal=p, distal=d) self.assertAlmostEqual(seg.volume, 0.7932855820702964, places=7) - - + def test_spherical(self): - - diam = 1. - d=Point3DWithDiam(x=0, - y=0, - z=0, - diameter=diam) - p=Point3DWithDiam(x=0, - y=0, - z=0, - diameter=diam) - + + diam = 1.0 + d = Point3DWithDiam(x=0, y=0, z=0, diameter=diam) + p = Point3DWithDiam(x=0, y=0, z=0, diameter=diam) + seg = Segment(id=0, proximal=p, distal=d) self.assertEqual(seg.length, 0) self.assertEqual(seg.surface_area, math.pi) - - + def test_cell_with_segs(self): - - cell = Cell(id='cell0') - - diam = 1. - d0=Point3DWithDiam(x=0, y=0, z=0, diameter=diam) - p=Point3DWithDiam(x=0, y=0, z=0, diameter=diam) - + + cell = Cell(id="cell0") + + diam = 1.0 + d0 = Point3DWithDiam(x=0, y=0, z=0, diameter=diam) + p = Point3DWithDiam(x=0, y=0, z=0, diameter=diam) + seg0 = Segment(id=0, proximal=p, distal=d0) - - d1=Point3DWithDiam(x=10, y=0, z=0, diameter=diam) - + + d1 = Point3DWithDiam(x=10, y=0, z=0, diameter=diam) + cell.morphology = Morphology() cell.morphology.segments.append(seg0) - + seg1 = Segment(id=1, distal=d1, parent=SegmentParent(0)) cell.morphology.segments.append(seg1) - - d2=Point3DWithDiam(x=20, y=0, z=0, diameter=diam) - - seg2 = Segment(id=2, proximal =d1, distal=d2, parent=SegmentParent(seg1.id)) + + d2 = Point3DWithDiam(x=20, y=0, z=0, diameter=diam) + + seg2 = Segment(id=2, proximal=d1, distal=d2, parent=SegmentParent(seg1.id)) cell.morphology.segments.append(seg2) - - d3=Point3DWithDiam(x=15, y=10, z=0, diameter=diam) - - seg3 = Segment(id=3, distal=d3, parent=SegmentParent(seg2.id, fraction_along=0.5)) + + d3 = Point3DWithDiam(x=15, y=10, z=0, diameter=diam) + + seg3 = Segment( + id=3, distal=d3, parent=SegmentParent(seg2.id, fraction_along=0.5) + ) cell.morphology.segments.append(seg3) - for f in [0,0.25,0.5,0.75,1]: + for f in [0, 0.25, 0.5, 0.75, 1]: seg3.parent.fraction_along = f - print('Fract: %s, length: %s, proximal: %s'%(f, cell.get_segment_length(seg3.id), cell.get_actual_proximal(seg3.id))) - + print( + "Fract: %s, length: %s, proximal: %s" + % ( + f, + cell.get_segment_length(seg3.id), + cell.get_actual_proximal(seg3.id), + ) + ) + self.assertEqual(seg0.length, 0) self.assertEqual(cell.get_segment_length(seg0.id), 0) - + self.assertRaises(Exception, lambda: seg1.length) # No proximal self.assertEqual(cell.get_segment_length(seg1.id), 10) - + self.assertEqual(seg2.length, 10) self.assertEqual(cell.get_segment_length(seg2.id), 10) - - + self.assertEqual(seg0.surface_area, math.pi) self.assertEqual(cell.get_segment_surface_area(seg0.id), math.pi) - self.assertRaises(Exception, lambda: seg1.surface_area) # No proximal - self.assertEqual(cell.get_segment_surface_area(seg1.id), math.pi*10) - self.assertEqual(seg2.surface_area, math.pi*10) - self.assertEqual(cell.get_segment_surface_area(seg2.id), math.pi*10) - - v = 4.0/3*math.pi*(diam/2)**3 + self.assertRaises(Exception, lambda: seg1.surface_area) # 
No proximal + self.assertEqual(cell.get_segment_surface_area(seg1.id), math.pi * 10) + self.assertEqual(seg2.surface_area, math.pi * 10) + self.assertEqual(cell.get_segment_surface_area(seg2.id), math.pi * 10) + + v = 4.0 / 3 * math.pi * (diam / 2) ** 3 self.assertEqual(seg0.volume, v) self.assertEqual(cell.get_segment_volume(seg0.id), v) - v = math.pi*seg2.proximal.diameter/2*seg2.proximal.diameter/2*seg2.length - self.assertRaises(Exception, lambda: seg1.volume) # No proximal + v = ( + math.pi + * seg2.proximal.diameter + / 2 + * seg2.proximal.diameter + / 2 + * seg2.length + ) + self.assertRaises(Exception, lambda: seg1.volume) # No proximal self.assertAlmostEqual(cell.get_segment_volume(seg1.id), v, places=7) self.assertAlmostEqual(seg2.volume, v, places=7) self.assertAlmostEqual(cell.get_segment_volume(seg2.id), v, places=7) - - + # Break the sphere... - seg0.distal.diameter = diam*2 + seg0.distal.diameter = diam * 2 self.assertRaises(Exception, lambda: seg0.surface_area) self.assertRaises(Exception, lambda: seg0.volume) - - print('Passed...') - ''' ''' - + + print("Passed...") + """ """ + def runTest(self): print("Running tests in TestHelperProperties") + class TestAttachedSegments(unittest.TestCase): pass -if __name__ == '__main__': - +if __name__ == "__main__": + ta = TestHelperProperties() - + ta.test_length0() ta.test_length() - + ta.test_area0() ta.test_area1() ta.test_area() - + ta.test_volume0() ta.test_volume1() ta.test_volume() - + ta.test_spherical() - ta.test_cell_with_segs() \ No newline at end of file + ta.test_cell_with_segs() diff --git a/neuroml/test/test_writers.py b/neuroml/test/test_writers.py index e811585b..4d7a4f52 100644 --- a/neuroml/test/test_writers.py +++ b/neuroml/test/test_writers.py @@ -15,51 +15,53 @@ class TestJSONWriter(unittest.TestCase): - def setUp(self): - num_segments = int(1e4) #Per cell + num_segments = int(1e4) # Per cell num_vertices = num_segments + 1 - x = np.linspace(0,10,num_vertices) + x = np.linspace(0, 10, num_vertices) y = np.zeros(num_vertices) z = np.zeros(num_vertices) - d = np.linspace(1,0.01,num_vertices) + d = np.linspace(1, 0.01, num_vertices) - vertices = np.array([x,y,z,d]).T + vertices = np.array([x, y, z, d]).T - connectivity = range(-1,num_segments) + connectivity = range(-1, num_segments) - big_arraymorph = am.ArrayMorphology(vertices = vertices, - connectivity = connectivity) - transposed_x = x+10 - transposed_vertices = np.array([transposed_x,y,z,d]).T + big_arraymorph = am.ArrayMorphology( + vertices=vertices, connectivity=connectivity + ) + transposed_x = x + 10 + transposed_vertices = np.array([transposed_x, y, z, d]).T - transposed_arraymorph = am.ArrayMorphology(vertices = transposed_vertices, - connectivity = connectivity) + transposed_arraymorph = am.ArrayMorphology( + vertices=transposed_vertices, connectivity=connectivity + ) bigger_d = d + 0.5 - fatter_vertices = np.array([x,y,z,bigger_d]).T + fatter_vertices = np.array([x, y, z, bigger_d]).T - fatter_arraymorph = am.ArrayMorphology(vertices = fatter_vertices, - connectivity = connectivity) + fatter_arraymorph = am.ArrayMorphology( + vertices=fatter_vertices, connectivity=connectivity + ) - neuroml_cell = neuroml.Cell(id='cell_4') - neuroml_morphology = neuroml.Morphology(id = 'my_morph') + neuroml_cell = neuroml.Cell(id="cell_4") + neuroml_morphology = neuroml.Morphology(id="my_morph") neuroml_cell.morphology = neuroml_morphology self.transposed_arraymorph = transposed_arraymorph self.fatter_arraymorph = fatter_arraymorph self.big_arraymorph = big_arraymorph 
- self.cell_1 = neuroml.Cell(id='cell_1') - self.cell_2 = neuroml.Cell(id='cell_2') - self.cell_3 = neuroml.Cell(id='cell_3') + self.cell_1 = neuroml.Cell(id="cell_1") + self.cell_2 = neuroml.Cell(id="cell_2") + self.cell_3 = neuroml.Cell(id="cell_3") self.cell_1.morphology = transposed_arraymorph self.cell_2.morphology = fatter_arraymorph self.cell_3.morphology = big_arraymorph - self.test_doc = neuroml.NeuroMLDocument(id='TestDocument') + self.test_doc = neuroml.NeuroMLDocument(id="TestDocument") self.test_doc.cells.append(self.cell_1) self.test_doc.cells.append(self.cell_2) @@ -70,8 +72,7 @@ def test_write_to_mongodb_backend(self): writer_method = neuroml.writers.JSONWriter.write_to_mongodb try: - writer_method(neuroml_document = self.test_doc, - db = 'mike_test_db3') + writer_method(neuroml_document=self.test_doc, db="mike_test_db3") except: self.fail("Exception raised!") @@ -83,34 +84,36 @@ def test_write_to_mongodb_expected(self): TODO: add a teardown """ - db_name = 'test_db_4' + db_name = "test_db_4" writer_method = neuroml.writers.JSONWriter.write_to_mongodb - writer_method(neuroml_document = self.test_doc, - db = db_name) + writer_method(neuroml_document=self.test_doc, db=db_name) loader_method = neuroml.loaders.JSONLoader.load_from_mongodb - doc = loader_method(db = db_name, - id = self.test_doc.id, - host = 'localhost') + doc = loader_method(db=db_name, id=self.test_doc.id, host="localhost") array_morph = doc.cells[2].morphology - connectivity_equal = np.testing.assert_array_equal(array_morph.connectivity,self.big_arraymorph.connectivity) - physical_masks_equal = np.testing.assert_array_equal(array_morph.physical_mask,self.big_arraymorph.physical_mask) - vertices_equal = np.testing.assert_array_equal(array_morph.vertices,self.big_arraymorph.vertices) - + connectivity_equal = np.testing.assert_array_equal( + array_morph.connectivity, self.big_arraymorph.connectivity + ) + physical_masks_equal = np.testing.assert_array_equal( + array_morph.physical_mask, self.big_arraymorph.physical_mask + ) + vertices_equal = np.testing.assert_array_equal( + array_morph.vertices, self.big_arraymorph.vertices + ) - self.assertEqual(connectivity_equal,None) #None when equal - self.assertEqual(physical_masks_equal,None) #None when equal - self.assertEqual(vertices_equal,None) #None when equal + self.assertEqual(connectivity_equal, None) # None when equal + self.assertEqual(physical_masks_equal, None) # None when equal + self.assertEqual(vertices_equal, None) # None when equal def test_write_multiple_morphologies(self): filename = tempfile.mkstemp()[1] writer_method = neuroml.writers.JSONWriter.write try: - writer_method(self.test_doc,filename) + writer_method(self.test_doc, filename) except: self.fail("Exception raised!") @@ -123,7 +126,7 @@ def test_write_expected(self): filename = tempfile.mkstemp()[1] writer_method = neuroml.writers.JSONWriter.write - writer_method(self.test_doc,filename) + writer_method(self.test_doc, filename) loader_method = neuroml.loaders.JSONLoader.load @@ -131,80 +134,86 @@ def test_write_expected(self): array_morph = doc.cells[2].morphology - connectivity_equal = np.testing.assert_array_equal(array_morph.connectivity,self.big_arraymorph.connectivity) - physical_masks_equal = np.testing.assert_array_equal(array_morph.physical_mask,self.big_arraymorph.physical_mask) - vertices_equal = np.testing.assert_array_equal(array_morph.vertices,self.big_arraymorph.vertices) + connectivity_equal = np.testing.assert_array_equal( + array_morph.connectivity, 
self.big_arraymorph.connectivity + ) + physical_masks_equal = np.testing.assert_array_equal( + array_morph.physical_mask, self.big_arraymorph.physical_mask + ) + vertices_equal = np.testing.assert_array_equal( + array_morph.vertices, self.big_arraymorph.vertices + ) - - self.assertEqual(connectivity_equal,None) #None when equal - self.assertEqual(physical_masks_equal,None) #None when equal - self.assertEqual(vertices_equal,None) #None when equal + self.assertEqual(connectivity_equal, None) # None when equal + self.assertEqual(physical_masks_equal, None) # None when equal + self.assertEqual(vertices_equal, None) # None when equal def test_writer_instance(self): filename = tempfile.mkstemp()[1] writer_method = neuroml.writers.JSONWriter.write - writer_method(self.test_doc,filename) + writer_method(self.test_doc, filename) loader_method = neuroml.loaders.JSONLoader.load document = loader_method(filename) - self.assertIsInstance(document,neuroml.NeuroMLDocument) + self.assertIsInstance(document, neuroml.NeuroMLDocument) class TestNeuroMLWriter(unittest.TestCase): - def test_write_nonsense(self): """ Should Throw an attribute error as integers do not have an export method """ - a = 6 #not NeuroML-writeable + a = 6 # not NeuroML-writeable writer_method = neuroml.writers.NeuroMLWriter.write self.assertRaises(AttributeError, writer_method, a, "tmpfile") -class TestArrayMorphWriter(unittest.TestCase): +class TestArrayMorphWriter(unittest.TestCase): def setUp(self): num_segments = int(1e6) num_vertices = num_segments + 1 - x = np.linspace(0,10,num_vertices) + x = np.linspace(0, 10, num_vertices) y = np.zeros(num_vertices) z = np.zeros(num_vertices) - d = np.linspace(1,0.01,num_vertices) + d = np.linspace(1, 0.01, num_vertices) - vertices = np.array([x,y,z,d]).T + vertices = np.array([x, y, z, d]).T - connectivity = range(-1,num_segments) + connectivity = range(-1, num_segments) - big_arraymorph = am.ArrayMorphology(vertices = vertices, - connectivity = connectivity) - transposed_x = x+10 - transposed_vertices = np.array([transposed_x,y,z,d]).T + big_arraymorph = am.ArrayMorphology( + vertices=vertices, connectivity=connectivity + ) + transposed_x = x + 10 + transposed_vertices = np.array([transposed_x, y, z, d]).T - transposed_arraymorph = am.ArrayMorphology(vertices = transposed_vertices, - connectivity = connectivity) + transposed_arraymorph = am.ArrayMorphology( + vertices=transposed_vertices, connectivity=connectivity + ) bigger_d = d + 0.5 - fatter_vertices = np.array([x,y,z,bigger_d]).T - - fatter_arraymorph = am.ArrayMorphology(vertices = fatter_vertices, - connectivity = connectivity) + fatter_vertices = np.array([x, y, z, bigger_d]).T + fatter_arraymorph = am.ArrayMorphology( + vertices=fatter_vertices, connectivity=connectivity + ) self.transposed_arraymorph = transposed_arraymorph self.fatter_arraymorph = fatter_arraymorph self.big_arraymorph = big_arraymorph - self.cell_1 = neuroml.Cell(id='cell_1') - self.cell_2 = neuroml.Cell(id='cell_2') - self.cell_3 = neuroml.Cell(id='cell_3') + self.cell_1 = neuroml.Cell(id="cell_1") + self.cell_2 = neuroml.Cell(id="cell_2") + self.cell_3 = neuroml.Cell(id="cell_3") self.cell_1.morphology = transposed_arraymorph self.cell_2.morphology = fatter_arraymorph self.cell_3.morphology = big_arraymorph - self.test_doc = neuroml.NeuroMLDocument(id='TestDocument') + self.test_doc = neuroml.NeuroMLDocument(id="TestDocument") self.test_doc.cells.append(self.cell_1) self.test_doc.cells.append(self.cell_2) @@ -215,7 +224,7 @@ def test_write_big_arraymorph(self): 
filename = tempfile.mkstemp()[1] try: - writer_method(self.test_doc,filename) + writer_method(self.test_doc, filename) except: self.fail("Exception raised!") @@ -228,35 +237,41 @@ def test_write_expected(self): filename = tempfile.mkstemp()[1] writer_method = neuroml.writers.ArrayMorphWriter.write - writer_method(self.big_arraymorph,filename) + writer_method(self.big_arraymorph, filename) loader_method = neuroml.loaders.ArrayMorphLoader.load doc = loader_method(filename) array_morph = doc.morphology[0] - connectivity_equal = np.testing.assert_array_equal(array_morph.connectivity,self.big_arraymorph.connectivity) - physical_masks_equal = np.testing.assert_array_equal(array_morph.physical_mask,self.big_arraymorph.physical_mask) - vertices_equal = np.testing.assert_array_equal(array_morph.vertices,self.big_arraymorph.vertices) + connectivity_equal = np.testing.assert_array_equal( + array_morph.connectivity, self.big_arraymorph.connectivity + ) + physical_masks_equal = np.testing.assert_array_equal( + array_morph.physical_mask, self.big_arraymorph.physical_mask + ) + vertices_equal = np.testing.assert_array_equal( + array_morph.vertices, self.big_arraymorph.vertices + ) - self.assertEqual(connectivity_equal,None) #None when equal - self.assertEqual(physical_masks_equal,None) #None when equal - self.assertEqual(vertices_equal,None) #None when equal + self.assertEqual(connectivity_equal, None) # None when equal + self.assertEqual(physical_masks_equal, None) # None when equal + self.assertEqual(vertices_equal, None) # None when equal def test_write_multiple_morphologies(self): filename = tempfile.mkstemp()[1] writer_method = neuroml.writers.ArrayMorphWriter.write try: - writer_method(self.test_doc,filename) + writer_method(self.test_doc, filename) except: self.fail("Exception raised!") def test_write_multiple_morphologies(self): filename = tempfile.mkstemp()[1] writer_method = neuroml.writers.ArrayMorphWriter.write - writer_method(self.test_doc,filename) + writer_method(self.test_doc, filename) loader_method = neuroml.loaders.ArrayMorphLoader.load document = loader_method(filename) - self.assertIsInstance(document,neuroml.NeuroMLDocument) + self.assertIsInstance(document, neuroml.NeuroMLDocument) diff --git a/neuroml/test/test_xml_parser.py b/neuroml/test/test_xml_parser.py index 2fa1824d..4f9426f2 100644 --- a/neuroml/test/test_xml_parser.py +++ b/neuroml/test/test_xml_parser.py @@ -17,23 +17,30 @@ class TestNeuroMLXMLParser(unittest.TestCase): - def test_parse(self): base_dir = os.path.dirname(__file__) - #base_dir = '.' + # base_dir = '.' 
- logging.basicConfig(level=logging.INFO, format="%(name)-19s %(levelname)-5s - %(message)s") + logging.basicConfig( + level=logging.INFO, format="%(name)-19s %(levelname)-5s - %(message)s" + ) - for f in ['simplenet.nml','testh5.nml','pyr_4_sym.cell.nml','MediumNet.net.nml','complete.nml']: + for f in [ + "simplenet.nml", + "testh5.nml", + "pyr_4_sym.cell.nml", + "MediumNet.net.nml", + "complete.nml", + ]: - file_name = base_dir+'/../examples/test_files/'+f + file_name = base_dir + "/../examples/test_files/" + f - nml_doc0 = loaders.read_neuroml2_file(file_name, - include_includes=True, - verbose=True) + nml_doc0 = loaders.read_neuroml2_file( + file_name, include_includes=True, verbose=True + ) - summary0 = nml_doc0.summary(show_includes=False,show_non_network=False) - print('\n'+summary0) + summary0 = nml_doc0.summary(show_includes=False, show_non_network=False) + print("\n" + summary0) from neuroml.hdf5.DefaultNetworkHandler import DefaultNetworkHandler @@ -43,7 +50,7 @@ def test_parse(self): currParser.parse(file_name) - print('-------------------------------\n\n') + print("-------------------------------\n\n") nmlHandler = NetworkBuilder() @@ -53,38 +60,39 @@ def test_parse(self): nml_doc = nmlHandler.get_nml_doc() - summary = nml_doc.summary(show_includes=False,show_non_network=False) + summary = nml_doc.summary(show_includes=False, show_non_network=False) print(summary) - compare(summary,summary0) + compare(summary, summary0) - nml_file = base_dir+'/../examples/tmp/EXP_'+f + nml_file = base_dir + "/../examples/tmp/EXP_" + f import neuroml.writers as writers + writers.NeuroMLWriter.write(nml_doc, nml_file) - print("Written network file to: "+nml_file) + print("Written network file to: " + nml_file) def runTest(self): print("Running tests in TestNeuroMLXMLParser") -def compare(s1,s2): - if s1==s2: +def compare(s1, s2): + if s1 == s2: print("Same!") return - l1 = s1.split('\n') - l2 = s2.split('\n') + l1 = s1.split("\n") + l2 = s2.split("\n") - for i in range(min(len(l1),len(l2))): - if not l1[i]==l2[i]: - print("Mismatch at line %i:\n>>> %s\n<<< %s"%(i,l1[i],l2[i])) - if len(l1)!=len(l2): + for i in range(min(len(l1), len(l2))): + if not l1[i] == l2[i]: + print("Mismatch at line %i:\n>>> %s\n<<< %s" % (i, l1[i], l2[i])) + if len(l1) != len(l2): print("Different number of lines!") - assert(s1==s2) + assert s1 == s2 -if __name__ == '__main__': +if __name__ == "__main__": tnxp = TestNeuroMLXMLParser() tnxp.test_parse() diff --git a/neuroml/utils.py b/neuroml/utils.py index 07c9abdf..3514885e 100644 --- a/neuroml/utils.py +++ b/neuroml/utils.py @@ -19,13 +19,18 @@ def validate_neuroml2(file_name): """ from lxml import etree - xsd_file = os.path.join(os.path.dirname(__file__), 'nml/NeuroML_%s.xsd'%neuroml.current_neuroml_version) + xsd_file = os.path.join( + os.path.dirname(__file__), + "nml/NeuroML_%s.xsd" % neuroml.current_neuroml_version, + ) with open(xsd_file) as schema_file: xmlschema = etree.XMLSchema(etree.parse(schema_file)) - print("Validating %s against %s" %(file_name, xsd_file)) + print("Validating %s against %s" % (file_name, xsd_file)) if not xmlschema.validate(etree.parse(file_name)): - xmlschema.assertValid(etree.parse(file_name)) # print reason if file is invalid + xmlschema.assertValid( + etree.parse(file_name) + ) # print reason if file is invalid return print("It's valid!") @@ -41,11 +46,14 @@ def is_valid_neuroml2(file_name): """ from lxml import etree - xsd_file = os.path.join(os.path.dirname(__file__), 'nml/NeuroML_%s.xsd' % neuroml.current_neuroml_version) + 
xsd_file = os.path.join( + os.path.dirname(__file__), + "nml/NeuroML_%s.xsd" % neuroml.current_neuroml_version, + ) with open(xsd_file) as schema_file: xmlschema = etree.XMLSchema(etree.parse(schema_file)) - return (xmlschema.validate(etree.parse(file_name))) + return xmlschema.validate(etree.parse(file_name)) return False @@ -68,7 +76,10 @@ def get_summary(nml_file_name): :rtype: str """ from neuroml.loaders import read_neuroml2_file - nml_doc = read_neuroml2_file(nml_file_name,include_includes=True, verbose=False, optimized=True) + + nml_doc = read_neuroml2_file( + nml_file_name, include_includes=True, verbose=False, optimized=True + ) return nml_doc.summary(show_includes=False) @@ -88,23 +99,25 @@ def add_all_to_document(nml_doc_src, nml_doc_tgt, verbose=False): membs = inspect.getmembers(nml_doc_src) for memb in membs: - if isinstance(memb[1], list) and len(memb[1])>0 \ - and not memb[0].endswith('_'): + if isinstance(memb[1], list) and len(memb[1]) > 0 and not memb[0].endswith("_"): for entry in memb[1]: - if memb[0] != 'includes': + if memb[0] != "includes": added = False for c in getattr(nml_doc_tgt, memb[0]): - if hasattr(c,'id') and c.id == entry.id: + if hasattr(c, "id") and c.id == entry.id: added = True if not added: - #print(" Adding %s to list: %s" \ + # print(" Adding %s to list: %s" \ # %(entry.id if hasattr(entry,'id') else entry.name, memb[0])) getattr(nml_doc_tgt, memb[0]).append(entry) added = True if not added: - raise Exception("Could not add %s from %s to %s"%(entry, nml_doc_src, nml_doc_tgt)) + raise Exception( + "Could not add %s from %s to %s" + % (entry, nml_doc_src, nml_doc_tgt) + ) def append_to_element(parent, child): @@ -117,25 +130,25 @@ def append_to_element(parent, child): :raises Exception: when the child could not be added to the parent """ membs = inspect.getmembers(parent) - #print("Adding %s to element %s"%(child, parent)) + # print("Adding %s to element %s"%(child, parent)) mappings = {} for mdi in parent.member_data_items_: mappings[mdi.data_type] = mdi.name added = False for memb in membs: - if isinstance(memb[1], list) and not memb[0].endswith('_'): - #print("Adding %s to %s in %s?"%(child.__class__.__name__, memb[0], parent.__class__.__name__)) + if isinstance(memb[1], list) and not memb[0].endswith("_"): + # print("Adding %s to %s in %s?"%(child.__class__.__name__, memb[0], parent.__class__.__name__)) if mappings[child.__class__.__name__] == memb[0]: for c in getattr(parent, memb[0]): if c.id == child.id: added = True if not added: getattr(parent, memb[0]).append(child) - #print("Adding %s to %s in %s?"%(child.__class__.__name__, memb[0], parent.__class__.__name__)) + # print("Adding %s to %s in %s?"%(child.__class__.__name__, memb[0], parent.__class__.__name__)) added = True if not added: - raise Exception("Could not add %s to %s"%(child, parent)) + raise Exception("Could not add %s to %s" % (child, parent)) def has_segment_fraction_info(connections): @@ -149,22 +162,27 @@ def has_segment_fraction_info(connections): if not connections: return False no_seg_fract_info = True - i=0 - while no_seg_fract_info and i Date: Mon, 27 Sep 2021 09:18:57 +0100 Subject: [PATCH 043/136] feat: update doc conf --- doc/conf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/conf.py b/doc/conf.py index be4584dc..c981929c 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -63,7 +63,7 @@ # space here is important since __version__ is used in generation of # version_info also if '__version__ =' in aline: - version = aline.split("'")[1] + version = 
aline.split("\"")[1] # The full version, including alpha/beta/rc tags. release = version From d23e94939f7d397c62a655af9137206e17df5265 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Fri, 8 Oct 2021 09:01:21 +0100 Subject: [PATCH 044/136] chore: add requirements file for development --- requirements-dev.txt | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 requirements-dev.txt diff --git a/requirements-dev.txt b/requirements-dev.txt new file mode 100644 index 00000000..8e182bc9 --- /dev/null +++ b/requirements-dev.txt @@ -0,0 +1,2 @@ +generateds +pytest From ca08685fde51e961684dccc9e0c1ccc0072b3768 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Fri, 8 Oct 2021 09:02:04 +0100 Subject: [PATCH 045/136] ci: add python 3.10 to test matrix 3.10 was released recently. https://docs.python.org/3.10/whatsnew/3.10.html --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index db7af520..ded3f68f 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -12,7 +12,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: [2.7, 3.7, 3.8, 3.9] + python-version: [2.7, 3.7, 3.8, 3.9, 3.10] steps: - uses: actions/checkout@v2 From d6f1f66e13d0df0873dbdb0357c212202e892576 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Fri, 8 Oct 2021 09:07:33 +0100 Subject: [PATCH 046/136] fix: quote "3.10" in ci otherwise it looks for 3.1 Reference: - https://github.com/actions/setup-python/issues/249#issuecomment-934299359 --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index ded3f68f..864f6d94 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -12,7 +12,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: [2.7, 3.7, 3.8, 3.9, 3.10] + python-version: ["2.7", "3.7", "3.8", "3.9", "3.10"] steps: - uses: actions/checkout@v2 From 6f99b51394656e0f1a32621d105d87125eab53c6 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Fri, 8 Oct 2021 09:26:42 +0100 Subject: [PATCH 047/136] ci: install hdf5-dev for pytables --- .github/workflows/ci.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 864f6d94..9bcf0a31 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -27,6 +27,7 @@ jobs: - name: Install dependencies run: | + sudo apt-get install libhdf5-serial-dev -y python -m pip install --upgrade pip python -m pip install flake8 pytest if [ -f requirements.txt ]; then pip install -r requirements.txt; fi From f28eda71d0ec97c6bf959f23cc780cbdc552b735 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Fri, 8 Oct 2021 09:34:50 +0100 Subject: [PATCH 048/136] ci: install liblzo2-dev for pytables The docs say it's optional, but the pytables build failure seems to indicate it's required. 
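For reference, a rough local equivalent of this CI install step is sketched below. The package names are the Debian/Ubuntu ones used on the GitHub runners; this is illustrative only and other platforms will differ.

    # Illustrative sketch: system libraries pytables compiles against when no
    # prebuilt wheel is available (e.g. on a brand-new Python release).
    sudo apt-get install -y libhdf5-serial-dev liblzo2-dev
    pip install tables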
--- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 9bcf0a31..551dd637 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -27,7 +27,7 @@ jobs: - name: Install dependencies run: | - sudo apt-get install libhdf5-serial-dev -y + sudo apt-get install libhdf5-serial-dev liblzo2-dev -y python -m pip install --upgrade pip python -m pip install flake8 pytest if [ -f requirements.txt ]; then pip install -r requirements.txt; fi From 33e1e81c5fe03f1a5c5a6c4d55f75878d5048c1f Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Fri, 8 Oct 2021 09:35:53 +0100 Subject: [PATCH 049/136] chore: add wheel to dev requirements --- requirements-dev.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/requirements-dev.txt b/requirements-dev.txt index 8e182bc9..db6cd988 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,2 +1,3 @@ generateds pytest +wheel From a40874473cdaafac2bebb641dd2989e60937d1ec Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Fri, 8 Oct 2021 09:51:42 +0100 Subject: [PATCH 050/136] chore: use latest git snapshot for tables for python >= 3.10 The last release they made does not work with Python 3.10. Fixes are in the GitHub repo but pending a new release. --- requirements.txt | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 338e612b..f3c8f226 100644 --- a/requirements.txt +++ b/requirements.txt @@ -5,6 +5,7 @@ numpy pymongo numexpr simplejson; python_version < '3.5' -tables>=3.3.0 +git+git://github.com/PyTables/PyTables/@master#egg=tables ; python_version >= '3.10' +tables>=3.3.0 ; python_version < '3.10' jsonpickle>=0.9.6 pytest From 13d1882c41845f13e5aca1d070cd8dec06f3270e Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Fri, 8 Oct 2021 10:23:44 +0100 Subject: [PATCH 051/136] chore: reorder requirements Cython must be installed before pip tries to build pytables. Also moves flake and pytest to requirements-dev. 
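A rough sketch of the install order this assumes: the dev requirements supply the build tooling, and the runtime requirements (which may pull pytables in as a source build) are installed afterwards. Illustrative only; the workflow change below is the authoritative version.

    # Illustrative: build tools first, then the runtime dependencies
    pip install -r requirements-dev.txt   # wheel, cython, flake8, pytest, ...
    pip install -r requirements.txt       # lxml, numpy, tables, ...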
--- .github/workflows/ci.yml | 2 +- requirements-dev.txt | 4 +++- requirements.txt | 2 -- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 551dd637..b2d30600 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -29,7 +29,7 @@ jobs: run: | sudo apt-get install libhdf5-serial-dev liblzo2-dev -y python -m pip install --upgrade pip - python -m pip install flake8 pytest + if [ -f requirements-dev.txt ]; then pip install -r requirements-dev.txt; fi if [ -f requirements.txt ]; then pip install -r requirements.txt; fi - name: Build package diff --git a/requirements-dev.txt b/requirements-dev.txt index db6cd988..2cf55397 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,3 +1,5 @@ +wheel generateds +cython +flake8 pytest -wheel diff --git a/requirements.txt b/requirements.txt index f3c8f226..4d4fc3f2 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,6 +1,5 @@ six lxml -cython numpy pymongo numexpr @@ -8,4 +7,3 @@ simplejson; python_version < '3.5' git+git://github.com/PyTables/PyTables/@master#egg=tables ; python_version >= '3.10' tables>=3.3.0 ; python_version < '3.10' jsonpickle>=0.9.6 -pytest From c9d6c0effc1a9598b1ab9dbd32d689fd4c4fda52 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Fri, 8 Oct 2021 10:34:12 +0100 Subject: [PATCH 052/136] chore: add numpy to dev to allow pytables wheels to build --- requirements-dev.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/requirements-dev.txt b/requirements-dev.txt index 2cf55397..13188a3f 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -3,3 +3,4 @@ generateds cython flake8 pytest +numpy From cafdb8fde141cb3f2d0e3e9e8960b85be882b615 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Fri, 8 Oct 2021 10:43:26 +0100 Subject: [PATCH 053/136] chore: add python 3.10 in setup.py (and reformat with black) --- setup.py | 70 ++++++++++++++++++++++++++++++-------------------------- 1 file changed, 37 insertions(+), 33 deletions(-) diff --git a/setup.py b/setup.py index d482d434..587062ca 100644 --- a/setup.py +++ b/setup.py @@ -4,44 +4,48 @@ long_description = open("README.md").read() -for line in open('neuroml/__init__.py'): +for line in open("neuroml/__init__.py"): if line.startswith("__version__"): version = line.split("=")[1].strip()[1:-1] setup( - name = "libNeuroML", - version = version, - packages = ['neuroml', 'neuroml.test','neuroml.nml','neuroml.hdf5'], - package_data = {'neuroml.test': ['*.nml'], 'neuroml.nml': ['*.xsd']}, - author = "libNeuroML authors and contributors", - author_email = "vellamike@gmail.com, p.gleeson@gmail.com", - description = "A Python library for working with NeuroML descriptions of neuronal models", - long_description = long_description, + name="libNeuroML", + version=version, + packages=["neuroml", "neuroml.test", "neuroml.nml", "neuroml.hdf5"], + package_data={"neuroml.test": ["*.nml"], "neuroml.nml": ["*.xsd"]}, + author="libNeuroML authors and contributors", + author_email="vellamike@gmail.com, p.gleeson@gmail.com", + description="A Python library for working with NeuroML descriptions of neuronal models", + long_description=long_description, long_description_content_type="text/markdown", - install_requires=['lxml', 'six'], + install_requires=["lxml", "six"], tests_require=["pytest"], - extras_require={"full": [ - "cython", - "numpy", - "pymongo", - "numexpr", - "simplejson; python_version < '3.5'", - "tables>=3.3.0", - "jsonpickle>=0.9.6" - ]}, - 
license = "BSD", + extras_require={ + "full": [ + "cython", + "numpy", + "pymongo", + "numexpr", + "simplejson; python_version < '3.5'", + "tables>=3.3.0", + "jsonpickle>=0.9.6", + ] + }, + license="BSD", url="http://libneuroml.readthedocs.org/en/latest/", - classifiers = [ - 'Intended Audience :: Science/Research', - 'License :: OSI Approved :: BSD License', - 'Natural Language :: English', - 'Operating System :: OS Independent', - 'Development Status :: 5 - Production/Stable', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.7', - 'Programming Language :: Python :: 3.8', - 'Programming Language :: Python :: 3.9', - 'Topic :: Scientific/Engineering :: Bio-Informatics', - 'Topic :: Scientific/Engineering'] + classifiers=[ + "Intended Audience :: Science/Research", + "License :: OSI Approved :: BSD License", + "Natural Language :: English", + "Operating System :: OS Independent", + "Development Status :: 5 - Production/Stable", + "Programming Language :: Python :: 2.7", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Topic :: Scientific/Engineering :: Bio-Informatics", + "Topic :: Scientific/Engineering", + ], ) From 000eab5f9d381ced4cc7848701fdc1e85431a1d0 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Fri, 8 Oct 2021 10:43:42 +0100 Subject: [PATCH 054/136] chore: add section for 0.2.57 in changelog (and reformat) --- README.md | 63 ++++++++++++++++++++++++++++++++++--------------------- 1 file changed, 39 insertions(+), 24 deletions(-) diff --git a/README.md b/README.md index 9eab7005..fb1a24ed 100644 --- a/README.md +++ b/README.md @@ -41,53 +41,68 @@ That branch is kept up to date with the development branches for [NeuroML 2](htt See https://docs.neuroml.org/ for an overview of the various NeuroML libraries. ## Changelog +### version 0.2.57 (dev) + +- Enable Python 3.10 support + +### version 0.2.56 + +- Documentation updates for RTD and other minor fixes. ### version 0.2.55 - - Patch release with minor changes under the hood. - - Use PyTest for testing. - - Enable CI on GitHub Actions +- Patch release with minor changes under the hood. +- Use PyTest for testing. +- Enable CI on GitHub Actions ### version 0.2.54 - - Using Schema for NeuroML v2.1. Better compatibility with Python 3 - ### version 0.2.50 - - Updated to use the final stable Schema for NeuroML v2.0 +- Using Schema for NeuroML v2.1. 
Better compatibility with Python 3 + +### version 0.2.50 + +- Updated to use the final stable Schema for NeuroML v2.0 ### version 0.2.47 - - Updated to use the final stable Schema for NeuroML v2beta5 + +- Updated to use the final stable Schema for NeuroML v2beta5 ### version 0.2.18 - - Updated to use the final stable Schema for NeuroML v2beta4 - - Tested with Python 3 + +- Updated to use the final stable Schema for NeuroML v2beta4 +- Tested with Python 3 ### version 0.2.4 - - Updated to use the Schema for NeuroML v2beta4 + +- Updated to use the Schema for NeuroML v2beta4 ### version 0.2.2 - - Updated to use the Schema for NeuroML v2beta3 - - Ensures numpy & pytables are only required when using non-XML loaders/writers + +- Updated to use the Schema for NeuroML v2beta3 +- Ensures numpy & pytables are only required when using non-XML loaders/writers ### version 0.2.0 - - Updated to use the Schema for NeuroML v2beta2 + +- Updated to use the Schema for NeuroML v2beta2 ### version 0.1.9 - - Minor release: Update to latest schema + +- Minor release: Update to latest schema ### version 0.1.8 - - Several Bug fixes and small enhamcements - - Support for latest NeuroML schema (see change outline) - - JSON serialization - - MongoDB backend - - HDF5 serialization - - Improved installation process - - All usage examples are now run on the Travis-CI continuous integration server to confirm that that they do not error. - - Schema validation utility - - Improved documentation and documentation new look +- Several Bug fixes and small enhamcements +- Support for latest NeuroML schema (see change outline) +- JSON serialization +- MongoDB backend +- HDF5 serialization +- Improved installation process +- All usage examples are now run on the Travis-CI continuous integration server to confirm that that they do not error. +- Schema validation utility +- Improved documentation and documentation new look -:copyright: Copyright 2020 by the libNeuroML team, see [AUTHORS](AUTHORS). Modified BSD License, see [LICENSE](LICENSE) for details. +:copyright: Copyright 2021 by the libNeuroML team, see [AUTHORS](AUTHORS). Modified BSD License, see [LICENSE](LICENSE) for details. [![Build Status](https://api.travis-ci.org/NeuralEnsemble/libNeuroML.png)](https://travis-ci.org/NeuralEnsemble/libNeuroML) From f3ecc6caf11c7819e09f8a46ffcb77d3d4d603b7 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Fri, 8 Oct 2021 10:57:45 +0100 Subject: [PATCH 055/136] chore: add generateDS version constraints also add black to dev requirements --- requirements-dev.txt | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index 13188a3f..282d828e 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,6 +1,7 @@ wheel -generateds +generateds >= 2.20a cython flake8 pytest numpy +black From e2ce4558a56e2a89d4ad94138bc1653d13f1fee8 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Fri, 8 Oct 2021 10:59:20 +0100 Subject: [PATCH 056/136] ci(nml.py): add an action to regenerate nml.py and run all tests This ensures we regularly check that nml.py is correctly generated. 
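Roughly, the job mirrors what a developer would run locally. The steps below are only a sketch (the script name regenerate-nml.sh is the one added in this series); the workflow file in the diff is authoritative.

    # Illustrative local equivalent of the new CI job (run from the repo root)
    (cd neuroml/nml && ./regenerate-nml.sh)      # regenerate nml.py from the XSD
    pip install .[full]                          # rebuild the package
    pytest                                       # run the test suite
    (cd neuroml/examples && python run_all.py)   # run the examples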
--- .github/workflows/regenerate.yml | 53 ++++++++++++++++++++++++++++++++ neuroml/nml/regenerate-nml.sh | 30 ++++++++++++++++++ 2 files changed, 83 insertions(+) create mode 100644 .github/workflows/regenerate.yml create mode 100755 neuroml/nml/regenerate-nml.sh diff --git a/.github/workflows/regenerate.yml b/.github/workflows/regenerate.yml new file mode 100644 index 00000000..34e9a5d4 --- /dev/null +++ b/.github/workflows/regenerate.yml @@ -0,0 +1,53 @@ +name: Regenerate nml.py + +on: + push: + branches: [ master, development ] + pull_request: + branches: [ master, development ] + +jobs: + build: + + runs-on: ubuntu-latest + strategy: + matrix: + python-version: ["2.7", "3.7", "3.8", "3.9", "3.10"] + + steps: + - uses: actions/checkout@v2 + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + + - name: Install dependencies + run: | + sudo apt-get install libhdf5-serial-dev liblzo2-dev -y + python -m pip install --upgrade pip + if [ -f requirements-dev.txt ]; then pip install -r requirements-dev.txt; fi + if [ -f requirements.txt ]; then pip install -r requirements.txt; fi + + - name: Rebuild nml.py + run: | + cd neuroml/nml && rm -f nml.py && ./rebuild-nml.sh + + - name: Build package + run: | + pip install .[full] + + - name: Test with pytest + run: | + pytest + + - name: Run examples + run: | + cd ./neuroml/examples && python run_all.py + + - name: Lint with flake8 + run: | + # stop the build if there are Python syntax errors or undefined names + flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics + # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide + flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics diff --git a/neuroml/nml/regenerate-nml.sh b/neuroml/nml/regenerate-nml.sh new file mode 100755 index 00000000..6f3aac09 --- /dev/null +++ b/neuroml/nml/regenerate-nml.sh @@ -0,0 +1,30 @@ +#!/bin/bash + +# Copyright 2021 NeuroML contributors +# File : regenerate-nml.sh +# Regenerate nml.py from the current schema version + + +echo "Note(1): Please remember to update the schema from the NeuroML2 repository" +echo "Note(2): Must be run in neuroml/nml/" +NEUROML_VERSION=$(grep -E 'current_neuroml_version.*' ../__init__.py | cut -d '=' -f 2 | tr -d '"' | tr -d ' ') +SCHEMA_FILE=NeuroML_${NEUROML_VERSION}.xsd + +if command -v generateDS > /dev/null 2>&1 +then + echo "Rebuilding nml.py from ${SCHEMA_FILE} (Version: ${NEUROML_VERSION})" + echo "generateds version:" + generateDS --version + + rm -f nml.py + export PYTHONPATH="$PYTHONPATH:." && generateDS -o nml.py --use-getter-setter=none --silence --user-methods=helper_methods.py $SCHEMA_FILE + + echo "Formatting new nml.py with black" + black nml.py +else + echo "GenerateDS not installed" + echo "Run: pip install generateds" + exit 1 +fi + +exit 0 From 1bd1d1b97adcd5730bd0554a3a2760eb3ce1c281 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Fri, 8 Oct 2021 11:00:20 +0100 Subject: [PATCH 057/136] chore: update readme for regenerating nml.py --- neuroml/nml/README.md | 48 ++++++++++++++++++++++++++++++++++--------- 1 file changed, 38 insertions(+), 10 deletions(-) diff --git a/neuroml/nml/README.md b/neuroml/nml/README.md index 5eacc607..9465b3e2 100644 --- a/neuroml/nml/README.md +++ b/neuroml/nml/README.md @@ -1,31 +1,59 @@ ## Autogeneration of API, using generateds_config.py to ensure correct naming conventions. 
-This requires [generateDS.py](http://www.davekuhlman.org/generateDS.html), version >= 2.20a. +Please regenerate nml.py in a new branch and open a pull request against the `development` branch. -You can get it from [PyPi](http://pypi.python.org/pypi/generateDS/) or [Source Forge](https://sourceforge.net/projects/generateds/). +Please **do not** regenerate nml.py and push directly to the `master` branch because regeneration of `nml.py` may change the libNeuroML API, and so this could be a backwards incompatible change which must not be pushed to master without a corresponding release and sufficient testing. -All that is needed is the Schema - as long as generateds_config.py and helper_methods are present, nml.py should be generated correctly. +### Requirements -Unit tests should be run to confirm this. +- the Schema (XSD) file +- `generateds_config.py`: this includes the correct naming conventions +- `helper_methods.py`: this includes additional helper methods that we want to add to the API -generateDS.py should be invoked in this folder (so that generateds_config.py can be located) with the following command (namespace def here will be mended when it's become stable) +The following packages are required. - generateDS.py -o nml.py --use-getter-setter=none --silence --user-methods=helper_methods NeuroML_v2.2.xsd +- generateds +- black -You may have to add the current folder to your PYTHONPATH, i.e. +They can be installed using the `requirements-dev.txt` file in the root directory: + +``` +pip install -r requirements-dev.txt +``` + +### Regenerating nml.py + +- Run the `regenerate-nml.sh` script in the `neuroml/nml` folder. +- This will use the API version defined in `neuroml/__init__.py` to find the right schema XSD file and run generateds to regenerate the `nml.py` file. +- It will also run `black` on the newly generated file to reformat it. + +The generateDS command that is invoked is of this form: + + generateDS.py -o nml.py --use-getter-setter=none --silence --user-methods=helper_methods.py NeuroML_v2.2.xsd + +You will need to add the current folder to your PYTHONPATH so that the required files can be imported by generateDS. export PYTHONPATH=$PYTHONPATH:. -Note that generateDS.py will import the generateds_config.py file and run it. -Your output should, therefore, include lines of the form: +Your output should include lines of the form: generateds_config.py is being processed Saving NameTable to csv file: name_table.csv Saving name changes table to csv file: changed_names.csv -If these are not included in the output, generateds_config.py has not run, and the generated nml.py file will be incorrect. +If these are not included in the output, generateDS has not run correctly, and the generated nml.py file will be incorrect. + +### Testing + +Please remember to: + +- rebuild libNeuroML after regenerating `nml.py` +- run all unit tests +- run all examples + +The CI on GitHub will always run these. 
### Changelog From 50ee7d0a738105e7003e54b3affed18e1d6ae7ba Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Fri, 8 Oct 2021 11:57:13 +0100 Subject: [PATCH 058/136] chore(nml.py): add conditional imports for py2/3 --- neuroml/nml/generateds_config.py | 20 +++++++++++++------- 1 file changed, 13 insertions(+), 7 deletions(-) diff --git a/neuroml/nml/generateds_config.py b/neuroml/nml/generateds_config.py index 706eb7a3..9fe2a48f 100644 --- a/neuroml/nml/generateds_config.py +++ b/neuroml/nml/generateds_config.py @@ -5,9 +5,18 @@ import lxml from lxml import objectify import re -from config import variables import csv import sys +import process_includes +# In Python 3, it's io +try: + from StringIO import StringIO +except ImportError: + from io import StringIO +from config import variables + + + sys.setrecursionlimit(10000) @@ -80,10 +89,7 @@ def _node_to_python(node): filename = variables["schema_name"] -import StringIO -import process_includes - -outfile = StringIO.StringIO() +outfile = StringIO() infile = open(filename, "r") process_includes.process_include_files(infile, outfile, inpath=filename) @@ -148,12 +154,12 @@ def _node_to_python(node): print(NameTable) print("Saving NameTable to csv file") -writer = csv.writer(open("name_table.csv", "wb")) +writer = csv.writer(open("name_table.csv", "w")) for key, value in NameTable.items(): writer.writerow([key, value]) print("Saving name changes table to csv file") -changes_writer = csv.writer(open("changed_names.csv", "wb")) +changes_writer = csv.writer(open("changed_names.csv", "w")) for key in NameTable: value = NameTable[key] if key != value: From 21abc169d709e34f72976319ad6750ac6e9671d0 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Fri, 8 Oct 2021 11:58:30 +0100 Subject: [PATCH 059/136] chore: tweak generateds command --- neuroml/nml/regenerate-nml.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/neuroml/nml/regenerate-nml.sh b/neuroml/nml/regenerate-nml.sh index 6f3aac09..196c7378 100755 --- a/neuroml/nml/regenerate-nml.sh +++ b/neuroml/nml/regenerate-nml.sh @@ -17,7 +17,7 @@ then generateDS --version rm -f nml.py - export PYTHONPATH="$PYTHONPATH:." && generateDS -o nml.py --use-getter-setter=none --silence --user-methods=helper_methods.py $SCHEMA_FILE + PYTHONPATH="$PYTHONPATH:." 
generateDS -o nml.py --use-getter-setter=none --user-methods=helper_methods.py $SCHEMA_FILE echo "Formatting new nml.py with black" black nml.py From 33a520cc782c5df9b932f1dc8285fd87a36d565f Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Fri, 8 Oct 2021 12:43:34 +0100 Subject: [PATCH 060/136] chore: limit black to py3 --- requirements-dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index 282d828e..a5794f0f 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -4,4 +4,4 @@ cython flake8 pytest numpy -black +black ; python_version > '3.0' From 39f9f00cb408345a4824ef9f2da11f3483297e7d Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Fri, 8 Oct 2021 16:11:33 +0100 Subject: [PATCH 061/136] fix: correct file mode for py3 --- neuroml/nml/generateds_config.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/neuroml/nml/generateds_config.py b/neuroml/nml/generateds_config.py index 46ec66fd..4cb361e8 100644 --- a/neuroml/nml/generateds_config.py +++ b/neuroml/nml/generateds_config.py @@ -153,7 +153,7 @@ def _node_to_python(node): print(NameTable) print("Saving NameTable to csv file") -writer = csv.writer(open("name_table.csv", "wb")) +writer = csv.writer(open("name_table.csv", "w")) for key, value in NameTable.items(): writer.writerow([key, value]) From 4a7cfe9904709fecdc6b30f1404384820dd2f4da Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Fri, 8 Oct 2021 16:12:57 +0100 Subject: [PATCH 062/136] refactor: improve nml.py generator script --- neuroml/nml/regenerate-nml.sh | 82 +++++++++++++++++++++++++++++------ 1 file changed, 68 insertions(+), 14 deletions(-) diff --git a/neuroml/nml/regenerate-nml.sh b/neuroml/nml/regenerate-nml.sh index 196c7378..5bb68bde 100755 --- a/neuroml/nml/regenerate-nml.sh +++ b/neuroml/nml/regenerate-nml.sh @@ -10,21 +10,75 @@ echo "Note(2): Must be run in neuroml/nml/" NEUROML_VERSION=$(grep -E 'current_neuroml_version.*' ../__init__.py | cut -d '=' -f 2 | tr -d '"' | tr -d ' ') SCHEMA_FILE=NeuroML_${NEUROML_VERSION}.xsd -if command -v generateDS > /dev/null 2>&1 +regenerate () { + if command -v generateDS > /dev/null 2>&1 + then + echo "Rebuilding nml.py from ${SCHEMA_FILE} (Version: ${NEUROML_VERSION})" + echo "generateds version:" + generateDS --version + + rm -f nml.py + PYTHONPATH="$PYTHONPATH:." generateDS -o nml.py --use-getter-setter=none --user-methods=helper_methods.py $SCHEMA_FILE + else + echo "GenerateDS not installed" + echo "Run: pip install generateds" + exit 1 + fi +} + +reformat () { + if command -v generateDS > /dev/null 2>&1 + then + echo "Formatting new nml.py with black" + black nml.py + exit 0 + else + echo "black is not installed" + echo "Run: pip install black" + exit 1 + fi +} + +usage () { + echo "$0: Regenerate and reformat nml.py from the XSD schema file" + echo + echo "Usage: $0 [-arfh]" + echo + echo "-r: regenerate" + echo "-f: reformat" + echo "-a: regenerate and reformat" + echo "-h: print this help text and exit" +} + +if [ $# -lt 1 ] then - echo "Rebuilding nml.py from ${SCHEMA_FILE} (Version: ${NEUROML_VERSION})" - echo "generateds version:" - generateDS --version - - rm -f nml.py - PYTHONPATH="$PYTHONPATH:." 
generateDS -o nml.py --use-getter-setter=none --user-methods=helper_methods.py $SCHEMA_FILE - - echo "Formatting new nml.py with black" - black nml.py -else - echo "GenerateDS not installed" - echo "Run: pip install generateds" + usage exit 1 fi -exit 0 +# parse options +while getopts "arfh" OPTION +do + case $OPTION in + r) + regenerate + exit 0 + ;; + f) + reformat + exit 0 + ;; + a) + regenerate && reformat + exit 0 + ;; + h) + usage + exit 0 + ;; + ?) + usage + exit 1 + ;; + esac +done From 4a9a5e3f4925794be9f35c71194b5b42c5cf167f Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Fri, 8 Oct 2021 16:13:31 +0100 Subject: [PATCH 063/136] chore: regenerated nml.py --- neuroml/nml/nml.py | 81 +++++++++++++++++++++++++++++----------------- 1 file changed, 51 insertions(+), 30 deletions(-) diff --git a/neuroml/nml/nml.py b/neuroml/nml/nml.py index 72bfbfe9..f3f842da 100644 --- a/neuroml/nml/nml.py +++ b/neuroml/nml/nml.py @@ -2,21 +2,19 @@ # -*- coding: utf-8 -*- # -# Generated Fri Aug 27 11:33:40 2021 by generateDS.py version 2.39.9. -# Python 3.9.6 (default, Jul 16 2021, 00:00:00) [GCC 11.1.1 20210531 (Red Hat 11.1.1-3)] +# Generated Fri Oct 8 15:17:16 2021 by generateDS.py version 2.40.3. +# Python 3.10.0rc2 (default, Sep 8 2021, 00:00:00) [GCC 11.2.1 20210728 (Red Hat 11.2.1-1)] # # Command line options: # ('-o', 'nml.py') # ('--use-getter-setter', 'none') -# ('--no-warnings', '') -# ('--silence', '') -# ('--user-methods', './helper_methods.py') +# ('--user-methods', 'helper_methods.py') # # Command line arguments: # NeuroML_v2.2.xsd # # Command line: -# /home/asinha/.virtualenvs/generateds-39/bin/generateDS.py -o "nml.py" --use-getter-setter="none" --no-warnings --silence --user-methods="./helper_methods.py" NeuroML_v2.2.xsd +# /home/asinha/.local/share/virtualenvs/generateds-310/bin/generateDS -o "nml.py" --use-getter-setter="none" --user-methods="helper_methods.py" NeuroML_v2.2.xsd # # Current working directory (os.getcwd()): # nml @@ -202,6 +200,32 @@ def tzname(self, dt): def dst(self, dt): return None + def __str__(self): + settings = { + "str_pretty_print": True, + "str_indent_level": 0, + "str_namespaceprefix": "", + "str_name": None, + "str_namespacedefs": "", + } + for n in settings: + if hasattr(self, n): + setattr(settings[n], self[n]) + from io import StringIO + + output = StringIO() + self.export( + output, + settings["str_indent_level"], + pretty_print=settings["str_pretty_print"], + namespaceprefix_=settings["str_namespaceprefix"], + name_=settings["str_name"], + namespacedef_=settings["str_namespacedefs"], + ) + strval = output.getvalue() + output.close() + return strval + def gds_format_string(self, input_data, input_name=""): return input_data @@ -215,7 +239,7 @@ def gds_validate_string(self, input_data, node=None, input_name=""): return input_data def gds_format_base64(self, input_data, input_name=""): - return base64.b64encode(input_data) + return base64.b64encode(input_data).decode("ascii") def gds_validate_base64(self, input_data, node=None, input_name=""): return input_data @@ -56675,12 +56699,11 @@ def parse(inFileName, silence=False, print_warnings=True): if not SaveElementTreeNode: doc = None rootNode = None - ## if not silence: - ## sys.stdout.write('\n') - ## rootObj.export( - ## sys.stdout, 0, name_=rootTag, - ## namespacedef_=namespacedefs, - ## pretty_print=True) + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, namespacedef_=namespacedefs, pretty_print=True + ) if print_warnings and 
len(gds_collector.get_messages()) > 0: separator = ("-" * 50) + "\n" sys.stderr.write(separator) @@ -56728,12 +56751,12 @@ def parseEtree( if not SaveElementTreeNode: doc = None rootNode = None - ## if not silence: - ## content = etree_.tostring( - ## rootElement, pretty_print=True, - ## xml_declaration=True, encoding="utf-8") - ## sys.stdout.write(str(content)) - ## sys.stdout.write('\n') + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, xml_declaration=True, encoding="utf-8" + ) + sys.stdout.write(str(content)) + sys.stdout.write("\n") if print_warnings and len(gds_collector.get_messages()) > 0: separator = ("-" * 50) + "\n" sys.stderr.write(separator) @@ -56767,11 +56790,9 @@ def parseString(inString, silence=False, print_warnings=True): rootObj.build(rootNode, gds_collector_=gds_collector) if not SaveElementTreeNode: rootNode = None - ## if not silence: - ## sys.stdout.write('\n') - ## rootObj.export( - ## sys.stdout, 0, name_=rootTag, - ## namespacedef_='') + if not silence: + sys.stdout.write('\n') + rootObj.export(sys.stdout, 0, name_=rootTag, namespacedef_="") if print_warnings and len(gds_collector.get_messages()) > 0: separator = ("-" * 50) + "\n" sys.stderr.write(separator) @@ -56800,12 +56821,12 @@ def parseLiteral(inFileName, silence=False, print_warnings=True): if not SaveElementTreeNode: doc = None rootNode = None - ## if not silence: - ## sys.stdout.write('#from nml import *\n\n') - ## sys.stdout.write('import nml as model_\n\n') - ## sys.stdout.write('rootObj = model_.rootClass(\n') - ## rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) - ## sys.stdout.write(')\n') + if not silence: + sys.stdout.write("#from nml import *\n\n") + sys.stdout.write("import nml as model_\n\n") + sys.stdout.write("rootObj = model_.rootClass(\n") + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(")\n") if print_warnings and len(gds_collector.get_messages()) > 0: separator = ("-" * 50) + "\n" sys.stderr.write(separator) From 44c00c2aa7db5c7fdda5f26cae0c74b86a331a5d Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Fri, 8 Oct 2021 16:32:50 +0100 Subject: [PATCH 064/136] feat!: drop json serialisation lxml.etree._Element objects cannot be pickled. See #111 for more information. 
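(For code that used the removed JSON writer/loader, the XML serialization path is unchanged; the sketch below only uses the `NeuroMLWriter` and `NeuroMLLoader` calls already present elsewhere in this repository.)

```
# Hedged migration sketch: serialize with the remaining writers instead of JSON.
import neuroml
import neuroml.writers as writers
import neuroml.loaders as loaders

doc = neuroml.NeuroMLDocument(id="TestDocument")
writers.NeuroMLWriter.write(doc, "test_document.nml")   # XML output, unchanged by this commit
doc2 = loaders.NeuroMLLoader.load("test_document.nml")  # read it back as a NeuroMLDocument
```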
Fixes #111 BREAKING CHANGE: drops json serialisation --- neuroml/benchmarks/arraymorph_benchmarks.py | 51 +--- neuroml/examples/json_serialization.py | 74 ------ neuroml/examples/run_all.py | 1 - neuroml/examples/test_files/complete.nml | 276 ++++++++++---------- neuroml/examples/test_files/complete.nml.h5 | Bin 69930 -> 69930 bytes neuroml/examples/test_files/testh5.nml | 204 +++++++-------- neuroml/loaders.py | 50 ---- neuroml/test/test_writers.py | 145 ---------- neuroml/writers.py | 87 ------ requirements.txt | 1 - setup.py | 1 - 11 files changed, 243 insertions(+), 647 deletions(-) delete mode 100644 neuroml/examples/json_serialization.py diff --git a/neuroml/benchmarks/arraymorph_benchmarks.py b/neuroml/benchmarks/arraymorph_benchmarks.py index c41d4221..855e62a8 100644 --- a/neuroml/benchmarks/arraymorph_benchmarks.py +++ b/neuroml/benchmarks/arraymorph_benchmarks.py @@ -71,30 +71,10 @@ def write_time(self): def run_neuroml(self): self.test_write_big_arraymorph_neuroml() - @timeit - def run_json(self): - self.test_write_big_arraymorph_json() - @timeit def run_hdf5(self): self.test_write_big_arraymorph_hdf5() - def test_write_big_arraymorph_json(self): - writer_method = neuroml.writers.JSONWriter.write - fh, filename = tempfile.mkstemp() - - try: - writer_method(self.test_doc, filename) - except: - self.fail("Exception raised!") - - print("JSON Number of segments:") - print(self.num_segments) - print("JSON size in bytes:") - print(self.file_size(filename)) - - os.close(fh) - def test_write_big_arraymorph_neuroml(self): writer_method = neuroml.writers.NeuroMLWriter.write fh, filename = tempfile.mkstemp() @@ -185,16 +165,13 @@ def benchmark_arraymorph_writer(): """ num_tests = 10 - json_results = [] neuroml_results = [] hdf5_results = [] for i in range(num_tests): - json_runtimes = [] hdf5_runtimes = [] neuroml_runtimes = [] - json_num_segments_list = [] hdf5_num_segments_list = [] neuroml_num_segments_list = [] @@ -202,47 +179,36 @@ def benchmark_arraymorph_writer(): print("test %d" % (i)) neuroml_num_segments_factor = 4e2 - json_num_segments_factor = 4e2 hdf5_num_segments_factor = 4e2 neuroml_num_segments = i * neuroml_num_segments_factor - json_num_segments = i * json_num_segments_factor hdf5_num_segments = i * hdf5_num_segments_factor neuroml_benchmark = AMWriteBenchmark(num_segments=neuroml_num_segments) - json_benchmark = AMWriteBenchmark(num_segments=json_num_segments) hdf5_benchmark = AMWriteBenchmark(num_segments=hdf5_num_segments) write_time_neuroml = neuroml_benchmark.run_neuroml() - write_time_json = json_benchmark.run_json() write_time_hdf5 = hdf5_benchmark.run_hdf5() neuroml_runtimes.append(write_time_neuroml) - json_runtimes.append(write_time_json) hdf5_runtimes.append(write_time_hdf5) neuroml_num_segments_list.append(neuroml_num_segments) - json_num_segments_list.append(json_num_segments) hdf5_num_segments_list.append(hdf5_num_segments) neuroml_results.append(neuroml_runtimes) - json_results.append(json_runtimes) hdf5_results.append(hdf5_runtimes) np.savetxt("neuroml_results.csv", neuroml_results, delimiter=",") - np.savetxt("json_results.csv", json_results, delimiter=",") np.savetxt("hdf5_results.csv", hdf5_results, delimiter=",") neuroml_runtimes_averaged = np.mean(neuroml_results, axis=0) - json_runtimes_averaged = np.mean(json_results, axis=0) hdf5_runtimes_averaged = np.mean(hdf5_results, axis=0) hdf5_errors = np.std(hdf5_results, axis=0) - json_errors = np.std(json_results, axis=0) neuroml_errors = np.std(neuroml_results, axis=0) neuroml_num_segments_list = 
np.array(neuroml_num_segments_list) - json_num_segments_list = np.array(json_num_segments_list) hdf5_num_segments_list = np.array(hdf5_num_segments_list) plt_neuroml = plt.errorbar( @@ -277,27 +243,16 @@ def benchmark_arraymorph_writer(): # plt.show() - plt_json = plt.errorbar( - json_num_segments_list, - json_runtimes_averaged, - yerr=json_errors, - marker="o", - color="k", - ecolor="k", - markerfacecolor="b", - label="series 2", - capsize=5, - ) plt.title( - "ArrayMorph write to disk benchmarks for JSON, HDF5 and NeuroML serialization formats" + "ArrayMorph write to disk benchmarks for HDF5 and NeuroML serialization formats" ) plt.xlabel("Number of segments in morphology") plt.ylabel("Time to write to disk (s)") plt.legend( - [plt_json, plt_hdf5, plt_neuroml], - ["JSON serialization", "HDF5 serialization", "NeuroML serialization"], + [plt_hdf5, plt_neuroml], + ["HDF5 serialization", "NeuroML serialization"], ) plt.yscale("log") diff --git a/neuroml/examples/json_serialization.py b/neuroml/examples/json_serialization.py deleted file mode 100644 index 381ef395..00000000 --- a/neuroml/examples/json_serialization.py +++ /dev/null @@ -1,74 +0,0 @@ -""" -In this example an axon is built, a morphology is loaded, the axon is -then connected to the loadeed morphology. The whole thing is serialized -in JSON format, reloaded and validated. -""" - -import neuroml -import neuroml.loaders as loaders -import neuroml.writers as writers - -fn = "./test_files/Purk2M9s.nml" -doc = loaders.NeuroMLLoader.load(fn) -print("Loaded morphology file from: " + fn) - -# get the parent segment: -parent_segment = doc.cells[0].morphology.segments[0] - -parent = neuroml.SegmentParent(segments=parent_segment.id) - -# make an axon: -seg_id = 5000 # need a way to get a unique id from a morphology -axon_segments = [] -for i in range(10): - p = neuroml.Point3DWithDiam( - x=parent_segment.distal.x, - y=parent_segment.distal.y, - z=parent_segment.distal.z, - diameter=0.1, - ) - - d = neuroml.Point3DWithDiam( - x=parent_segment.distal.x + 10, - y=parent_segment.distal.y, - z=parent_segment.distal.z, - diameter=0.1, - ) - - axon_segment = neuroml.Segment(proximal=p, distal=d, parent=parent) - - axon_segment.id = seg_id - - axon_segment.name = "axon_segment_" + str(axon_segment.id) - - # now reset everything: - parent = neuroml.SegmentParent(segments=axon_segment.id) - parent_segment = axon_segment - seg_id += 1 - - axon_segments.append(axon_segment) - -doc.cells[0].morphology.segments += axon_segments - -json_file = "./tmp/modified_morphology.json" - -writers.JSONWriter.write(doc, json_file) - -print("Saved modified morphology in JSON format to: " + json_file) - - -##### load it again, this time write it to a normal neuroml file ### - -neuroml_document_from_json = loaders.JSONLoader.load(json_file) - -print("Re-loaded neuroml document in JSON format to NeuroMLDocument object") - -nml_file = "./tmp/modified_morphology_from_json.nml" - -writers.NeuroMLWriter.write(neuroml_document_from_json, nml_file) - -###### Validate the NeuroML ###### - -from neuroml.utils import validate_neuroml2 - -validate_neuroml2(nml_file) diff --git a/neuroml/examples/run_all.py b/neuroml/examples/run_all.py index 69eff236..51036714 100644 --- a/neuroml/examples/run_all.py +++ b/neuroml/examples/run_all.py @@ -13,7 +13,6 @@ def run_example(ex_file): run_example("build_network2.py") run_example("build_complete.py") run_example("ion_channel_generation.py") -run_example("json_serialization.py") run_example("loading_modifying_writing.py") # This next one 
takes a while to run but all of its functionality is covered in loading_modifying_writing.py # run_example("loading_modifying_writing_large.py") diff --git a/neuroml/examples/test_files/complete.nml b/neuroml/examples/test_files/complete.nml index c6ae5129..ce78e6df 100644 --- a/neuroml/examples/test_files/complete.nml +++ b/neuroml/examples/test_files/complete.nml @@ -42,160 +42,160 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - + - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - + - + diff --git a/neuroml/examples/test_files/complete.nml.h5 b/neuroml/examples/test_files/complete.nml.h5 index f70ae47bbfe936fe6f887577e448343634728ae2..c1b02e5fc2ff14fc4b5dd993bb38873b07d8c093 100644 GIT binary patch delta 154 zcmZ3rh-K9xmJKgC8O=Ap;_Q-SDNRX8oP5F0Ve<~<7$F2N!mLCB%-ejyuS5hP-jRF< riMOKSj4(oOM-MXZ#DXIbxy=){_=rNxdT_D^Dwc4!1j#mu4?>CndIdoC delta 154 zcmZ3rh-K9xmJKgC8O=7o;_Q-S;o~nzn0&#{Ve<~<7$F2N!mLCB%-ejyuS5hP-jRF< riMOKSj4(oOM-MXZ#DXIbxy=){_=rNxdT_D^Dwc4!1j#mu4?>CnW7;`} diff --git a/neuroml/examples/test_files/testh5.nml b/neuroml/examples/test_files/testh5.nml index bbcbf974..232103e7 100644 --- a/neuroml/examples/test_files/testh5.nml +++ b/neuroml/examples/test_files/testh5.nml @@ -37,112 +37,112 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/neuroml/loaders.py b/neuroml/loaders.py index 52aa7254..84ce62b3 100644 --- a/neuroml/loaders.py +++ b/neuroml/loaders.py @@ -146,56 +146,6 @@ def load_swc_single(cls, src, name=None): ) -class JSONLoader(object): - @classmethod - def load(cls, file): - - from jsonpickle import decode as json_decode - - if isinstance(file, str): - fileh = open(file, "r") - else: - fileh = file - - json_string = fileh.read() - unpickled = json_decode(json_string) - fileh.close() - return unpickled - - @classmethod - def load_from_mongodb(cls, db, id, host=None, port=None): - - from pymongo import MongoClient - - try: - import simplejson as json - except ImportError: - import json - - from jsonpickle import decode as json_decode - - if host == None: - host = "localhost" - if port == None: - port = 27017 - - client = MongoClient(host, port) - - db = client[db] - - collection = db[id] - - doc = collection.find_one() - - del doc["_id"] - - doc = json.dumps(doc) - - document = json_decode(doc) - - return document - - class ArrayMorphLoader(object): @classmethod def __extract_morphology(cls, node): diff --git a/neuroml/test/test_writers.py b/neuroml/test/test_writers.py index 4d7a4f52..e1ae6733 100644 --- a/neuroml/test/test_writers.py +++ b/neuroml/test/test_writers.py @@ -14,151 +14,6 @@ import unittest -class TestJSONWriter(unittest.TestCase): - def setUp(self): - num_segments = int(1e4) # Per cell - num_vertices = num_segments + 1 - - x = np.linspace(0, 10, 
num_vertices) - y = np.zeros(num_vertices) - z = np.zeros(num_vertices) - d = np.linspace(1, 0.01, num_vertices) - - vertices = np.array([x, y, z, d]).T - - connectivity = range(-1, num_segments) - - big_arraymorph = am.ArrayMorphology( - vertices=vertices, connectivity=connectivity - ) - transposed_x = x + 10 - transposed_vertices = np.array([transposed_x, y, z, d]).T - - transposed_arraymorph = am.ArrayMorphology( - vertices=transposed_vertices, connectivity=connectivity - ) - - bigger_d = d + 0.5 - fatter_vertices = np.array([x, y, z, bigger_d]).T - - fatter_arraymorph = am.ArrayMorphology( - vertices=fatter_vertices, connectivity=connectivity - ) - - neuroml_cell = neuroml.Cell(id="cell_4") - neuroml_morphology = neuroml.Morphology(id="my_morph") - neuroml_cell.morphology = neuroml_morphology - - self.transposed_arraymorph = transposed_arraymorph - self.fatter_arraymorph = fatter_arraymorph - self.big_arraymorph = big_arraymorph - - self.cell_1 = neuroml.Cell(id="cell_1") - self.cell_2 = neuroml.Cell(id="cell_2") - self.cell_3 = neuroml.Cell(id="cell_3") - - self.cell_1.morphology = transposed_arraymorph - self.cell_2.morphology = fatter_arraymorph - self.cell_3.morphology = big_arraymorph - - self.test_doc = neuroml.NeuroMLDocument(id="TestDocument") - - self.test_doc.cells.append(self.cell_1) - self.test_doc.cells.append(self.cell_2) - self.test_doc.cells.append(self.cell_3) - self.test_doc.cells.append(neuroml_cell) - - def test_write_to_mongodb_backend(self): - writer_method = neuroml.writers.JSONWriter.write_to_mongodb - - try: - writer_method(neuroml_document=self.test_doc, db="mike_test_db3") - except: - self.fail("Exception raised!") - - def test_write_to_mongodb_expected(self): - """ - More of an integration test, write a file and confirm the contents are - as expected. - - TODO: add a teardown - """ - - db_name = "test_db_4" - - writer_method = neuroml.writers.JSONWriter.write_to_mongodb - writer_method(neuroml_document=self.test_doc, db=db_name) - - loader_method = neuroml.loaders.JSONLoader.load_from_mongodb - - doc = loader_method(db=db_name, id=self.test_doc.id, host="localhost") - - array_morph = doc.cells[2].morphology - - connectivity_equal = np.testing.assert_array_equal( - array_morph.connectivity, self.big_arraymorph.connectivity - ) - physical_masks_equal = np.testing.assert_array_equal( - array_morph.physical_mask, self.big_arraymorph.physical_mask - ) - vertices_equal = np.testing.assert_array_equal( - array_morph.vertices, self.big_arraymorph.vertices - ) - - self.assertEqual(connectivity_equal, None) # None when equal - self.assertEqual(physical_masks_equal, None) # None when equal - self.assertEqual(vertices_equal, None) # None when equal - - def test_write_multiple_morphologies(self): - filename = tempfile.mkstemp()[1] - writer_method = neuroml.writers.JSONWriter.write - try: - writer_method(self.test_doc, filename) - except: - self.fail("Exception raised!") - - def test_write_expected(self): - """ - More of an integration test, write a file and confirm the contents are - as expected. 
- """ - - filename = tempfile.mkstemp()[1] - - writer_method = neuroml.writers.JSONWriter.write - writer_method(self.test_doc, filename) - - loader_method = neuroml.loaders.JSONLoader.load - - doc = loader_method(filename) - - array_morph = doc.cells[2].morphology - - connectivity_equal = np.testing.assert_array_equal( - array_morph.connectivity, self.big_arraymorph.connectivity - ) - physical_masks_equal = np.testing.assert_array_equal( - array_morph.physical_mask, self.big_arraymorph.physical_mask - ) - vertices_equal = np.testing.assert_array_equal( - array_morph.vertices, self.big_arraymorph.vertices - ) - - self.assertEqual(connectivity_equal, None) # None when equal - self.assertEqual(physical_masks_equal, None) # None when equal - self.assertEqual(vertices_equal, None) # None when equal - - def test_writer_instance(self): - filename = tempfile.mkstemp()[1] - writer_method = neuroml.writers.JSONWriter.write - writer_method(self.test_doc, filename) - - loader_method = neuroml.loaders.JSONLoader.load - document = loader_method(filename) - - self.assertIsInstance(document, neuroml.NeuroMLDocument) - - class TestNeuroMLWriter(unittest.TestCase): def test_write_nonsense(self): """ diff --git a/neuroml/writers.py b/neuroml/writers.py index e28f9212..e4bd2823 100644 --- a/neuroml/writers.py +++ b/neuroml/writers.py @@ -114,93 +114,6 @@ def write_xml_and_hdf5(cls,nml_doc0,xml_file_name,h5_file_name): nml_doc0.includes.remove(inc)""" -class JSONWriter(object): - """ - Write a NeuroMLDocument to JSON, particularly useful - when dealing with lots of ArrayMorphs. - """ - - @classmethod - def __encode_as_json(cls, neuroml_document): - neuroml_document = cls.__sanitize_doc(neuroml_document) - from jsonpickle import encode as json_encode - - try: - # Enable encoding of numpy arrays with recent jsonpickle versions - import jsonpickle.ext.numpy as jsonpickle_numpy - - jsonpickle_numpy.register_handlers() - except ImportError: - pass # older version of jsonpickle - encoded = json_encode(neuroml_document) - return encoded - - @classmethod - def __sanitize_doc(cls, neuroml_document): - """ - Some operations will need to be performed - before the document is JSON-pickleable. 
- """ - - for cell in neuroml_document.cells: - try: - cell.morphology.vertices = cell.morphology.vertices.tolist() - cell.morphology.physical_mask = cell.morphology.physical_mask.tolist() - cell.morphology.connectivity = cell.morphology.connectivity.tolist() - except: - pass - - return neuroml_document - - @classmethod - def __file_handle(cls, filepath): - if isinstance(filepath, string_types): - import tables - - fileh = tables.open_file(filepath, mode="w") - - @classmethod - def write(cls, neuroml_document, file): - if isinstance(file, string_types): - fileh = open(file, mode="w") - else: - fileh = file - - if isinstance(neuroml_document, neuroml.NeuroMLDocument): - encoded = cls.__encode_as_json(neuroml_document) - - else: - raise NotImplementedError( - "Currently you can only serialize NeuroMLDocument type in JSON format" - ) - - fileh.write(encoded) - fileh.close() - - @classmethod - def write_to_mongodb(cls, neuroml_document, db, host=None, port=None, id=None): - from pymongo import MongoClient - import json - - if id == None: - id = neuroml_document.id - - if host == None: - host = "localhost" - if port == None: - port = 27017 - - client = MongoClient(host, port) - db = client[db] - collection = db[id] - - if isinstance(neuroml_document, neuroml.NeuroMLDocument): - encoded = cls.__encode_as_json(neuroml_document) - - encoded_dict = json.loads(encoded) - collection.insert(encoded_dict) - - class ArrayMorphWriter(object): """ For now just testing a simple method which can write a morphology, not a NeuroMLDocument. diff --git a/requirements.txt b/requirements.txt index 4d4fc3f2..02430e11 100644 --- a/requirements.txt +++ b/requirements.txt @@ -6,4 +6,3 @@ numexpr simplejson; python_version < '3.5' git+git://github.com/PyTables/PyTables/@master#egg=tables ; python_version >= '3.10' tables>=3.3.0 ; python_version < '3.10' -jsonpickle>=0.9.6 diff --git a/setup.py b/setup.py index 587062ca..5dc68e28 100644 --- a/setup.py +++ b/setup.py @@ -28,7 +28,6 @@ "numexpr", "simplejson; python_version < '3.5'", "tables>=3.3.0", - "jsonpickle>=0.9.6", ] }, license="BSD", From 9a671cfc8e32843bf18399a914d83941645ee06a Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Fri, 8 Oct 2021 16:36:54 +0100 Subject: [PATCH 065/136] fix: correct nml.py regeneration in ci --- .github/workflows/regenerate.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/regenerate.yml b/.github/workflows/regenerate.yml index 34e9a5d4..e6faeb87 100644 --- a/.github/workflows/regenerate.yml +++ b/.github/workflows/regenerate.yml @@ -31,7 +31,7 @@ jobs: - name: Rebuild nml.py run: | - cd neuroml/nml && rm -f nml.py && ./rebuild-nml.sh + cd neuroml/nml && rm -f nml.py && ./regenerate-nml.sh -a - name: Build package run: | From dd7303d642cdfd1d9ae1ea801aabec1fc6e18b4b Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Fri, 8 Oct 2021 16:41:27 +0100 Subject: [PATCH 066/136] chore: pin generateds to latest known working version for Py2 --- requirements-dev.txt | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index a5794f0f..d3a0674e 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,5 +1,6 @@ wheel -generateds >= 2.20a +generateds >= 2.20a; python_version > '3.0' +generateds = 2.30.11; python_version = '2.7' cython flake8 pytest From d3d6644f8d501f3780d151d3b349c3710b022b80 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Fri, 8 Oct 2021 16:44:46 +0100 
Subject: [PATCH 067/136] chore: correct version markers in requirements-dev --- requirements-dev.txt | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index d3a0674e..6bf411fb 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,8 +1,8 @@ wheel -generateds >= 2.20a; python_version > '3.0' -generateds = 2.30.11; python_version = '2.7' +generateds >= 2.20a; python_version >= '3.0' +generateds == 2.30.11; python_version == '2.7' cython flake8 pytest numpy -black ; python_version > '3.0' +black ; python_version >= '3.0' From dea3b49e118386e58aaffa7450c74fe592e24aef Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Fri, 8 Oct 2021 16:51:38 +0100 Subject: [PATCH 068/136] fix: tweak regenerate script for py2.7 The version of generateDS we use for Python 2.7 has a slightly different way of taking in the helper method file. It does not require the `.py` suffix. --- neuroml/nml/regenerate-nml.sh | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/neuroml/nml/regenerate-nml.sh b/neuroml/nml/regenerate-nml.sh index 5bb68bde..6c2aa065 100755 --- a/neuroml/nml/regenerate-nml.sh +++ b/neuroml/nml/regenerate-nml.sh @@ -9,6 +9,7 @@ echo "Note(1): Please remember to update the schema from the NeuroML2 repository echo "Note(2): Must be run in neuroml/nml/" NEUROML_VERSION=$(grep -E 'current_neuroml_version.*' ../__init__.py | cut -d '=' -f 2 | tr -d '"' | tr -d ' ') SCHEMA_FILE=NeuroML_${NEUROML_VERSION}.xsd +PYTHON_VERSION=$(python --version) regenerate () { if command -v generateDS > /dev/null 2>&1 @@ -18,7 +19,16 @@ regenerate () { generateDS --version rm -f nml.py - PYTHONPATH="$PYTHONPATH:." generateDS -o nml.py --use-getter-setter=none --user-methods=helper_methods.py $SCHEMA_FILE + + # The version of generateDS for Python 2.7 has a slightly different + # style for arguments. You do not give helper_methods.py, just + # helper_methods + if [ $PYTHON_VERSION =~ "2.7" ] + then + PYTHONPATH="$PYTHONPATH:." generateDS -o nml.py --use-getter-setter=none --user-methods=helper_methods $SCHEMA_FILE + else + PYTHONPATH="$PYTHONPATH:." generateDS -o nml.py --use-getter-setter=none --user-methods=helper_methods.py $SCHEMA_FILE + fi else echo "GenerateDS not installed" echo "Run: pip install generateds" From cd8706185260473642041d5415366dfdef745c70 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Fri, 8 Oct 2021 16:57:45 +0100 Subject: [PATCH 069/136] fix: use correct test function for regex check --- neuroml/nml/regenerate-nml.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/neuroml/nml/regenerate-nml.sh b/neuroml/nml/regenerate-nml.sh index 6c2aa065..e85504c5 100755 --- a/neuroml/nml/regenerate-nml.sh +++ b/neuroml/nml/regenerate-nml.sh @@ -23,7 +23,7 @@ regenerate () { # The version of generateDS for Python 2.7 has a slightly different # style for arguments. You do not give helper_methods.py, just # helper_methods - if [ $PYTHON_VERSION =~ "2.7" ] + if [[ $PYTHON_VERSION =~ "2.7" ]] then PYTHONPATH="$PYTHONPATH:." 
generateDS -o nml.py --use-getter-setter=none --user-methods=helper_methods $SCHEMA_FILE else From e30413a94728849f397b72c025a1b53924cf6647 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Fri, 8 Oct 2021 17:04:00 +0100 Subject: [PATCH 070/136] fix: correct python version extraction --- neuroml/nml/regenerate-nml.sh | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/neuroml/nml/regenerate-nml.sh b/neuroml/nml/regenerate-nml.sh index e85504c5..786027ee 100755 --- a/neuroml/nml/regenerate-nml.sh +++ b/neuroml/nml/regenerate-nml.sh @@ -9,7 +9,7 @@ echo "Note(1): Please remember to update the schema from the NeuroML2 repository echo "Note(2): Must be run in neuroml/nml/" NEUROML_VERSION=$(grep -E 'current_neuroml_version.*' ../__init__.py | cut -d '=' -f 2 | tr -d '"' | tr -d ' ') SCHEMA_FILE=NeuroML_${NEUROML_VERSION}.xsd -PYTHON_VERSION=$(python --version) +PYTHON_VERSION=$(python --version 2>&1) regenerate () { if command -v generateDS > /dev/null 2>&1 @@ -17,13 +17,15 @@ regenerate () { echo "Rebuilding nml.py from ${SCHEMA_FILE} (Version: ${NEUROML_VERSION})" echo "generateds version:" generateDS --version + echo "Python version: $PYTHON_VERSION" rm -f nml.py + # The version of generateDS for Python 2.7 has a slightly different # style for arguments. You do not give helper_methods.py, just # helper_methods - if [[ $PYTHON_VERSION =~ "2.7" ]] + if [[ "$PYTHON_VERSION" =~ "2.7" ]] then PYTHONPATH="$PYTHONPATH:." generateDS -o nml.py --use-getter-setter=none --user-methods=helper_methods $SCHEMA_FILE else From c880d4920252ec844ee2a8537c3d5ce20d53e55e Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Fri, 8 Oct 2021 17:04:10 +0100 Subject: [PATCH 071/136] chore: correct black command check --- neuroml/nml/regenerate-nml.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/neuroml/nml/regenerate-nml.sh b/neuroml/nml/regenerate-nml.sh index 786027ee..ed403259 100755 --- a/neuroml/nml/regenerate-nml.sh +++ b/neuroml/nml/regenerate-nml.sh @@ -39,7 +39,7 @@ regenerate () { } reformat () { - if command -v generateDS > /dev/null 2>&1 + if command -v black > /dev/null 2>&1 then echo "Formatting new nml.py with black" black nml.py From 3959a8922021abbca33e98a4e10c399e492ea510 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Fri, 8 Oct 2021 17:06:27 +0100 Subject: [PATCH 072/136] fix: remove exit statements --- neuroml/nml/regenerate-nml.sh | 3 --- 1 file changed, 3 deletions(-) diff --git a/neuroml/nml/regenerate-nml.sh b/neuroml/nml/regenerate-nml.sh index ed403259..ca25dc82 100755 --- a/neuroml/nml/regenerate-nml.sh +++ b/neuroml/nml/regenerate-nml.sh @@ -34,7 +34,6 @@ regenerate () { else echo "GenerateDS not installed" echo "Run: pip install generateds" - exit 1 fi } @@ -43,11 +42,9 @@ reformat () { then echo "Formatting new nml.py with black" black nml.py - exit 0 else echo "black is not installed" echo "Run: pip install black" - exit 1 fi } From ac95271656ad36cd7d9a836fde4fd87f7834b8c6 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Fri, 8 Oct 2021 17:36:30 +0100 Subject: [PATCH 073/136] feat: make CI fail if generateDS is not found --- neuroml/nml/regenerate-nml.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/neuroml/nml/regenerate-nml.sh b/neuroml/nml/regenerate-nml.sh index ca25dc82..dddb75cb 100755 --- a/neuroml/nml/regenerate-nml.sh +++ b/neuroml/nml/regenerate-nml.sh @@ -34,6 +34,7 @@ regenerate () { else echo "GenerateDS not installed" echo "Run: pip install 
generateds" + exit 1 fi } From 5cf468ed968cca6f3c9c8bad1bf75c0f4734ca15 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Fri, 8 Oct 2021 19:10:29 +0100 Subject: [PATCH 074/136] fix: raise TypeError if file read is not a neuroml file Fixes #113 --- neuroml/loaders.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/neuroml/loaders.py b/neuroml/loaders.py index 52aa7254..6d0c3c7b 100644 --- a/neuroml/loaders.py +++ b/neuroml/loaders.py @@ -25,7 +25,10 @@ class NeuroMLLoader(object): @classmethod def load(cls, src): doc = cls.__nml2_doc(src) - return doc + if isinstance(doc, neuroml.nml.nml.NeuroMLDocument): + return doc + else: + raise TypeError("{} does not appear to be a NeuroML Document.".format(src)) @classmethod def __nml2_doc(cls, file_name): From b7aea153eb598aef135d8975268d6dabd2a8fa30 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Fri, 8 Oct 2021 19:11:14 +0100 Subject: [PATCH 075/136] feat: add test for loader reading a non-NeuroML file --- neuroml/examples/test_files/sbml-example.xml | 2404 ++++++++++++++++++ neuroml/test/test_loaders.py | 11 + 2 files changed, 2415 insertions(+) create mode 100644 neuroml/examples/test_files/sbml-example.xml diff --git a/neuroml/examples/test_files/sbml-example.xml b/neuroml/examples/test_files/sbml-example.xml new file mode 100644 index 00000000..dae4e7fd --- /dev/null +++ b/neuroml/examples/test_files/sbml-example.xml @@ -0,0 +1,2404 @@ + + + + + +

This is a model from the article:
+ Mathematical model of the morphogenesis checkpoint in budding yeast. +
+ Ciliberto A, Novak B, Tyson JJ J. Cell Biol. + [2003 Dec; Volume: 163 (Issue: 6 )] Page info: 1243-54 14691135 + ,
+ Abstract: +
+ The morphogenesis checkpoint in budding yeast delays progression through the cell cycle in response to stimuli that prevent bud formation. Central to the checkpoint mechanism is Swe1 kinase: normally inactive, its activation halts cell cycle progression in G2. We propose a molecular network for Swe1 control, based on published observations of budding yeast and analogous control signals in fission yeast. The proposed Swe1 network is merged with a model of cyclin-dependent kinase regulation, converted into a set of differential equations and studied by numerical simulation. The simulations accurately reproduce the phenotypes of a dozen checkpoint mutants. Among other predictions, the model attributes a new role to Hsl1, a kinase known to play a role in Swe1 degradation: Hsl1 must also be indirectly responsible for potent inhibition of Swe1 activity. The model supports the idea that the morphogenesis checkpoint, like other checkpoints, raises the cell size threshold for progression from one phase of the cell cycle to the next.

+

+ The model reproduces Fig 3 of the paper.

+

+ This model originates from BioModels Database: A Database of Annotated Published Models (http://www.ebi.ac.uk/biomodels/). It is copyright (c) 2005-2011 The BioModels.net Team.
+ For more information see the terms of use + .
+ To cite BioModels Database, please use: Li C, Donizelli M, Rodriguez N, Dharuri H, Endler L, Chelliah V, Li L, He E, Henry A, Stefan MI, Snoep JL, Hucka M, Le Novère N, Laibe C (2010) BioModels Database: An enhanced, curated and annotated resource for published quantitative kinetic models. BMC Syst Biol., 4:92. +

[remainder of sbml-example.xml: the model annotation (creators Dharuri and Chelliah, creation and modification dates) followed by the full SBML model definition, with species and parameters such as Swe1, Swe1M, PSwe1, Clb, Sic, Cdh1, Cdc20a, Mih1a, Mcm, SBF, IE, BUD and mass, rate constants such as kswe_prime, kdiss and kass, assignment rules, and reactions with their kinetic laws in MathML]
\ No newline at end of file diff --git a/neuroml/test/test_loaders.py b/neuroml/test/test_loaders.py index b960a121..8e98d4bf 100644 --- a/neuroml/test/test_loaders.py +++ b/neuroml/test/test_loaders.py @@ -27,6 +27,17 @@ def test_load_neuroml(self): f.close() print("Finished test") + def test_non_neuroml_file(self): + """Test an non-NeuroML document.""" + root_dir = os.path.dirname(neuroml.__file__) + test_file_path = os.path.join(root_dir, "examples/test_files/sbml-example.xml") + print("test file path is: " + test_file_path) + + try: + loaders.NeuroMLLoader.load(test_file_path) + except TypeError: + print("Exception raised. Test passes.") + if __name__ == "__main__": t = TestNeuroMLLoader() From f201ac9e972ba33e57a63379580d763832eafafb Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Mon, 11 Oct 2021 18:32:56 +0100 Subject: [PATCH 076/136] feat: improve error message --- neuroml/loaders.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/neuroml/loaders.py b/neuroml/loaders.py index 6d0c3c7b..db006b45 100644 --- a/neuroml/loaders.py +++ b/neuroml/loaders.py @@ -28,7 +28,7 @@ def load(cls, src): if isinstance(doc, neuroml.nml.nml.NeuroMLDocument): return doc else: - raise TypeError("{} does not appear to be a NeuroML Document.".format(src)) + raise TypeError("{} does not appear to be a NeuroML Document. NeuroML documents must be contained in a tag.".format(src)) @classmethod def __nml2_doc(cls, file_name): From e300042b3681924819175d608518849a3a31d058 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Wed, 13 Oct 2021 13:32:11 +0100 Subject: [PATCH 077/136] feat: add method to add child/children elements to objects - also adds a corresponding method to get some information about them The function is added to `BaseWithoutId` and so should be inherited by everything. The `add` method: - gets the list of the members defined by generateDS for each each class - checks the type of the object being added to the types of the members to find the right member to add to. If a member with the type cannot be found, an Exception is raised. - if a member is found, it can either be a child member (can only have one value) or a children member (can have multiple values in a list). - For child members a check is made to see if a value is already set. If not, the value is set to the given object. - For children members, a check is made to see if the object has already been added. If not, the object is added. - If either test fails, an Exception is raised (since one does not need to re-add objects to update their value) - A `force` option allows overwriting and readdition, but is set to `False` by default. So, example usage would be of this type: ``` cell = neuroml.Cell(id="acell") biprop = neuroml.BiophysicalProperties(id="biprop") cell.add(biprop) # success cell.add(biprop) # will throw an Exception unless `force=True` cell.add("a string") # will throw an Exception since no member has a type "str" ``` Note: we have a few ComponentTypes which don't inherit from anywhere, and we need to check if these are correctly defined in the XSD file (Location is an example). 
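A similar hedged sketch for the companion information helper added in this commit (it uses only the `get_members_info` signature defined in the diff below):

```
import neuroml

cell = neuroml.Cell(id="acell")

# Prints one "name (class: Type)" line per member defined by generateDS.
cell.get_members_info()

# Also print the current contents of each member.
cell.get_members_info(show_contents=True)
```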
--- neuroml/nml/helper_methods.py | 95 +++++++++++++++++++++++++++++++++++ 1 file changed, 95 insertions(+) diff --git a/neuroml/nml/helper_methods.py b/neuroml/nml/helper_methods.py index bc565e98..bd6b797b 100644 --- a/neuroml/nml/helper_methods.py +++ b/neuroml/nml/helper_methods.py @@ -222,6 +222,99 @@ def surface_area(self): class_names=("Segment"), ) +generic_add = MethodSpec( + name="add", + source='''\ + + def add(self, obj, force=False): + """Generic function to allow easy addition of a new member to a NeuroML object. + + :param obj: object member to add + :type obj: any NeuroML Type defined by the API + :param force: boolean to force addition when an obj has already been added previously + :type force: bool + + :raises Exception: if a member compatible to obj could not be found + :raises Exception: if a member that takes a single value is already set (and force is not set to True) + :raises Exception: if a member that takes a list already includes obj (and force is not set to True) + """ + # getattr only returns the value of the provided member but one cannot + # then use this to modify the member. Using `vars` also allows us to + # modify the value + found = False + for member in self.member_data_items_: + # get_data_type() returns the type as a string, e.g.: 'IncludeType' + if member.get_data_type() == type(obj).__name__: + # A single value, not a list: + if member.get_container() == 0: + if vars(self)[member.get_name()]: + if force: + vars(self)[member.get_name()] = obj + else: + raise Exception("""{} has already been assigned. + Use `force=True` to overwrite. Hint: you can make + changes to the already added object as required + without needing to re-add it because only + references to the objects are added, not their + values.""".format(member.get_name())) + else: + vars(self)[member.get_name()] = obj + print("Added {} to {}".format(obj, member.get_name())) + # List + else: + if obj in vars(self)[member.get_name()]: + if force: + vars(self)[member.get_name()].append(obj) + else: + raise Exception("""{} already exists in {}. Use + `force=True` to force readdition. Hint: you can + make changes to the already added object as + required without needing to re-add it because only + references to the objects are added, not their + values.""".format(obj.id, member.get_name())) + else: + vars(self)[member.get_name()].append(obj) + found = True + break + if not found: + e = Exception( + """A member object of {} type could not be found in this class. + Please check the NeuroML schema at https://docs.neuroml.org to + confirm that this member belongs to this NeuroML element.""".format(type(obj).__name__)) + raise e + ''', + class_names=("BaseWithoutId"), +) + +generic_list = MethodSpec( + name="get_members_info", + source='''\ + + def get_members_info(self, show_contents=False): + """A helper function to get a list of members of this class. + + This is useful to quickly check what members can go into a particular + NeuroML class (which will match the Schema definitions). It lists these + members and notes whether they are "single" type elements (Child + elements) or "List" elements (Children elements). It will also note + whether a member is optional or required. + + See http://www.davekuhlman.org/generateDS.html#user-methods for more + information on the MemberSpec_ class that generateDS uses. 
+ + :param show_contents: also prints out the contents of the members + :type show_contents: bool + """ + + for member in self.member_data_items_: + print("{} (class: {})".format(member.name, member.data_type)) + if show_contents: + contents = getattr(self, member.get_name()) + print("Contents: {}".format(contents)) + + ''', + class_names=("BaseWithoutId"), +) # # Provide a list of your method specifications. # This list of specifications must be named METHOD_SPECS. @@ -231,6 +324,8 @@ def surface_area(self): volume, surface_area, num_segments, + generic_add, + generic_list ) From 6ab810c2210893c5243aa597f52b18df3448029c Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Wed, 13 Oct 2021 13:36:21 +0100 Subject: [PATCH 078/136] chore: regenerate nml.py --- neuroml/nml/nml.py | 95 ++++++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 92 insertions(+), 3 deletions(-) diff --git a/neuroml/nml/nml.py b/neuroml/nml/nml.py index f3f842da..613cf5d2 100644 --- a/neuroml/nml/nml.py +++ b/neuroml/nml/nml.py @@ -2,8 +2,8 @@ # -*- coding: utf-8 -*- # -# Generated Fri Oct 8 15:17:16 2021 by generateDS.py version 2.40.3. -# Python 3.10.0rc2 (default, Sep 8 2021, 00:00:00) [GCC 11.2.1 20210728 (Red Hat 11.2.1-1)] +# Generated Wed Oct 13 13:33:32 2021 by generateDS.py version 2.40.3. +# Python 3.10.0 (default, Oct 4 2021, 00:00:00) [GCC 11.2.1 20210728 (Red Hat 11.2.1-1)] # # Command line options: # ('-o', 'nml.py') @@ -14475,8 +14475,97 @@ def _buildChildren( ): pass + def add(self, obj, force=False): + """Generic function to allow easy addition of a new member to a NeuroML object. -# end class BaseWithoutId + :param obj: object member to add + :type obj: any NeuroML Type defined by the API + :param force: boolean to force addition when an obj has already been added previously + :type force: bool + + :raises Exception: if a member compatible to obj could not be found + :raises Exception: if a member that takes a single value is already set (and force is not set to True) + :raises Exception: if a member that takes a list already includes obj (and force is not set to True) + """ + # getattr only returns the value of the provided member but one cannot + # then use this to modify the member. Using `vars` also allows us to + # modify the value + found = False + for member in self.member_data_items_: + # get_data_type() returns the type as a string, e.g.: 'IncludeType' + if member.get_data_type() == type(obj).__name__: + # A single value, not a list: + if member.get_container() == 0: + if vars(self)[member.get_name()]: + if force: + vars(self)[member.get_name()] = obj + else: + raise Exception( + """{} has already been assigned. + Use `force=True` to overwrite. Hint: you can make + changes to the already added object as required + without needing to re-add it because only + references to the objects are added, not their + values.""".format( + member.get_name() + ) + ) + else: + vars(self)[member.get_name()] = obj + print("Added {} to {}".format(obj, member.get_name())) + # List + else: + if obj in vars(self)[member.get_name()]: + if force: + vars(self)[member.get_name()].append(obj) + else: + raise Exception( + """{} already exists in {}. Use + `force=True` to force readdition. 
Hint: you can + make changes to the already added object as + required without needing to re-add it because only + references to the objects are added, not their + values.""".format( + obj.id, member.get_name() + ) + ) + else: + vars(self)[member.get_name()].append(obj) + found = True + break + if not found: + e = Exception( + """A member object of {} type could not be found in this class. + Please check the NeuroML schema at https://docs.neuroml.org to + confirm that this member belongs to this NeuroML element.""".format( + type(obj).__name__ + ) + ) + raise e + + def get_members_info(self, show_contents=False): + """A helper function to get a list of members of this class. + + This is useful to quickly check what members can go into a particular + NeuroML class (which will match the Schema definitions). It lists these + members and notes whether they are "single" type elements (Child + elements) or "List" elements (Children elements). It will also note + whether a member is optional or required. + + See http://www.davekuhlman.org/generateDS.html#user-methods for more + information on the MemberSpec_ class that generateDS uses. + + :param show_contents: also prints out the contents of the members + :type show_contents: bool + """ + + for member in self.member_data_items_: + print("{} (class: {})".format(member.name, member.data_type)) + if show_contents: + contents = getattr(self, member.get_name()) + print("Contents: {}".format(contents)) + + # end class BaseWithoutId class BaseNonNegativeIntegerId(BaseWithoutId): From 06329f01da6c0898c44c84c500db525d654e478a Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Wed, 13 Oct 2021 13:48:38 +0100 Subject: [PATCH 079/136] chore: reformat Exception strings --- neuroml/nml/helper_methods.py | 14 ++------------ neuroml/nml/nml.py | 16 +++------------- 2 files changed, 5 insertions(+), 25 deletions(-) diff --git a/neuroml/nml/helper_methods.py b/neuroml/nml/helper_methods.py index bd6b797b..6049fc2e 100644 --- a/neuroml/nml/helper_methods.py +++ b/neuroml/nml/helper_methods.py @@ -251,12 +251,7 @@ def add(self, obj, force=False): if force: vars(self)[member.get_name()] = obj else: - raise Exception("""{} has already been assigned. - Use `force=True` to overwrite. Hint: you can make - changes to the already added object as required - without needing to re-add it because only - references to the objects are added, not their - values.""".format(member.get_name())) + raise Exception("""{} has already been assigned. Use `force=True` to overwrite. Hint: you can make changes to the already added object as required without needing to re-add it because only references to the objects are added, not their values.""".format(member.get_name())) else: vars(self)[member.get_name()] = obj print("Added {} to {}".format(obj, member.get_name())) @@ -266,12 +261,7 @@ def add(self, obj, force=False): if force: vars(self)[member.get_name()].append(obj) else: - raise Exception("""{} already exists in {}. Use - `force=True` to force readdition. Hint: you can - make changes to the already added object as - required without needing to re-add it because only - references to the objects are added, not their - values.""".format(obj.id, member.get_name())) + raise Exception("""{} already exists in {}. Use `force=True` to force readdition. 
Hint: you can make changes to the already added object as required without needing to re-add it because only references to the objects are added, not their values.""".format(obj.id, member.get_name())) else: vars(self)[member.get_name()].append(obj) found = True diff --git a/neuroml/nml/nml.py b/neuroml/nml/nml.py index 613cf5d2..b3c9ed7d 100644 --- a/neuroml/nml/nml.py +++ b/neuroml/nml/nml.py @@ -2,7 +2,7 @@ # -*- coding: utf-8 -*- # -# Generated Wed Oct 13 13:33:32 2021 by generateDS.py version 2.40.3. +# Generated Wed Oct 13 13:47:40 2021 by generateDS.py version 2.40.3. # Python 3.10.0 (default, Oct 4 2021, 00:00:00) [GCC 11.2.1 20210728 (Red Hat 11.2.1-1)] # # Command line options: @@ -14501,12 +14501,7 @@ def add(self, obj, force=False): vars(self)[member.get_name()] = obj else: raise Exception( - """{} has already been assigned. - Use `force=True` to overwrite. Hint: you can make - changes to the already added object as required - without needing to re-add it because only - references to the objects are added, not their - values.""".format( + """{} has already been assigned. Use `force=True` to overwrite. Hint: you can make changes to the already added object as required without needing to re-add it because only references to the objects are added, not their values.""".format( member.get_name() ) ) @@ -14520,12 +14515,7 @@ def add(self, obj, force=False): vars(self)[member.get_name()].append(obj) else: raise Exception( - """{} already exists in {}. Use - `force=True` to force readdition. Hint: you can - make changes to the already added object as - required without needing to re-add it because only - references to the objects are added, not their - values.""".format( + """{} already exists in {}. Use `force=True` to force readdition. Hint: you can make changes to the already added object as required without needing to re-add it because only references to the objects are added, not their values.""".format( obj.id, member.get_name() ) ) From 99a951763870aebc889dea6a8325de87220838d2 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Wed, 13 Oct 2021 15:35:55 +0100 Subject: [PATCH 080/136] feat: tweak member_info method to also return the string This will be useful for testing purposes. 
--- neuroml/nml/helper_methods.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/neuroml/nml/helper_methods.py b/neuroml/nml/helper_methods.py index 6049fc2e..a5f46c1a 100644 --- a/neuroml/nml/helper_methods.py +++ b/neuroml/nml/helper_methods.py @@ -294,14 +294,19 @@ def get_members_info(self, show_contents=False): :param show_contents: also prints out the contents of the members :type show_contents: bool + + :returns: the string (for testing purposes) """ + info_str = "" for member in self.member_data_items_: - print("{} (class: {})".format(member.name, member.data_type)) + info_str += ("{} (class: {})\\n".format(member.name, member.data_type)) if show_contents: contents = getattr(self, member.get_name()) - print("Contents: {}".format(contents)) + info_str += ("Contents: {}\\n\\n".format(contents)) + print(info_str) + return info_str ''', class_names=("BaseWithoutId"), ) From 1394d12980ad7e53e0442e655c8fec843f1ce2b0 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Wed, 13 Oct 2021 15:36:17 +0100 Subject: [PATCH 081/136] chore: regenerate nml.py --- neuroml/nml/nml.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/neuroml/nml/nml.py b/neuroml/nml/nml.py index b3c9ed7d..a7f50eca 100644 --- a/neuroml/nml/nml.py +++ b/neuroml/nml/nml.py @@ -2,7 +2,7 @@ # -*- coding: utf-8 -*- # -# Generated Wed Oct 13 13:47:40 2021 by generateDS.py version 2.40.3. +# Generated Wed Oct 13 15:25:38 2021 by generateDS.py version 2.40.3. # Python 3.10.0 (default, Oct 4 2021, 00:00:00) [GCC 11.2.1 20210728 (Red Hat 11.2.1-1)] # # Command line options: @@ -14547,13 +14547,19 @@ def get_members_info(self, show_contents=False): :param show_contents: also prints out the contents of the members :type show_contents: bool + + :returns: the string (for testing purposes) """ + info_str = "" for member in self.member_data_items_: - print("{} (class: {})".format(member.name, member.data_type)) + info_str += "{} (class: {})\n".format(member.name, member.data_type) if show_contents: contents = getattr(self, member.get_name()) - print("Contents: {}".format(contents)) + info_str += "Contents: {}\n\n".format(contents) + + print(info_str) + return info_str # end class BaseWithoutId From 62a7cc42713ca4c9af9f851271cb18e048575249 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Wed, 13 Oct 2021 15:36:28 +0100 Subject: [PATCH 082/136] test(nml.py): add basic tests for new helper functions - get_member_info - add --- neuroml/test/test_nml.py | 64 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 64 insertions(+) create mode 100644 neuroml/test/test_nml.py diff --git a/neuroml/test/test_nml.py b/neuroml/test/test_nml.py new file mode 100644 index 00000000..8908a098 --- /dev/null +++ b/neuroml/test/test_nml.py @@ -0,0 +1,64 @@ +#!/usr/bin/env python3 +""" +Enter one line description here. + +File: + +Copyright 2021 Ankur Sinha +Author: Ankur Sinha +""" + + +import neuroml +try: + import unittest2 as unittest +except ImportError: + import unittest + + +class TestNML(unittest.TestCase): + + """Tests related to nml.py""" + + def test_generic_add(self): + """Test the generic add function. 
+ :returns: TODO + + """ + doc = neuroml.NeuroMLDocument(id="testdoc") + cell = neuroml.Cell(id="testcell") + cell1 = neuroml.Cell(id="testcell1") + biprop = neuroml.BiophysicalProperties(id="biprops") + net = neuroml.Network(id="net") + + # Success: returns nothing (None) + self.assertIsNone(doc.add(cell)) + + # Already added, so throw exception + with self.assertRaises(Exception): + doc.add(cell) + + # Success + self.assertIsNone(doc.add(cell1)) + + # str is not the type for any member + with self.assertRaises(Exception): + doc.add("A STRING") + + # success + self.assertIsNone(cell.add(biprop)) + self.assertIsNone(cell1.add(biprop)) + + # failures + with self.assertRaises(Exception): + cell.add(net) + with self.assertRaises(Exception): + biprop.add(net) + + def test_member_info(self): + """Test getting member info.""" + cell = neuroml.Cell(id="testcell") + info = cell.get_members_info() + self.assertRegex(info, 'morphology') + self.assertRegex(info, 'biophysical_properties') + self.assertNotRegex(info, 'network') From daf37629b82b42ca28de71e62e7c701c2aa02a7a Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Thu, 14 Oct 2021 15:52:43 +0100 Subject: [PATCH 083/136] feat: add gitter chat badge --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index fb1a24ed..2f7d8cab 100644 --- a/README.md +++ b/README.md @@ -10,6 +10,7 @@ [![GitHub issues](https://img.shields.io/github/issues/NeuralEnsemble/libNeuroML)](https://github.com/NeuralEnsemble/libNeuroML/issues) [![GitHub Org's stars](https://img.shields.io/github/stars/NeuralEnsemble?style=social)](https://github.com/NeuralEnsemble) [![Twitter Follow](https://img.shields.io/twitter/follow/NeuroML?style=social)](https://twitter.com/NeuroML) +[![Gitter](https://badges.gitter.im/NeuroML/community.svg)](https://gitter.im/NeuroML/community?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge) This package provides Python libNeuroML, for working with neuronal models specified in [NeuroML 2](http://neuroml.org/neuromlv2). From 656c45b4d6236631b6e8dded65b8641e2ce64a20 Mon Sep 17 00:00:00 2001 From: Padraig Gleeson Date: Mon, 18 Oct 2021 16:59:15 +0100 Subject: [PATCH 084/136] To v0.2.58 Some more files in black --- doc/conf.py | 152 ++++++++++++++++++++--------------- doc/helpers/nml-core-docs.py | 29 +++++-- neuroml/__init__.py | 2 +- neuroml/loaders.py | 6 +- 4 files changed, 112 insertions(+), 77 deletions(-) diff --git a/doc/conf.py b/doc/conf.py index c981929c..712fad3e 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -17,41 +17,45 @@ # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. -#sys.path.insert(0, os.path.abspath('.')) +# sys.path.insert(0, os.path.abspath('.')) -sys.path.insert(0, os.path.abspath('../neuroml')) -sys.path.insert(0, os.path.abspath('..')) +sys.path.insert(0, os.path.abspath("../neuroml")) +sys.path.insert(0, os.path.abspath("..")) # -- General configuration ----------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. -#needs_sphinx = '1.0' +# needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. 
-extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.todo', - 'sphinxcontrib.bibtex'] +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.doctest", + "sphinx.ext.todo", + "sphinxcontrib.bibtex", +] -bibtex_bibfiles = ['refs.bib'] +bibtex_bibfiles = ["refs.bib"] # Include TODOs in docs todo_include_todos = True # Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] +templates_path = ["_templates"] # The suffix of source filenames. -source_suffix = '.rst' +source_suffix = ".rst" # The encoding of source files. -#source_encoding = 'utf-8-sig' +# source_encoding = 'utf-8-sig' # The master toctree document. -master_doc = 'index' +master_doc = "index" # General information about the project. -project = u'libNeuroML' -copyright = u'2021, libNeuroML authors and contributors' +project = u"libNeuroML" +copyright = u"2021, libNeuroML authors and contributors" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -59,11 +63,11 @@ # # The short X.Y version. version = "" -for aline in open('../neuroml/__init__.py'): +for aline in open("../neuroml/__init__.py"): # space here is important since __version__ is used in generation of # version_info also - if '__version__ =' in aline: - version = aline.split("\"")[1] + if "__version__ =" in aline: + version = aline.split('"')[1] # The full version, including alpha/beta/rc tags. release = version @@ -77,44 +81,44 @@ # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. -#language = None +# language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: -#today = '' +# today = '' # Else, today_fmt is used as the format for a strftime call. -#today_fmt = '%B %d, %Y' +# today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. -exclude_patterns = ['_build'] +exclude_patterns = ["_build"] # The reST default role (used for this markup: `text`) to use for all documents. -#default_role = None +# default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. -#add_function_parentheses = True +# add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). -#add_module_names = True +# add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. -#show_authors = False +# show_authors = False # The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' +pygments_style = "sphinx" # A list of ignored prefixes for module index sorting. -#modindex_common_prefix = [] +# modindex_common_prefix = [] # -- Options for HTML output --------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. -html_theme = 'pydata_sphinx_theme' +html_theme = "pydata_sphinx_theme" # Theme options are theme-specific and customize the look and feel of a theme # further. 
For a list of options available for each theme, see the @@ -124,10 +128,8 @@ "use_edit_page_button": True, "show_toc_level": 1, "external_links": [ - { - "name": "NeuroML Documentation", "url": "https://docs.neuroml.org" - }, - ] + {"name": "NeuroML Documentation", "url": "https://docs.neuroml.org"}, + ], } html_context = { @@ -137,72 +139,72 @@ } # Add any paths that contain custom themes here, relative to this directory. -#html_theme_path = [] +# html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". -#html_title = None +# html_title = None # A shorter title for the navigation bar. Default is the same as html_title. -#html_short_title = None +# html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. -html_logo = '_static/neuroml_logo.png' +html_logo = "_static/neuroml_logo.png" # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. -#html_favicon = None +# html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['_static'] +html_static_path = ["_static"] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. -#html_last_updated_fmt = '%b %d, %Y' +# html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. -#html_use_smartypants = True +# html_use_smartypants = True # Custom sidebar templates, maps document names to template names. -#html_sidebars = {} +# html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. -#html_additional_pages = {} +# html_additional_pages = {} # If false, no module index is generated. -#html_domain_indices = True +# html_domain_indices = True # If false, no index is generated. -#html_use_index = True +# html_use_index = True # If true, the index is split into individual pages for each letter. -#html_split_index = False +# html_split_index = False # If true, links to the reST sources are added to the pages. -#html_show_sourcelink = True +# html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -#html_show_sphinx = True +# html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -#html_show_copyright = True +# html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. -#html_use_opensearch = '' +# html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). -#html_file_suffix = None +# html_file_suffix = None # Output file base name for HTML help builder. -htmlhelp_basename = 'libNeuroMLdoc' +htmlhelp_basename = "libNeuroMLdoc" # -- Options for LaTeX output -------------------------------------------------- @@ -220,29 +222,34 @@ # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). 
latex_documents = [ - ('index', 'libNeuroML.tex', 'libNeuroML Documentation', - 'libNeuroML authors and contributors', 'manual'), + ( + "index", + "libNeuroML.tex", + "libNeuroML Documentation", + "libNeuroML authors and contributors", + "manual", + ), ] # The name of an image file (relative to this directory) to place at the top of # the title page. -#latex_logo = None +# latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. -#latex_use_parts = False +# latex_use_parts = False # If true, show page references after internal links. -#latex_show_pagerefs = False +# latex_show_pagerefs = False # If true, show URL addresses after external links. -#latex_show_urls = False +# latex_show_urls = False # Documents to append as an appendix to all manuals. -#latex_appendices = [] +# latex_appendices = [] # If false, no module index is generated. -#latex_domain_indices = True +# latex_domain_indices = True # -- Options for manual page output -------------------------------------------- @@ -250,12 +257,17 @@ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ - ('index', 'libneuroml', 'libNeuroML Documentation', - ['libNeuroML authors and contributors'], 1) + ( + "index", + "libneuroml", + "libNeuroML Documentation", + ["libNeuroML authors and contributors"], + 1, + ) ] # If true, show URL addresses after external links. -#man_show_urls = False +# man_show_urls = False # -- Options for Texinfo output ------------------------------------------------ @@ -264,16 +276,22 @@ # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ - ('index', 'libNeuroML', 'libNeuroML Documentation', - 'libNeuroML authors and contributors', 'libNeuroML', 'This package provides libNeuroML for working with neuronal models specified in NeuroML 2.', - 'Miscellaneous'), + ( + "index", + "libNeuroML", + "libNeuroML Documentation", + "libNeuroML authors and contributors", + "libNeuroML", + "This package provides libNeuroML for working with neuronal models specified in NeuroML 2.", + "Miscellaneous", + ), ] # Documents to append as an appendix to all manuals. -#texinfo_appendices = [] +# texinfo_appendices = [] # If false, no module index is generated. -#texinfo_domain_indices = True +# texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. 
-#texinfo_show_urls = 'footnote' +# texinfo_show_urls = 'footnote' diff --git a/doc/helpers/nml-core-docs.py b/doc/helpers/nml-core-docs.py index a59aa3c4..cb970b8d 100644 --- a/doc/helpers/nml-core-docs.py +++ b/doc/helpers/nml-core-docs.py @@ -12,24 +12,35 @@ import textwrap -excluded_classes = ['GDSParseError', 'MixedContainer', 'MemberSpec_', - 'BlockTypes', 'Metric', 'PlasticityTypes', 'ZeroOrOne', 'allowedSpaces', - 'channelTypes', 'gateTypes', 'networkTypes', 'populationTypes'] +excluded_classes = [ + "GDSParseError", + "MixedContainer", + "MemberSpec_", + "BlockTypes", + "Metric", + "PlasticityTypes", + "ZeroOrOne", + "allowedSpaces", + "channelTypes", + "gateTypes", + "networkTypes", + "populationTypes", +] classes = [] -with open("../../neuroml/nml/nml.py", 'r') as file: +with open("../../neuroml/nml/nml.py", "r") as file: lines = file.readlines() for line in lines: if line.startswith("class"): # get the class signature - aclass = line[len("class "):] - aclass = aclass.split('(')[0].split(':')[0] + aclass = line[len("class ") :] + aclass = aclass.split("(")[0].split(":")[0] if aclass not in excluded_classes: classes.append(aclass) classes.sort() -with open("../userdocs/coreclasses_list.txt", 'w') as fwrite: +with open("../userdocs/coreclasses_list.txt", "w") as fwrite: print(".. Generated using nml-core-docs.py", file=fwrite) for aclass in classes: towrite = textwrap.dedent( @@ -42,7 +53,9 @@ :undoc-members: """.format( - aclass, "#" * len(aclass), aclass, + aclass, + "#" * len(aclass), + aclass, ) ) print(towrite, file=fwrite) diff --git a/neuroml/__init__.py b/neuroml/__init__.py index 80bde0e5..791361a7 100644 --- a/neuroml/__init__.py +++ b/neuroml/__init__.py @@ -1,6 +1,6 @@ from .nml.nml import * # allows importation of all neuroml classes -__version__ = "0.2.57" +__version__ = "0.2.58" __version_info__ = tuple(int(i) for i in __version__.split(".")) diff --git a/neuroml/loaders.py b/neuroml/loaders.py index db006b45..3f20346d 100644 --- a/neuroml/loaders.py +++ b/neuroml/loaders.py @@ -28,7 +28,11 @@ def load(cls, src): if isinstance(doc, neuroml.nml.nml.NeuroMLDocument): return doc else: - raise TypeError("{} does not appear to be a NeuroML Document. NeuroML documents must be contained in a tag.".format(src)) + raise TypeError( + "{} does not appear to be a NeuroML Document. NeuroML documents must be contained in a tag.".format( + src + ) + ) @classmethod def __nml2_doc(cls, file_name): From cf9641640aa70a74a38fd4d1014552052407dfb9 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Mon, 18 Oct 2021 17:37:55 +0100 Subject: [PATCH 085/136] ci: ignore nml.py for flake --- .github/workflows/ci.yml | 4 ++-- setup.cfg | 4 +++- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index b2d30600..e4346baf 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -47,6 +47,6 @@ jobs: - name: Lint with flake8 run: | # stop the build if there are Python syntax errors or undefined names - flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics + flake8 . --count --select=E9,F63,F7,F82 --exclude=neuroml/nml/nml.py --show-source --statistics # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide - flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics + flake8 . 
--count --exit-zero --exclude=neuroml/nml/nml.py --max-complexity=10 --max-line-length=127 --statistics diff --git a/setup.cfg b/setup.cfg index bfeeeedd..61139e22 100644 --- a/setup.cfg +++ b/setup.cfg @@ -3,4 +3,6 @@ # spacing around operators, comment blocks, in argument lists # lines too long ignore = E501,E502,F403,F405,E231,E228,E225,E226,E265,E261 -exclude = neuroml/nml/nml.py,doc +exclude = + neuroml/nml/nml.py, + doc From e2b565cab1b67ca209257e8a80476f11cf0ba315 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Mon, 18 Oct 2021 17:42:09 +0100 Subject: [PATCH 086/136] ci: also ignore build dir --- .github/workflows/ci.yml | 4 ++-- setup.cfg | 4 +++- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index e4346baf..b2d30600 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -47,6 +47,6 @@ jobs: - name: Lint with flake8 run: | # stop the build if there are Python syntax errors or undefined names - flake8 . --count --select=E9,F63,F7,F82 --exclude=neuroml/nml/nml.py --show-source --statistics + flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide - flake8 . --count --exit-zero --exclude=neuroml/nml/nml.py --max-complexity=10 --max-line-length=127 --statistics + flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics diff --git a/setup.cfg b/setup.cfg index 61139e22..ebb6c9d1 100644 --- a/setup.cfg +++ b/setup.cfg @@ -5,4 +5,6 @@ ignore = E501,E502,F403,F405,E231,E228,E225,E226,E265,E261 exclude = neuroml/nml/nml.py, - doc + doc, + build, + __pycache__ From e92314322acdc4b786b05281d36aa6528f14e627 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Tue, 26 Oct 2021 17:37:48 +0100 Subject: [PATCH 087/136] feat: update name of helper function --- neuroml/nml/helper_methods.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/neuroml/nml/helper_methods.py b/neuroml/nml/helper_methods.py index a5f46c1a..fcad255d 100644 --- a/neuroml/nml/helper_methods.py +++ b/neuroml/nml/helper_methods.py @@ -268,19 +268,18 @@ def add(self, obj, force=False): break if not found: e = Exception( - """A member object of {} type could not be found in this class. - Please check the NeuroML schema at https://docs.neuroml.org to - confirm that this member belongs to this NeuroML element.""".format(type(obj).__name__)) + """A member object of {} type could not be found in this class.\\n + {}.""".format(type(obj).__name__), self.info()) raise e ''', class_names=("BaseWithoutId"), ) generic_list = MethodSpec( - name="get_members_info", + name="info", source='''\ - def get_members_info(self, show_contents=False): + def info(self, show_contents=False): """A helper function to get a list of members of this class. 
This is useful to quickly check what members can go into a particular From 8454b159581d004d9e1204b9da082b1dd602a101 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Tue, 26 Oct 2021 17:38:04 +0100 Subject: [PATCH 088/136] feat: improve output from info method --- neuroml/nml/helper_methods.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/neuroml/nml/helper_methods.py b/neuroml/nml/helper_methods.py index fcad255d..bf8403bc 100644 --- a/neuroml/nml/helper_methods.py +++ b/neuroml/nml/helper_methods.py @@ -297,7 +297,7 @@ def info(self, show_contents=False): :returns: the string (for testing purposes) """ - info_str = "" + info_str = "Valid members for {} are:\\n".format(self.__class__.__name__) for member in self.member_data_items_: info_str += ("{} (class: {})\\n".format(member.name, member.data_type)) if show_contents: From 88ef5d3a0ae5bf21ebcda0e901730a36a27223e0 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Tue, 26 Oct 2021 17:39:34 +0100 Subject: [PATCH 089/136] feat: make add show current obj information without any arguments So users can primarily just use `add()`. --- neuroml/nml/helper_methods.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/neuroml/nml/helper_methods.py b/neuroml/nml/helper_methods.py index bf8403bc..03052893 100644 --- a/neuroml/nml/helper_methods.py +++ b/neuroml/nml/helper_methods.py @@ -226,9 +226,15 @@ def surface_area(self): name="add", source='''\ - def add(self, obj, force=False): + def add(self, obj=None, force=False): """Generic function to allow easy addition of a new member to a NeuroML object. + Without arguments, when `obj=None`, it simply calls the `info()` method + to provide the list of valid member types for the NeuroML class. + + Use `info(show_contents=True)` to see the valid members of this class, + and their current contents. + :param obj: object member to add :type obj: any NeuroML Type defined by the API :param force: boolean to force addition when an obj has already been added previously @@ -238,6 +244,10 @@ def add(self, obj, force=False): :raises Exception: if a member that takes a single value is already set (and force is not set to True) :raises Exception: if a member that takes a list already includes obj (and force is not set to True) """ + if not obj: + self.info() + return + # getattr only returns the value of the provided member but one cannot # then use this to modify the member. Using `vars` also allows us to # modify the value From 3fe4bcb0a2b931979b69e8f4260157e50ff83701 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Tue, 26 Oct 2021 17:51:26 +0100 Subject: [PATCH 090/136] chore: regenerate nml.py --- neuroml/nml/nml.py | 26 ++++++++++++++++++-------- 1 file changed, 18 insertions(+), 8 deletions(-) diff --git a/neuroml/nml/nml.py b/neuroml/nml/nml.py index a7f50eca..dbf63a0f 100644 --- a/neuroml/nml/nml.py +++ b/neuroml/nml/nml.py @@ -2,7 +2,7 @@ # -*- coding: utf-8 -*- # -# Generated Wed Oct 13 15:25:38 2021 by generateDS.py version 2.40.3. +# Generated Tue Oct 26 17:40:52 2021 by generateDS.py version 2.40.3. # Python 3.10.0 (default, Oct 4 2021, 00:00:00) [GCC 11.2.1 20210728 (Red Hat 11.2.1-1)] # # Command line options: @@ -14475,9 +14475,15 @@ def _buildChildren( ): pass - def add(self, obj, force=False): + def add(self, obj=None, force=False): """Generic function to allow easy addition of a new member to a NeuroML object. 
+ Without arguments, when `obj=None`, it simply calls the `info()` method + to provide the list of valid member types for the NeuroML class. + + Use `info(show_contents=True)` to see the valid members of this class, + and their current contents. + :param obj: object member to add :type obj: any NeuroML Type defined by the API :param force: boolean to force addition when an obj has already been added previously @@ -14487,6 +14493,10 @@ def add(self, obj, force=False): :raises Exception: if a member that takes a single value is already set (and force is not set to True) :raises Exception: if a member that takes a list already includes obj (and force is not set to True) """ + if not obj: + self.info() + return + # getattr only returns the value of the provided member but one cannot # then use this to modify the member. Using `vars` also allows us to # modify the value @@ -14525,15 +14535,15 @@ def add(self, obj, force=False): break if not found: e = Exception( - """A member object of {} type could not be found in this class. - Please check the NeuroML schema at https://docs.neuroml.org to - confirm that this member belongs to this NeuroML element.""".format( + """A member object of {} type could not be found in this class.\n + {}.""".format( type(obj).__name__ - ) + ), + self.info(), ) raise e - def get_members_info(self, show_contents=False): + def info(self, show_contents=False): """A helper function to get a list of members of this class. This is useful to quickly check what members can go into a particular @@ -14551,7 +14561,7 @@ def get_members_info(self, show_contents=False): :returns: the string (for testing purposes) """ - info_str = "" + info_str = "Valid members for {} are:\n".format(self.__class__.__name__) for member in self.member_data_items_: info_str += "{} (class: {})\n".format(member.name, member.data_type) if show_contents: From 29f69020cf62460371614de19da41e3aa84953bc Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Tue, 26 Oct 2021 17:57:01 +0100 Subject: [PATCH 091/136] test: update method call --- neuroml/test/test_nml.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/neuroml/test/test_nml.py b/neuroml/test/test_nml.py index 8908a098..8becc44c 100644 --- a/neuroml/test/test_nml.py +++ b/neuroml/test/test_nml.py @@ -55,10 +55,10 @@ def test_generic_add(self): with self.assertRaises(Exception): biprop.add(net) - def test_member_info(self): + def test_info(self): """Test getting member info.""" cell = neuroml.Cell(id="testcell") - info = cell.get_members_info() + info = cell.info() self.assertRegex(info, 'morphology') self.assertRegex(info, 'biophysical_properties') self.assertNotRegex(info, 'network') From 886b82d7481942715555614e8b23c216b0d9375c Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Tue, 26 Oct 2021 18:20:32 +0100 Subject: [PATCH 092/136] feat: improve output string --- neuroml/nml/helper_methods.py | 9 +++++---- neuroml/nml/nml.py | 3 ++- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/neuroml/nml/helper_methods.py b/neuroml/nml/helper_methods.py index 03052893..30832bc0 100644 --- a/neuroml/nml/helper_methods.py +++ b/neuroml/nml/helper_methods.py @@ -278,8 +278,8 @@ def add(self, obj=None, force=False): break if not found: e = Exception( - """A member object of {} type could not be found in this class.\\n - {}.""".format(type(obj).__name__), self.info()) + """A member object of {} type could not be found in NeuroML class {}.\\n{} + """.format(type(obj).__name__, 
type(self).__name__, self.info())) raise e ''', class_names=("BaseWithoutId"), @@ -309,11 +309,12 @@ def info(self, show_contents=False): info_str = "Valid members for {} are:\\n".format(self.__class__.__name__) for member in self.member_data_items_: - info_str += ("{} (class: {})\\n".format(member.name, member.data_type)) + info_str += ("* {} (class: {})\\n".format(member.name, member.data_type)) if show_contents: contents = getattr(self, member.get_name()) - info_str += ("Contents: {}\\n\\n".format(contents)) + info_str += ("\t* Contents: {}\\n\\n".format(contents)) + info_str += "Please see the NeuroML standard schema documentation at https://docs.neuroml.org/Userdocs/NeuroMLv2.html for more information." print(info_str) return info_str ''', diff --git a/neuroml/nml/nml.py b/neuroml/nml/nml.py index dbf63a0f..c92d2253 100644 --- a/neuroml/nml/nml.py +++ b/neuroml/nml/nml.py @@ -2,7 +2,7 @@ # -*- coding: utf-8 -*- # -# Generated Tue Oct 26 17:40:52 2021 by generateDS.py version 2.40.3. +# Generated Tue Oct 26 17:59:14 2021 by generateDS.py version 2.40.3. # Python 3.10.0 (default, Oct 4 2021, 00:00:00) [GCC 11.2.1 20210728 (Red Hat 11.2.1-1)] # # Command line options: @@ -14568,6 +14568,7 @@ def info(self, show_contents=False): contents = getattr(self, member.get_name()) info_str += "Contents: {}\n\n".format(contents) + info_str += "Please see the schema documentation at https://docs.neuroml.org/Userdocs/NeuroMLv2.html for more information." print(info_str) return info_str From 69d10f856a5f8fc6e8acdb66833ee1c280355a32 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Tue, 26 Oct 2021 18:20:43 +0100 Subject: [PATCH 093/136] chore: regenerate nml.py --- neuroml/nml/nml.py | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/neuroml/nml/nml.py b/neuroml/nml/nml.py index c92d2253..8a49792b 100644 --- a/neuroml/nml/nml.py +++ b/neuroml/nml/nml.py @@ -2,7 +2,7 @@ # -*- coding: utf-8 -*- # -# Generated Tue Oct 26 17:59:14 2021 by generateDS.py version 2.40.3. +# Generated Tue Oct 26 18:13:56 2021 by generateDS.py version 2.40.3. # Python 3.10.0 (default, Oct 4 2021, 00:00:00) [GCC 11.2.1 20210728 (Red Hat 11.2.1-1)] # # Command line options: @@ -14535,11 +14535,10 @@ def add(self, obj=None, force=False): break if not found: e = Exception( - """A member object of {} type could not be found in this class.\n - {}.""".format( - type(obj).__name__ - ), - self.info(), + """A member object of {} type could not be found in NeuroML class {}.\n{} + """.format( + type(obj).__name__, type(self).__name__, self.info() + ) ) raise e @@ -14563,12 +14562,12 @@ def info(self, show_contents=False): info_str = "Valid members for {} are:\n".format(self.__class__.__name__) for member in self.member_data_items_: - info_str += "{} (class: {})\n".format(member.name, member.data_type) + info_str += "* {} (class: {})\n".format(member.name, member.data_type) if show_contents: contents = getattr(self, member.get_name()) - info_str += "Contents: {}\n\n".format(contents) + info_str += " * Contents: {}\n\n".format(contents) - info_str += "Please see the schema documentation at https://docs.neuroml.org/Userdocs/NeuroMLv2.html for more information." + info_str += "Please see the NeuroML standard schema documentation at https://docs.neuroml.org/Userdocs/NeuroMLv2.html for more information." 
print(info_str) return info_str From 8837155be4b4d68ec061d3f3dead7465c0719f3c Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Fri, 29 Oct 2021 15:17:43 +0100 Subject: [PATCH 094/136] chore: make mypy ignore nml.py --- setup.cfg | 3 +++ 1 file changed, 3 insertions(+) diff --git a/setup.cfg b/setup.cfg index bfeeeedd..307b0ebf 100644 --- a/setup.cfg +++ b/setup.cfg @@ -4,3 +4,6 @@ # lines too long ignore = E501,E502,F403,F405,E231,E228,E225,E226,E265,E261 exclude = neuroml/nml/nml.py,doc + +[mypy] +exclude = neuroml/nml/nml.py,doc From 58cccfaaca77ff66a597307c0a3b1c6f4b00d887 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Fri, 29 Oct 2021 15:17:52 +0100 Subject: [PATCH 095/136] meh: squash --- neuroml/nml/helper_methods.py | 85 ++++++++++++++++++++++++----------- 1 file changed, 59 insertions(+), 26 deletions(-) diff --git a/neuroml/nml/helper_methods.py b/neuroml/nml/helper_methods.py index 30832bc0..53dbff6b 100644 --- a/neuroml/nml/helper_methods.py +++ b/neuroml/nml/helper_methods.py @@ -226,7 +226,7 @@ def surface_area(self): name="add", source='''\ - def add(self, obj=None, force=False): + def add(self, obj=None, hint=None, force=False): """Generic function to allow easy addition of a new member to a NeuroML object. Without arguments, when `obj=None`, it simply calls the `info()` method @@ -237,12 +237,12 @@ def add(self, obj=None, force=False): :param obj: object member to add :type obj: any NeuroML Type defined by the API + :param hint: member name to add to when there are multiple members that `obj` can be added to + :type hint: :param force: boolean to force addition when an obj has already been added previously :type force: bool - :raises Exception: if a member compatible to obj could not be found - :raises Exception: if a member that takes a single value is already set (and force is not set to True) - :raises Exception: if a member that takes a list already includes obj (and force is not set to True) + :raises Exception: if multiple members can accept the object and no hint is provided. """ if not obj: self.info() @@ -252,35 +252,68 @@ def add(self, obj=None, force=False): # then use this to modify the member. Using `vars` also allows us to # modify the value found = False + targets = [] for member in self.member_data_items_: # get_data_type() returns the type as a string, e.g.: 'IncludeType' if member.get_data_type() == type(obj).__name__: - # A single value, not a list: - if member.get_container() == 0: - if vars(self)[member.get_name()]: - if force: - vars(self)[member.get_name()] = obj - else: - raise Exception("""{} has already been assigned. Use `force=True` to overwrite. Hint: you can make changes to the already added object as required without needing to re-add it because only references to the objects are added, not their values.""".format(member.get_name())) - else: - vars(self)[member.get_name()] = obj - print("Added {} to {}".format(obj, member.get_name())) - # List - else: - if obj in vars(self)[member.get_name()]: - if force: - vars(self)[member.get_name()].append(obj) - else: - raise Exception("""{} already exists in {}. Use `force=True` to force readdition. 
Hint: you can make changes to the already added object as required without needing to re-add it because only references to the objects are added, not their values.""".format(obj.id, member.get_name())) - else: - vars(self)[member.get_name()].append(obj) - found = True - break - if not found: + targets.append(member) + + if len(targets) == 0: e = Exception( """A member object of {} type could not be found in NeuroML class {}.\\n{} """.format(type(obj).__name__, type(self).__name__, self.info())) raise e + elif len(targets) == 1: + self.__add(obj, targets[0], force) + else: + # more than one target + if not hint: + err_string = """Multiple members can accept {}. Please provide the name of the variable using the `hint` argument to specify which member to add to:""".format(obj.id)) + for t in targets: + err_string += "- {}".format(t.get_name()) + raise Exception(err_string) + + # use hint to figure out which target to use + for t in targets: + if hint == t.get_name(): + self.__add(obj, t, force) + + + def __add(self, obj, hint, force=False): + """Private method to add new member to a specified variable in a NeuroML object. + + :param obj: object member to add + :type obj: any NeuroML Type defined by the API + :param hint: member variable name to add to when there are multiple members that `obj` can be added to + :type hint: MemberSpec_ + :param force: boolean to force addition when an obj has already been added previously + :type force: bool + + :raises Exception: if a member compatible to obj could not be found + :raises Exception: if a member that takes a single value is already set (and force is not set to True) + :raises Exception: if a member that takes a list already includes obj (and force is not set to True) + """ + # A single value, not a list: + if member.get_container() == 0: + if vars(self)[member.get_name()]: + if force: + vars(self)[member.get_name()] = obj + else: + raise Exception("""{} has already been assigned. Use `force=True` to overwrite. Hint: you can make changes to the already added object as required without needing to re-add it because only references to the objects are added, not their values.""".format(member.get_name())) + else: + vars(self)[member.get_name()] = obj + print("Added {} to {}".format(obj, member.get_name())) + # List + else: + if obj in vars(self)[member.get_name()]: + if force: + vars(self)[member.get_name()].append(obj) + else: + raise Exception("""{} already exists in {}. Use `force=True` to force readdition. 
Hint: you can make changes to the already added object as required without needing to re-add it because only references to the objects are added, not their values.""".format(obj.id, member.get_name())) + else: + vars(self)[member.get_name()].append(obj) + found = True + break ''', class_names=("BaseWithoutId"), ) From 174513aec5b464cba845e791c32df7d40fab8c25 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Fri, 5 Nov 2021 14:58:02 +0000 Subject: [PATCH 096/136] feat: collect inherited members also --- neuroml/nml/helper_methods.py | 52 +++++++++++++++++++++++++++-------- 1 file changed, 40 insertions(+), 12 deletions(-) diff --git a/neuroml/nml/helper_methods.py b/neuroml/nml/helper_methods.py index 53dbff6b..cc07a1e7 100644 --- a/neuroml/nml/helper_methods.py +++ b/neuroml/nml/helper_methods.py @@ -238,10 +238,11 @@ def add(self, obj=None, hint=None, force=False): :param obj: object member to add :type obj: any NeuroML Type defined by the API :param hint: member name to add to when there are multiple members that `obj` can be added to - :type hint: + :type hint: string :param force: boolean to force addition when an obj has already been added previously :type force: bool + :raises Exception: if a member compatible to obj could not be found :raises Exception: if multiple members can accept the object and no hint is provided. """ if not obj: @@ -251,45 +252,48 @@ def add(self, obj=None, hint=None, force=False): # getattr only returns the value of the provided member but one cannot # then use this to modify the member. Using `vars` also allows us to # modify the value - found = False targets = [] - for member in self.member_data_items_: + all_members = self.get_members() + for member in all_members: # get_data_type() returns the type as a string, e.g.: 'IncludeType' if member.get_data_type() == type(obj).__name__: targets.append(member) + if len(targets) == 0: + # no targets found e = Exception( """A member object of {} type could not be found in NeuroML class {}.\\n{} """.format(type(obj).__name__, type(self).__name__, self.info())) raise e elif len(targets) == 1: + # good, just add it self.__add(obj, targets[0], force) else: # more than one target if not hint: - err_string = """Multiple members can accept {}. Please provide the name of the variable using the `hint` argument to specify which member to add to:""".format(obj.id)) + err_string = """Multiple members can accept {}. Please provide the name of the variable using the `hint` argument to specify which member to add to:\\n""".format(type(obj).__name__) for t in targets: - err_string += "- {}".format(t.get_name()) + err_string += "- {}\\n".format(t.get_name()) raise Exception(err_string) # use hint to figure out which target to use for t in targets: if hint == t.get_name(): self.__add(obj, t, force) + break - def __add(self, obj, hint, force=False): + def __add(self, obj, member, force=False): """Private method to add new member to a specified variable in a NeuroML object. 
:param obj: object member to add :type obj: any NeuroML Type defined by the API - :param hint: member variable name to add to when there are multiple members that `obj` can be added to - :type hint: MemberSpec_ + :param member: member variable name to add to when there are multiple members that `obj` can be added to + :type member: MemberSpec_ :param force: boolean to force addition when an obj has already been added previously :type force: bool - :raises Exception: if a member compatible to obj could not be found :raises Exception: if a member that takes a single value is already set (and force is not set to True) :raises Exception: if a member that takes a list already includes obj (and force is not set to True) """ @@ -309,11 +313,35 @@ def __add(self, obj, hint, force=False): if force: vars(self)[member.get_name()].append(obj) else: - raise Exception("""{} already exists in {}. Use `force=True` to force readdition. Hint: you can make changes to the already added object as required without needing to re-add it because only references to the objects are added, not their values.""".format(obj.id, member.get_name())) + raise Exception("""{} already exists in {}. Use `force=True` to force readdition. Hint: you can make changes to the already added object as required without needing to re-add it because only references to the objects are added, not their values.""".format(type(obj).__class__, member.get_name())) else: vars(self)[member.get_name()].append(obj) - found = True - break + + def get_members(self): + """Get member data items, also from ancestors. + + This function is required because generateDS does not include inherited + members in the member_data_items list for a derived class. So, for + example, while IonChannelHH has `gate_hh_rates` which it inherits from + IonChannel, IonChannelHH's `member_data_items_` is empty. It relies on + the IonChannel classes' `member_data_items_` list. + + :returns: list of members, including ones inherited from ancestors. + """ + all_members = self.member_data_items_ + for c in type(self).__mro__: + try: + all_members.extend(c.member_data_items_) + except AttributeError: + pass + except TypeError: + pass + + # deduplicate + # TODO where are the duplicates coming from given that we're not + # calling this recursively? + all_members = list(set(all_members)) + return all_members ''', class_names=("BaseWithoutId"), ) From 675736b58bdd9b8d83b864a05e9d7b6beb5b613a Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Fri, 5 Nov 2021 14:58:21 +0000 Subject: [PATCH 097/136] chore: regenerate nml.py --- neuroml/nml/nml.py | 133 +++++++++++++++++++++++++++++++++------------ 1 file changed, 97 insertions(+), 36 deletions(-) diff --git a/neuroml/nml/nml.py b/neuroml/nml/nml.py index 8a49792b..7bb0ef04 100644 --- a/neuroml/nml/nml.py +++ b/neuroml/nml/nml.py @@ -2,7 +2,7 @@ # -*- coding: utf-8 -*- # -# Generated Tue Oct 26 18:13:56 2021 by generateDS.py version 2.40.3. +# Generated Fri Nov 5 14:54:35 2021 by generateDS.py version 2.40.3. # Python 3.10.0 (default, Oct 4 2021, 00:00:00) [GCC 11.2.1 20210728 (Red Hat 11.2.1-1)] # # Command line options: @@ -14475,7 +14475,7 @@ def _buildChildren( ): pass - def add(self, obj=None, force=False): + def add(self, obj=None, hint=None, force=False): """Generic function to allow easy addition of a new member to a NeuroML object. 
Without arguments, when `obj=None`, it simply calls the `info()` method @@ -14486,12 +14486,13 @@ def add(self, obj=None, force=False): :param obj: object member to add :type obj: any NeuroML Type defined by the API + :param hint: member name to add to when there are multiple members that `obj` can be added to + :type hint: string :param force: boolean to force addition when an obj has already been added previously :type force: bool :raises Exception: if a member compatible to obj could not be found - :raises Exception: if a member that takes a single value is already set (and force is not set to True) - :raises Exception: if a member that takes a list already includes obj (and force is not set to True) + :raises Exception: if multiple members can accept the object and no hint is provided. """ if not obj: self.info() @@ -14500,40 +14501,15 @@ def add(self, obj=None, force=False): # getattr only returns the value of the provided member but one cannot # then use this to modify the member. Using `vars` also allows us to # modify the value - found = False - for member in self.member_data_items_: + targets = [] + all_members = self.get_members() + for member in all_members: # get_data_type() returns the type as a string, e.g.: 'IncludeType' if member.get_data_type() == type(obj).__name__: - # A single value, not a list: - if member.get_container() == 0: - if vars(self)[member.get_name()]: - if force: - vars(self)[member.get_name()] = obj - else: - raise Exception( - """{} has already been assigned. Use `force=True` to overwrite. Hint: you can make changes to the already added object as required without needing to re-add it because only references to the objects are added, not their values.""".format( - member.get_name() - ) - ) - else: - vars(self)[member.get_name()] = obj - print("Added {} to {}".format(obj, member.get_name())) - # List - else: - if obj in vars(self)[member.get_name()]: - if force: - vars(self)[member.get_name()].append(obj) - else: - raise Exception( - """{} already exists in {}. Use `force=True` to force readdition. Hint: you can make changes to the already added object as required without needing to re-add it because only references to the objects are added, not their values.""".format( - obj.id, member.get_name() - ) - ) - else: - vars(self)[member.get_name()].append(obj) - found = True - break - if not found: + targets.append(member) + + if len(targets) == 0: + # no targets found e = Exception( """A member object of {} type could not be found in NeuroML class {}.\n{} """.format( @@ -14541,6 +14517,91 @@ def add(self, obj=None, force=False): ) ) raise e + elif len(targets) == 1: + # good, just add it + self.__add(obj, targets[0], force) + else: + # more than one target + if not hint: + err_string = """Multiple members can accept {}. Please provide the name of the variable using the `hint` argument to specify which member to add to:\n""".format( + type(obj).__name__ + ) + for t in targets: + err_string += "- {}\n".format(t.get_name()) + raise Exception(err_string) + + # use hint to figure out which target to use + for t in targets: + if hint == t.get_name(): + self.__add(obj, t, force) + break + + def __add(self, obj, member, force=False): + """Private method to add new member to a specified variable in a NeuroML object. 
+ + :param obj: object member to add + :type obj: any NeuroML Type defined by the API + :param member: member variable name to add to when there are multiple members that `obj` can be added to + :type member: MemberSpec_ + :param force: boolean to force addition when an obj has already been added previously + :type force: bool + + :raises Exception: if a member that takes a single value is already set (and force is not set to True) + :raises Exception: if a member that takes a list already includes obj (and force is not set to True) + """ + # A single value, not a list: + if member.get_container() == 0: + if vars(self)[member.get_name()]: + if force: + vars(self)[member.get_name()] = obj + else: + raise Exception( + """{} has already been assigned. Use `force=True` to overwrite. Hint: you can make changes to the already added object as required without needing to re-add it because only references to the objects are added, not their values.""".format( + member.get_name() + ) + ) + else: + vars(self)[member.get_name()] = obj + print("Added {} to {}".format(obj, member.get_name())) + # List + else: + if obj in vars(self)[member.get_name()]: + if force: + vars(self)[member.get_name()].append(obj) + else: + raise Exception( + """{} already exists in {}. Use `force=True` to force readdition. Hint: you can make changes to the already added object as required without needing to re-add it because only references to the objects are added, not their values.""".format( + type(obj).__class__, member.get_name() + ) + ) + else: + vars(self)[member.get_name()].append(obj) + + def get_members(self): + """Get member data items, also from ancestors. + + This function is required because generateDS does not include inherited + members in the member_data_items list for a derived class. So, for + example, while IonChannelHH has `gate_hh_rates` which it inherits from + IonChannel, IonChannelHH's `member_data_items_` is empty. It relies on + the IonChannel classes' `member_data_items_` list. + + :returns: list of members, including ones inherited from ancestors. + """ + all_members = self.member_data_items_ + for c in type(self).__mro__: + try: + all_members.extend(c.member_data_items_) + except AttributeError: + pass + except TypeError: + pass + + # deduplicate + # TODO where are the duplicates coming from given that we're not + # calling this recursively? + all_members = list(set(all_members)) + return all_members def info(self, show_contents=False): """A helper function to get a list of members of this class. From 5e4fb352f3d593c634ec0b3cef3aeca379184793 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Fri, 5 Nov 2021 14:58:30 +0000 Subject: [PATCH 098/136] test: add test for get_members --- neuroml/test/test_nml.py | 55 ++++++++++++++++++++++++++++++++++++---- 1 file changed, 50 insertions(+), 5 deletions(-) diff --git a/neuroml/test/test_nml.py b/neuroml/test/test_nml.py index 8becc44c..58fd2af8 100644 --- a/neuroml/test/test_nml.py +++ b/neuroml/test/test_nml.py @@ -20,11 +20,24 @@ class TestNML(unittest.TestCase): """Tests related to nml.py""" - def test_generic_add(self): - """Test the generic add function. 
- :returns: TODO - - """ + def test_get_members(self): + """Test get_members()""" + example_base = neuroml.Base(id="examplebase") + base_members = example_base.get_members() + + member_names = [] + for m in base_members: + member_names.append(m.get_name()) + + # From parent: baseWithoutId + self.assertIn("neuro_lex_id", member_names) + # From itself + self.assertIn("id", member_names) + # Not in here: + self.assertNotIn("cell", member_names) + + def test_generic_add_single(self): + """Test the generic add function for single addition.""" doc = neuroml.NeuroMLDocument(id="testdoc") cell = neuroml.Cell(id="testcell") cell1 = neuroml.Cell(id="testcell1") @@ -55,6 +68,38 @@ def test_generic_add(self): with self.assertRaises(Exception): biprop.add(net) + def test_generic_add_multiple(self): + """Test add() when an object may belong to multiple members. + + From our example on the docs. + """ + na_channel = neuroml.IonChannelHH(id="na_channel", notes="Sodium channel for HH cell", conductance="10pS", species="na") + gate_m = neuroml.GateHHRates(id="na_m", instances="3", notes="m gate for na channel") + + m_forward_rate = neuroml.HHRate(type="HHExpLinearRate", rate="1per_ms", midpoint="-40mV", scale="10mV") + m_reverse_rate = neuroml.HHRate(type="HHExpRate", rate="4per_ms", midpoint="-65mV", scale="-18mV") + + with self.assertRaises(Exception): + gate_m.add(m_forward_rate) + with self.assertRaises(Exception): + gate_m.add(m_reverse_rate) + + gate_m.add(m_forward_rate, hint="forward_rate") + gate_m.add(m_reverse_rate, hint="reverse_rate") + + na_channel.gate_hh_rates.append(gate_m) + + gate_h = neuroml.GateHHRates(id="na_h", instances="1", notes="h gate for na channel") + h_forward_rate = neuroml.HHRate(type="HHExpRate", rate="0.07per_ms", midpoint="-65mV", scale="-20mV") + h_reverse_rate = neuroml.HHRate(type="HHSigmoidRate", rate="1per_ms", midpoint="-35mV", scale="10mV") + + with self.assertRaises(Exception): + gate_h.add(h_forward_rate) + with self.assertRaises(Exception): + gate_h.add(h_reverse_rate) + gate_h.add(h_forward_rate, hint="forward_rate") + gate_h.add(h_reverse_rate, hint="reverse_rate") + def test_info(self): """Test getting member info.""" cell = neuroml.Cell(id="testcell") From f6e167b8fa15c4c5f0dbaf73a0e654d389ca9849 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Mon, 8 Nov 2021 21:10:10 +0000 Subject: [PATCH 099/136] improvement(helper_methods): convert exception to warning Attempting to readd an object is harmless, so make this a warning. Also fixes the comparison to not use `is` because it doesn't only compare the reference, it also compares the value. --- neuroml/nml/helper_methods.py | 33 ++++++++++++++++++--------------- 1 file changed, 18 insertions(+), 15 deletions(-) diff --git a/neuroml/nml/helper_methods.py b/neuroml/nml/helper_methods.py index cc07a1e7..a91bce60 100644 --- a/neuroml/nml/helper_methods.py +++ b/neuroml/nml/helper_methods.py @@ -294,28 +294,31 @@ def __add(self, obj, member, force=False): :param force: boolean to force addition when an obj has already been added previously :type force: bool - :raises Exception: if a member that takes a single value is already set (and force is not set to True) - :raises Exception: if a member that takes a list already includes obj (and force is not set to True) """ + import warnings + # A single value, not a list: if member.get_container() == 0: - if vars(self)[member.get_name()]: - if force: - vars(self)[member.get_name()] = obj - else: - raise Exception("""{} has already been assigned. 
Use `force=True` to overwrite. Hint: you can make changes to the already added object as required without needing to re-add it because only references to the objects are added, not their values.""".format(member.get_name())) - else: + if force: vars(self)[member.get_name()] = obj - print("Added {} to {}".format(obj, member.get_name())) + else: + if vars(self)[member.get_name()]: + warnings.warn("""{} has already been assigned. Use `force=True` to overwrite. Hint: you can make changes to the already added object as required without needing to re-add it because only references to the objects are added, not their values.""".format(member.get_name())) + else: + vars(self)[member.get_name()] = obj # List else: - if obj in vars(self)[member.get_name()]: - if force: - vars(self)[member.get_name()].append(obj) - else: - raise Exception("""{} already exists in {}. Use `force=True` to force readdition. Hint: you can make changes to the already added object as required without needing to re-add it because only references to the objects are added, not their values.""".format(type(obj).__class__, member.get_name())) - else: + # Do not use 'obj in ..' for membership check because it also + # returns true if an element with the same value exists in the + # container + # https://docs.python.org/3/reference/expressions.html#membership-test-operations + if force: vars(self)[member.get_name()].append(obj) + else: + if any(obj is e for e in vars(self)[member.get_name()]): + warnings.warn("""{} already exists in {}. Use `force=True` to force readdition. Hint: you can make changes to the already added object as required without needing to re-add it because only references to the objects are added, not their values.""".format(obj, member.get_name())) + else: + vars(self)[member.get_name()].append(obj) def get_members(self): """Get member data items, also from ancestors. From 564927bd93be7a557bc323d285840844395c8468 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Mon, 8 Nov 2021 21:11:47 +0000 Subject: [PATCH 100/136] fix(helper_methods): use shallow copy We want a copy of the object to add other inherited member_data_types_ to. If we use `=`, we get a reference to `self.member_data_types_` object and end up modifying it, which we do not want to do. Not only does that change the nml.py representation, it also means that all the "expanded" lists of `member_data_types_` is held in memory for every neuroml object which greatly increases the memory foot print of models. So, calculate the complete list of member_data_types_ when required and then forget about it, which will allow memory to be garbage collected and freed up. --- neuroml/nml/helper_methods.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/neuroml/nml/helper_methods.py b/neuroml/nml/helper_methods.py index a91bce60..102b59a9 100644 --- a/neuroml/nml/helper_methods.py +++ b/neuroml/nml/helper_methods.py @@ -331,7 +331,11 @@ def get_members(self): :returns: list of members, including ones inherited from ancestors. """ - all_members = self.member_data_items_ + import copy + # create a copy by value + # if copied by reference (=), the member_data_items_ object variable is + # modified to a large list, greatly increasing the memory usage. 
+ all_members = copy.copy(self.member_data_items_) for c in type(self).__mro__: try: all_members.extend(c.member_data_items_) From 93f7328e81fefea9994dd92b8bf89a0dfa637b06 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Mon, 8 Nov 2021 21:14:58 +0000 Subject: [PATCH 101/136] feat(helper_methods): update `append` to use new `add` method --- neuroml/nml/helper_methods.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/neuroml/nml/helper_methods.py b/neuroml/nml/helper_methods.py index 102b59a9..bc79171c 100644 --- a/neuroml/nml/helper_methods.py +++ b/neuroml/nml/helper_methods.py @@ -1087,14 +1087,13 @@ def get_by_id(self,id): print_(" - Suppressing further warnings about id not found...") return None - def append(self,element): + def append(self, element): """Append an element :param element: element to append :type element: Object """ - from neuroml.utils import append_to_element - append_to_element(self,element) + self.add(element) ''', class_names=("NeuroMLDocument"), From dad20c2c7abdec326fc3762b7e6f18dcaab4395c Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Mon, 8 Nov 2021 21:15:48 +0000 Subject: [PATCH 102/136] test(nml.py): update assertion to check for a warning --- neuroml/test/test_nml.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/neuroml/test/test_nml.py b/neuroml/test/test_nml.py index 58fd2af8..4c48dd2e 100644 --- a/neuroml/test/test_nml.py +++ b/neuroml/test/test_nml.py @@ -48,7 +48,7 @@ def test_generic_add_single(self): self.assertIsNone(doc.add(cell)) # Already added, so throw exception - with self.assertRaises(Exception): + with self.assertWarns(UserWarning): doc.add(cell) # Success From 2dcd2a7114dd51610d470c3f37b09b4a1479cd22 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Mon, 8 Nov 2021 21:21:22 +0000 Subject: [PATCH 103/136] chore: update comment --- neuroml/test/test_nml.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/neuroml/test/test_nml.py b/neuroml/test/test_nml.py index 4c48dd2e..21b1e16e 100644 --- a/neuroml/test/test_nml.py +++ b/neuroml/test/test_nml.py @@ -79,6 +79,8 @@ def test_generic_add_multiple(self): m_forward_rate = neuroml.HHRate(type="HHExpLinearRate", rate="1per_ms", midpoint="-40mV", scale="10mV") m_reverse_rate = neuroml.HHRate(type="HHExpRate", rate="4per_ms", midpoint="-65mV", scale="-18mV") + # HHRate can go to two different members, so an exception is thrown + # needs hint, as done below with self.assertRaises(Exception): gate_m.add(m_forward_rate) with self.assertRaises(Exception): @@ -93,10 +95,13 @@ def test_generic_add_multiple(self): h_forward_rate = neuroml.HHRate(type="HHExpRate", rate="0.07per_ms", midpoint="-65mV", scale="-20mV") h_reverse_rate = neuroml.HHRate(type="HHSigmoidRate", rate="1per_ms", midpoint="-35mV", scale="10mV") + # HHRate can go to two different members, so an exception is thrown + # needs hint, as done below with self.assertRaises(Exception): gate_h.add(h_forward_rate) with self.assertRaises(Exception): gate_h.add(h_reverse_rate) + gate_h.add(h_forward_rate, hint="forward_rate") gate_h.add(h_reverse_rate, hint="reverse_rate") From 38a160bc774964429a17e9f0abfd7062c0c3ccc7 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Mon, 8 Nov 2021 21:21:42 +0000 Subject: [PATCH 104/136] test(nml.py): add test for containers objects --- neuroml/test/test_nml.py | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/neuroml/test/test_nml.py 
b/neuroml/test/test_nml.py index 21b1e16e..7fca3c6d 100644 --- a/neuroml/test/test_nml.py +++ b/neuroml/test/test_nml.py @@ -105,6 +105,30 @@ def test_generic_add_multiple(self): gate_h.add(h_forward_rate, hint="forward_rate") gate_h.add(h_reverse_rate, hint="reverse_rate") + def test_add_to_container(self): + """Test adding multiple objects to a container class. """ + network = neuroml.Network() + # They have the same id, but they are unique objects as far as Python + # is concerned + pop0 = neuroml.Population() + pop1 = neuroml.Population() + pop2 = neuroml.Population() + + network.add(pop0) + network.add(pop1) + network.add(pop2) + + pop3 = neuroml.Population(id="unique") + network.add(pop3) + # warning because this is already added + with self.assertWarns(UserWarning): + network.add(pop3) + + # Note that for Python, this is a new object + # So we can add it again + pop4 = neuroml.Population(id="unique") + network.add(pop4) + def test_info(self): """Test getting member info.""" cell = neuroml.Cell(id="testcell") From 2dcbbd2c10e1e12bf2f630baf3d320a2cf448534 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Mon, 8 Nov 2021 21:22:29 +0000 Subject: [PATCH 105/136] test(xml_parser): user pytest.parameterize to run on different files Also a few other clean ups. --- neuroml/test/test_xml_parser.py | 82 ++++++++++++--------------------- 1 file changed, 29 insertions(+), 53 deletions(-) diff --git a/neuroml/test/test_xml_parser.py b/neuroml/test/test_xml_parser.py index 4f9426f2..83ec5803 100644 --- a/neuroml/test/test_xml_parser.py +++ b/neuroml/test/test_xml_parser.py @@ -3,77 +3,53 @@ """ +from neuroml.hdf5.DefaultNetworkHandler import DefaultNetworkHandler from neuroml.hdf5.NetworkBuilder import NetworkBuilder from neuroml.hdf5.NeuroMLXMLParser import NeuroMLXMLParser from neuroml import loaders -import logging +import neuroml.writers as writers -import os -try: - import unittest2 as unittest -except ImportError: - import unittest +import os +import pytest -class TestNeuroMLXMLParser(unittest.TestCase): - def test_parse(self): +@pytest.mark.parametrize("f", ["simplenet.nml", "testh5.nml", + "pyr_4_sym.cell.nml", "MediumNet.net.nml", + "complete.nml"]) +class TestNeuroMLXMLParser(): + def test_parse(self, f): base_dir = os.path.dirname(__file__) - # base_dir = '.' 
+ file_name = base_dir + "/../examples/test_files/" + f + print("---- Testing {}".format(file_name)) - logging.basicConfig( - level=logging.INFO, format="%(name)-19s %(levelname)-5s - %(message)s" + nml_doc0 = loaders.read_neuroml2_file( + file_name, include_includes=True, verbose=True ) - for f in [ - "simplenet.nml", - "testh5.nml", - "pyr_4_sym.cell.nml", - "MediumNet.net.nml", - "complete.nml", - ]: - - file_name = base_dir + "/../examples/test_files/" + f - - nml_doc0 = loaders.read_neuroml2_file( - file_name, include_includes=True, verbose=True - ) - - summary0 = nml_doc0.summary(show_includes=False, show_non_network=False) - print("\n" + summary0) - - from neuroml.hdf5.DefaultNetworkHandler import DefaultNetworkHandler - - nmlHandler = DefaultNetworkHandler() - - currParser = NeuroMLXMLParser(nmlHandler) - - currParser.parse(file_name) - - print("-------------------------------\n\n") - - nmlHandler = NetworkBuilder() - - currParser = NeuroMLXMLParser(nmlHandler) - - currParser.parse(file_name) + summary0 = nml_doc0.summary(show_includes=False, show_non_network=False) + # print("\n" + summary0) - nml_doc = nmlHandler.get_nml_doc() + nmlHandler = DefaultNetworkHandler() + currParser = NeuroMLXMLParser(nmlHandler) + currParser.parse(file_name) - summary = nml_doc.summary(show_includes=False, show_non_network=False) + print("-------------------------------\n\n") - print(summary) + nmlHandler_new = NetworkBuilder() + currParser_new = NeuroMLXMLParser(nmlHandler_new) + currParser_new.parse(file_name) + nml_doc_new = nmlHandler_new.get_nml_doc() - compare(summary, summary0) + summary = nml_doc_new.summary(show_includes=False, show_non_network=False) + # print(summary) - nml_file = base_dir + "/../examples/tmp/EXP_" + f - import neuroml.writers as writers + compare(summary, summary0) - writers.NeuroMLWriter.write(nml_doc, nml_file) - print("Written network file to: " + nml_file) + nml_file_new = base_dir + "/../examples/tmp/EXP_" + f - def runTest(self): - print("Running tests in TestNeuroMLXMLParser") + writers.NeuroMLWriter.write(nml_doc_new, nml_file_new) + print("Written network file to: " + nml_file_new) def compare(s1, s2): From 0b56d56fb01f508cb8e3d40ff87c99c89ab9b61f Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Mon, 8 Nov 2021 21:23:03 +0000 Subject: [PATCH 106/136] chore(nml.py): regenerate --- neuroml/nml/nml.py | 52 ++++++++++++++++++++++++++-------------------- 1 file changed, 29 insertions(+), 23 deletions(-) diff --git a/neuroml/nml/nml.py b/neuroml/nml/nml.py index 7bb0ef04..86ebafe5 100644 --- a/neuroml/nml/nml.py +++ b/neuroml/nml/nml.py @@ -2,7 +2,7 @@ # -*- coding: utf-8 -*- # -# Generated Fri Nov 5 14:54:35 2021 by generateDS.py version 2.40.3. +# Generated Mon Nov 8 21:00:41 2021 by generateDS.py version 2.40.3. 
# Python 3.10.0 (default, Oct 4 2021, 00:00:00) [GCC 11.2.1 20210728 (Red Hat 11.2.1-1)] # # Command line options: @@ -14546,36 +14546,39 @@ def __add(self, obj, member, force=False): :param force: boolean to force addition when an obj has already been added previously :type force: bool - :raises Exception: if a member that takes a single value is already set (and force is not set to True) - :raises Exception: if a member that takes a list already includes obj (and force is not set to True) """ + import warnings + # A single value, not a list: if member.get_container() == 0: - if vars(self)[member.get_name()]: - if force: - vars(self)[member.get_name()] = obj - else: - raise Exception( + if force: + vars(self)[member.get_name()] = obj + else: + if vars(self)[member.get_name()]: + warnings.warn( """{} has already been assigned. Use `force=True` to overwrite. Hint: you can make changes to the already added object as required without needing to re-add it because only references to the objects are added, not their values.""".format( member.get_name() ) ) - else: - vars(self)[member.get_name()] = obj - print("Added {} to {}".format(obj, member.get_name())) + else: + vars(self)[member.get_name()] = obj # List else: - if obj in vars(self)[member.get_name()]: - if force: - vars(self)[member.get_name()].append(obj) - else: - raise Exception( + # Do not use 'obj in ..' for membership check because it also + # returns true if an element with the same value exists in the + # container + # https://docs.python.org/3/reference/expressions.html#membership-test-operations + if force: + vars(self)[member.get_name()].append(obj) + else: + if any(obj is e for e in vars(self)[member.get_name()]): + warnings.warn( """{} already exists in {}. Use `force=True` to force readdition. Hint: you can make changes to the already added object as required without needing to re-add it because only references to the objects are added, not their values.""".format( - type(obj).__class__, member.get_name() + obj, member.get_name() ) ) - else: - vars(self)[member.get_name()].append(obj) + else: + vars(self)[member.get_name()].append(obj) def get_members(self): """Get member data items, also from ancestors. @@ -14588,7 +14591,12 @@ def get_members(self): :returns: list of members, including ones inherited from ancestors. """ - all_members = self.member_data_items_ + import copy + + # create a copy by value + # if copied by reference (=), the member_data_items_ object variable is + # modified to a large list, greatly increasing the memory usage. 
+ all_members = copy.copy(self.member_data_items_) for c in type(self).__mro__: try: all_members.extend(c.member_data_items_) @@ -38770,9 +38778,7 @@ def append(self, element): :param element: element to append :type element: Object """ - from neuroml.utils import append_to_element - - append_to_element(self, element) + self.add(element) # end class NeuroMLDocument From 1092bf93a32f51e8c5f993e82636d466b3b709da Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Mon, 8 Nov 2021 21:24:32 +0000 Subject: [PATCH 107/136] chore: reformat with black --- neuroml/benchmarks/arraymorph_benchmarks.py | 1 - neuroml/nml/helper_methods.py | 9 +---- neuroml/test/test_nml.py | 42 +++++++++++++++------ neuroml/test/test_xml_parser.py | 15 ++++++-- 4 files changed, 42 insertions(+), 25 deletions(-) diff --git a/neuroml/benchmarks/arraymorph_benchmarks.py b/neuroml/benchmarks/arraymorph_benchmarks.py index 855e62a8..3366aa2a 100644 --- a/neuroml/benchmarks/arraymorph_benchmarks.py +++ b/neuroml/benchmarks/arraymorph_benchmarks.py @@ -243,7 +243,6 @@ def benchmark_arraymorph_writer(): # plt.show() - plt.title( "ArrayMorph write to disk benchmarks for HDF5 and NeuroML serialization formats" ) diff --git a/neuroml/nml/helper_methods.py b/neuroml/nml/helper_methods.py index bc79171c..6640ec55 100644 --- a/neuroml/nml/helper_methods.py +++ b/neuroml/nml/helper_methods.py @@ -392,14 +392,7 @@ def info(self, show_contents=False): # Provide a list of your method specifications. # This list of specifications must be named METHOD_SPECS. # -METHOD_SPECS = ( - length, - volume, - surface_area, - num_segments, - generic_add, - generic_list -) +METHOD_SPECS = (length, volume, surface_area, num_segments, generic_add, generic_list) seg_grp = MethodSpec( diff --git a/neuroml/test/test_nml.py b/neuroml/test/test_nml.py index 7fca3c6d..5a22dea0 100644 --- a/neuroml/test/test_nml.py +++ b/neuroml/test/test_nml.py @@ -10,6 +10,7 @@ import neuroml + try: import unittest2 as unittest except ImportError: @@ -73,11 +74,22 @@ def test_generic_add_multiple(self): From our example on the docs. 
""" - na_channel = neuroml.IonChannelHH(id="na_channel", notes="Sodium channel for HH cell", conductance="10pS", species="na") - gate_m = neuroml.GateHHRates(id="na_m", instances="3", notes="m gate for na channel") - - m_forward_rate = neuroml.HHRate(type="HHExpLinearRate", rate="1per_ms", midpoint="-40mV", scale="10mV") - m_reverse_rate = neuroml.HHRate(type="HHExpRate", rate="4per_ms", midpoint="-65mV", scale="-18mV") + na_channel = neuroml.IonChannelHH( + id="na_channel", + notes="Sodium channel for HH cell", + conductance="10pS", + species="na", + ) + gate_m = neuroml.GateHHRates( + id="na_m", instances="3", notes="m gate for na channel" + ) + + m_forward_rate = neuroml.HHRate( + type="HHExpLinearRate", rate="1per_ms", midpoint="-40mV", scale="10mV" + ) + m_reverse_rate = neuroml.HHRate( + type="HHExpRate", rate="4per_ms", midpoint="-65mV", scale="-18mV" + ) # HHRate can go to two different members, so an exception is thrown # needs hint, as done below @@ -91,9 +103,15 @@ def test_generic_add_multiple(self): na_channel.gate_hh_rates.append(gate_m) - gate_h = neuroml.GateHHRates(id="na_h", instances="1", notes="h gate for na channel") - h_forward_rate = neuroml.HHRate(type="HHExpRate", rate="0.07per_ms", midpoint="-65mV", scale="-20mV") - h_reverse_rate = neuroml.HHRate(type="HHSigmoidRate", rate="1per_ms", midpoint="-35mV", scale="10mV") + gate_h = neuroml.GateHHRates( + id="na_h", instances="1", notes="h gate for na channel" + ) + h_forward_rate = neuroml.HHRate( + type="HHExpRate", rate="0.07per_ms", midpoint="-65mV", scale="-20mV" + ) + h_reverse_rate = neuroml.HHRate( + type="HHSigmoidRate", rate="1per_ms", midpoint="-35mV", scale="10mV" + ) # HHRate can go to two different members, so an exception is thrown # needs hint, as done below @@ -106,7 +124,7 @@ def test_generic_add_multiple(self): gate_h.add(h_reverse_rate, hint="reverse_rate") def test_add_to_container(self): - """Test adding multiple objects to a container class. 
""" + """Test adding multiple objects to a container class.""" network = neuroml.Network() # They have the same id, but they are unique objects as far as Python # is concerned @@ -133,6 +151,6 @@ def test_info(self): """Test getting member info.""" cell = neuroml.Cell(id="testcell") info = cell.info() - self.assertRegex(info, 'morphology') - self.assertRegex(info, 'biophysical_properties') - self.assertNotRegex(info, 'network') + self.assertRegex(info, "morphology") + self.assertRegex(info, "biophysical_properties") + self.assertNotRegex(info, "network") diff --git a/neuroml/test/test_xml_parser.py b/neuroml/test/test_xml_parser.py index 83ec5803..8b8ec6d4 100644 --- a/neuroml/test/test_xml_parser.py +++ b/neuroml/test/test_xml_parser.py @@ -14,10 +14,17 @@ import pytest -@pytest.mark.parametrize("f", ["simplenet.nml", "testh5.nml", - "pyr_4_sym.cell.nml", "MediumNet.net.nml", - "complete.nml"]) -class TestNeuroMLXMLParser(): +@pytest.mark.parametrize( + "f", + [ + "simplenet.nml", + "testh5.nml", + "pyr_4_sym.cell.nml", + "MediumNet.net.nml", + "complete.nml", + ], +) +class TestNeuroMLXMLParser: def test_parse(self, f): base_dir = os.path.dirname(__file__) file_name = base_dir + "/../examples/test_files/" + f From 815737b3388e30b78bd33c5559aa2d52c5f185e5 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Mon, 8 Nov 2021 21:29:59 +0000 Subject: [PATCH 108/136] refactor: add deprecation warning to `append_to_element` utility method --- neuroml/utils.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/neuroml/utils.py b/neuroml/utils.py index 3514885e..59623d4c 100644 --- a/neuroml/utils.py +++ b/neuroml/utils.py @@ -6,6 +6,7 @@ import os.path import sys import inspect +import warnings import neuroml @@ -129,6 +130,7 @@ def append_to_element(parent, child): :type child: Object :raises Exception: when the child could not be added to the parent """ + warnings.warn("This method is deprecated and will be removed in future releases. Please use the `add` methods provided in each NeuroML ComponentType object", FutureWarning, stacklevel=2) membs = inspect.getmembers(parent) # print("Adding %s to element %s"%(child, parent)) mappings = {} From b9acf1f60b776479591ab19e9f809496013f22b2 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Mon, 8 Nov 2021 21:35:32 +0000 Subject: [PATCH 109/136] docs: update changelog --- README.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/README.md b/README.md index fb1a24ed..9dd1493f 100644 --- a/README.md +++ b/README.md @@ -44,6 +44,10 @@ See https://docs.neuroml.org/ for an overview of the various NeuroML libraries. ### version 0.2.57 (dev) - Enable Python 3.10 support +- Regenerate nml.py with generateDS using Python 3 +- Add generic `add` method to all NeuroML ComponentType classes that allows users to easily construct their NeuroML documents. 
+- Improve unit tests +- DEPRECATION notice: `append_to_element` will be deprecated in future releases, please use the `add` method instead ### version 0.2.56 From 798458991656b4ed6b7a3c85df3d8fdf557a1e6e Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Wed, 17 Nov 2021 11:07:52 +0000 Subject: [PATCH 110/136] chore: update to documented XSD file File from https://github.com/NeuroML/NeuroML2/pull/172 --- neuroml/nml/NeuroML_v2.2.xsd | 622 ++++++++++++++--------------------- 1 file changed, 246 insertions(+), 376 deletions(-) diff --git a/neuroml/nml/NeuroML_v2.2.xsd b/neuroml/nml/NeuroML_v2.2.xsd index 85d7be99..85a0db7b 100644 --- a/neuroml/nml/NeuroML_v2.2.xsd +++ b/neuroml/nml/NeuroML_v2.2.xsd @@ -1,13 +1,5 @@ - - + + @@ -94,9 +86,7 @@ - + @@ -211,10 +201,10 @@ - An attribute useful as id of segments, connections, etc: integer >=0 only! - + An attribute useful as id of segments, connections, etc: integer >=0 only! + - + @@ -225,10 +215,10 @@ - Integer >=1 only! - + Integer >=1 only! + - + @@ -240,7 +230,7 @@ - Double >0 only + Double >0 only @@ -277,7 +267,7 @@ - Generic property with a tag and value + A property ( a **tag** and **value** pair ), which can be on any **baseStandalone** either as a direct child, or within an **Annotation** . Generally something which helps the visual display or facilitates simulation of a Component, but is not a core physiological property. Common examples include: **numberInternalDivisions,** equivalent of nseg in NEURON; **radius,** for a radius to use in graphical displays for abstract cells ( i. e. without defined morphologies ); **color,** the color to use for a **Population** or **populationList** of cells; **recommended_dt_ms,** the recommended timestep to use for simulating a **Network** , **recommended_duration_ms** the recommended duration to use when running a **Network** @@ -286,7 +276,7 @@ - Placeholder for MIRIAM related metadata, among others. + A structured annotation containing metadata, specifically RDF or **property** elements @@ -531,14 +521,10 @@ - - - - + + + + @@ -604,42 +590,24 @@ Various types of inputs which are defined in NeuroML2. This list will be expanded... - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + @@ -683,7 +651,7 @@ - Kinetic scheme based ion channel. + A kinetic scheme based ion channel with multiple **gateKS** s, each of which consists of multiple **KSState** s and **KSTransition** s giving the rates of transition between them @@ -704,8 +672,7 @@ - Note ionChannel and ionChannelHH are currently functionally identical. This is needed since many existing examples use ionChannel, some use ionChannelHH. - One of these should be removed, probably ionChannelHH. + Note **ionChannel** and **ionChannelHH** are currently functionally identical. This is needed since many existing examples use ionChannel, some use ionChannelHH. NeuroML v2beta4 should remove one of these, probably ionChannelHH. @@ -732,8 +699,7 @@ - Note ionChannel and ionChannelHH are currently functionally identical. This is needed since many existing examples use ionChannel, some use ionChannelHH. - One of these should be removed, probably ionChannelHH. + Note **ionChannel** and **ionChannelHH** are currently functionally identical. This is needed since many existing examples use ionChannel, some use ionChannelHH. NeuroML v2beta4 should remove one of these, probably ionChannelHH. @@ -742,7 +708,7 @@ - Same as ionChannel, but with a vShift parameter to change voltage activation of gates. 
The exact usage of vShift in expressions for rates is determined by the individual gates. + Same as **ionChannel** , but with a **vShift** parameter to change voltage activation of gates. The exact usage of **vShift** in expressions for rates is determined by the individual gates. @@ -765,7 +731,7 @@ - + A value for the conductance scaling which varies as a standard function of the difference between the current temperature, **temperature,** and the temperature at which the conductance was originally determined, **experimentalTemp** @@ -788,7 +754,7 @@ - + A **KSState** with **relativeConductance** of 0 @@ -797,7 +763,7 @@ - + A **KSState** with **relativeConductance** of 1 @@ -810,7 +776,7 @@ - + A forward only **KSTransition** for a **gateKS** which specifies a **rate** ( type **baseHHRate** ) which follows one of the standard Hodgkin Huxley forms ( e. g. **HHExpRate** , **HHSigmoidRate** , **HHExpLinearRate** @@ -823,7 +789,7 @@ - + A reverse only **KSTransition** for a **gateKS** which specifies a **rate** ( type **baseHHRate** ) which follows one of the standard Hodgkin Huxley forms ( e. g. **HHExpRate** , **HHSigmoidRate** , **HHExpLinearRate** @@ -843,7 +809,7 @@ - + KS Transition specified in terms of time constant **tau** and steady state **inf** @@ -854,10 +820,10 @@ - - + + - + @@ -865,7 +831,7 @@ - + A gate which consists of multiple **KSState** s and **KSTransition** s giving the rates of transition between them @@ -879,7 +845,7 @@ - + @@ -902,7 +868,7 @@ - + Gate which follows the general Hodgkin Huxley formalism @@ -911,14 +877,14 @@ - + - + Gate which follows the general Hodgkin Huxley formalism @@ -928,14 +894,14 @@ - + - + Gate which follows the general Hodgkin Huxley formalism @@ -945,13 +911,13 @@ - + - + Gate which follows the general Hodgkin Huxley formalism @@ -967,7 +933,7 @@ - + Gate which follows the general Hodgkin Huxley formalism @@ -980,7 +946,7 @@ - + Gate which follows the general Hodgkin Huxley formalism but is instantaneous, so tau = 0 and gate follows exactly inf value @@ -994,7 +960,7 @@ - + Gate composed of subgates contributing with fractional conductance @@ -1067,7 +1033,7 @@ - + Model of an intracellular buffering mechanism for **ion** ( currently hard Coded to be calcium, due to requirement for **iCa** ) which has a baseline level **restingConc** and tends to this value with time course **decayConstant.** The ion is assumed to occupy a shell inside the membrane of thickness **shellThickness.** @@ -1085,7 +1051,7 @@ - + Model of buffering of concentration of an ion ( currently hard coded to be calcium, due to requirement for **iCa** ) which has a baseline level **restingConc** and tends to this value with time course **decayConstant.** A fixed factor **rho** is used to scale the incoming current *independently of the size of the compartment* to produce a concentration change. @@ -1100,7 +1066,7 @@ - + Base type for all synapses, i. e. ComponentTypes which produce a current ( dimension current ) and change Dynamics in response to an incoming event. cno_0000009 @@ -1110,7 +1076,7 @@ - + Base type for synapses with a dependence on membrane potential @@ -1120,7 +1086,7 @@ - + Synapse model which produces a synaptic current. @@ -1133,7 +1099,7 @@ - + Synapse model which exposes a conductance **g** in addition to producing a current. Not necessarily ohmic!! cno_0000027 @@ -1147,7 +1113,7 @@ - + Synapse model suited for a sum of two expTwoSynapses which exposes a conductance **g** in addition to producing a current. Not necessarily ohmic!! 
cno_0000027 @@ -1164,7 +1130,7 @@ - + Gap junction/single electrical connection @@ -1175,7 +1141,7 @@ - + Dummy synapse which emits no current. Used as presynaptic endpoint for analog synaptic connection. @@ -1190,7 +1156,7 @@ - + Behaves just like a one way gap junction. @@ -1209,7 +1175,7 @@ - + Graded/analog synapse. Based on synapse in Methods of http://www. nature. com/neuro/journal/v7/n12/abs/nn1352. html @@ -1222,7 +1188,7 @@ - + Alpha current synapse: rise time and decay time are both **tau.** @@ -1233,7 +1199,7 @@ - + Ohmic synapse model where rise time and decay time are both **tau.** Max conductance reached during this time ( assuming zero conductance before ) is **gbase** * **weight.** @@ -1245,7 +1211,7 @@ - + Ohmic synapse model whose conductance rises instantaneously by ( **gbase** * **weight** ) on receiving an event, and which decays exponentially to zero with time course **tauDecay** @@ -1258,7 +1224,7 @@ - + Ohmic synapse model whose conductance waveform on receiving an event has a rise time of **tauRise** and a decay time of **tauDecay.** Max conductance reached during this time ( assuming zero conductance before ) is **gbase** * **weight.** @@ -1272,7 +1238,7 @@ - + Ohmic synapse similar to expTwoSynapse but consisting of two components that can differ in decay times and max conductances but share the same rise time. @@ -1287,23 +1253,19 @@ - + Synapse consisting of two independent synaptic mechanisms ( e. g. AMPA-R and NMDA-R ), which can be easily colocated in connections - - + + - + Biexponential synapse that allows for optional block and plasticity mechanisms, which can be expressed as child elements. @@ -1316,15 +1278,11 @@ - + - - - + + + @@ -1337,12 +1295,9 @@ - - - + + + @@ -1355,7 +1310,7 @@ - + Base type of any cell ( e. g. point neuron like **izhikevich2007Cell** , or a morphologically detailed **Cell** with **segment** s ) which can be used in a **population** @@ -1366,7 +1321,7 @@ - + Integrate and fire cell which returns to its leak reversal potential of **leakReversal** with a time constant **tau** @@ -1374,7 +1329,7 @@ - + Integrate and fire cell which returns to its leak reversal potential of **leakReversal** with a time course **tau.** It has a refractory period of **refract** after spiking @@ -1386,7 +1341,7 @@ - + Integrate and fire cell with capacitance **C,** **leakConductance** and **leakReversal** @@ -1394,7 +1349,7 @@ - + Integrate and fire cell with capacitance **C,** **leakConductance,** **leakReversal** and refractory period **refract** @@ -1408,7 +1363,7 @@ - + Cell based on the 2003 model of Izhikevich, see http://izhikevich. org/publications/spikes. htm @@ -1424,7 +1379,7 @@ - + Any cell with a membrane potential **v** with voltage units and a membrane capacitance **C.** Also defines exposed value **iSyn** for current due to external synapses and **iMemb** for total transmembrane current ( usually channel currents plus **iSyn** ) @@ -1441,7 +1396,7 @@ - + Cell based on the modified Izhikevich model in Izhikevich 2007, Dynamical systems in neuroscience, MIT Press @@ -1459,7 +1414,7 @@ - + Model based on Brette R and Gerstner W ( 2005 ) Adaptive Exponential Integrate-and-Fire Model as an Effective Description of Neuronal Activity. J Neurophysiol 94:3637-3642 @@ -1468,7 +1423,7 @@ - + Simple dimensionless model of spiking cell from FitzHugh and Nagumo. Superseded by **fitzHughNagumo1969Cell** ( See https://github. 
com/NeuroML/NeuroML2/issues/42 ) @@ -1482,7 +1437,7 @@ - + The Fitzhugh Nagumo model is a two-dimensional simplification of the Hodgkin-Huxley model of spike generation in squid giant axons. This system was suggested by FitzHugh ( FitzHugh R. [1961]: Impulses and physiological states in theoretical models of nerve membrane. Biophysical J. 1:445-466 ), who called it " Bonhoeffer-van der Pol model ", and the equivalent circuit by Nagumo et al. ( Nagumo J. , Arimoto S. , and Yoshizawa S. [1962] An active pulse transmission line simulating nerve axon. Proc IRE. 50:2061-2070. 1962 ). This version corresponds to the one described in FitzHugh R. [1969]: Mathematical models of excitation and propagation in nerve. Chapter 1 ( pp. 1-85 in H. P. Schwan, ed. Biological Engineering, McGraw-Hill Book Co. , N. Y. ) @@ -1517,7 +1472,7 @@ - + Reduced CA3 cell model from Pinsky and Rinzel 1994. See https://github. com/OpenSourceBrain/PinskyRinzelModel @@ -1550,7 +1505,7 @@ - + Cell with **segment** s specified in a **morphology** element along with details on its **biophysicalProperties** . NOTE: this can only be correctly simulated using jLEMS when there is a single segment in the cell, and **v** of this cell represents the membrane potential in that isopotential segment. @@ -1563,14 +1518,12 @@ - + Variant of cell with two independent Ca2+ pools. Cell with **segment** s specified in a **morphology** element along with details on its **biophysicalProperties** . NOTE: this can only be correctly simulated using jLEMS when there is a single segment in the cell, and **v** of this cell represents the membrane potential in that isopotential segment. - Standalone element which is usually inside a single cell, but could be outside and - referenced by id. - + The collection of **segment** s which specify the 3D structure of the cell, along with a number of **segmentGroup** s @@ -1601,7 +1554,7 @@ - + A segment defines the smallest unit within a possibly branching structure ( **morphology** ), such as a dendrite or axon. Its **id** should be a nonnegative integer ( usually soma/root = 0 ). Its end points are given by the **proximal** and **distal** points. The **proximal** point can be omitted, usually because it is the same as a point on the **parent** segment, see **proximal** for details. **parent** specifies the parent segment. The first segment of a **cell** ( with no **parent** ) usually represents the soma. The shape is normally a cylinder ( radii of the **proximal** and **distal** equal, but positions different ) or a conical frustum ( radii and positions different ). If the x, y, x positions of the **proximal** and **distal** are equal, the segment can be interpreted as a sphere, and in this case the radii of these points must be equal. NOTE: LEMS does not yet support multicompartmental modelling, so the Dynamics here is only appropriate for single compartment modelling. @@ -1611,7 +1564,7 @@ - A 3D point with diameter. + Base type for ComponentTypes which specify an ( **x,** **y,** **z** ) coordinate along with a **diameter.** Note: no dimension used in the attributes for these coordinates! These are assumed to have dimension micrometer ( 10^-6 m ). This is due to micrometers being the default option for the majority of neuronal morphology formats, and dimensions are omitted here to facilitate reading and writing of morphologies in NeuroML. @@ -1638,7 +1591,7 @@ - + A method to describe a group of **segment** s in a **morphology** , e. g. soma_group, dendrite_group, axon_group. 
While a name is useful to describe the group, the **neuroLexId** attribute can be used to explicitly specify the meaning of the group, e. g. sao1044911821 for 'Neuronal Cell Body', sao1211023249 for 'Dendrite'. The **segment** s in this group can be specified as: a list of individual **member** segments; a **path** , all of the segments along which should be included; a **subTree** of the **cell** to include; other segmentGroups to **include** ( so all segments from those get included here ). An **inhomogeneousParameter** can be defined on the region of the cell specified by this group ( see **variableParameter** for usage ). @@ -1652,7 +1605,7 @@ - + An inhomogeneous parameter specified across the **segmentGroup** ( see **variableParameter** for usage ). @@ -1675,25 +1628,25 @@ - + A single identified **segment** which is part of the **segmentGroup** - + Include all members of another **segmentGroup** in this group - + Include all the **segment** s between those specified by **from** and **to** , inclusive - + Include all the **segment** s distal to that specified by **from** in the **segmentGroup** @@ -1707,9 +1660,7 @@ - Standalone element which is usually inside a single cell, but could be outside and - referenced by id. - + The biophysical properties of the **cell** , including the **membraneProperties** and the **intracellularProperties** @@ -1727,9 +1678,7 @@ - Standalone element which is usually inside a single cell, but could be outside and - referenced by id. - + The biophysical properties of the **cell** , including the **membraneProperties2CaPools** and the **intracellularProperties2CaPools** for a cell with two Ca pools @@ -1770,7 +1719,7 @@ --> - + Properties specific to the membrane, such as the **populations** of channels, **channelDensities,** **specificCapacitance,** etc. @@ -1785,12 +1734,12 @@ - + Variant of membraneProperties with 2 independent Ca pools - Membrane potential at which to emit a spiking event. Note, usually the spiking event will not be emitted again until the membrane potential has fallen below this value and rises again to cross it in a positive direction. + Membrane potential at which to emit a spiking event. Note, usually the spiking event will not be emitted again until the membrane potential has fallen below this value and rises again to cross it in a positive direction @@ -1852,7 +1801,7 @@ - + Population of a **number** of ohmic ion channels. These each produce a conductance **channelg** across a reversal potential **erev,** giving a total current **i.** Note that active membrane currents are more frequently specified as a density over an area of the **cell** using **channelDensity** @@ -1881,7 +1830,7 @@ - + Specifies a time varying ohmic conductance density, which is distributed on a region of the **cell.** The conductance density of the channel is not uniform, but is set using the **variableParameter** . Note, there is no dynamical description of this in LEMS yet, as this type only makes sense for multicompartmental cells. A ComponentType for this needs to be present to enable export of NeuroML 2 multicompartmental cells via LEMS/jNeuroML to NEURON @@ -1909,7 +1858,7 @@ - + Specifies a time varying conductance density, which is distributed on a region of the **cell,** and whose reversal potential is calculated from the Nernst equation. Hard coded for Ca only!. The conductance density of the channel is not uniform, but is set using the **variableParameter** . 
Note, there is no dynamical description of this in LEMS yet, as this type only makes sense for multicompartmental cells. A ComponentType for this needs to be present to enable export of NeuroML 2 multicompartmental cells via LEMS/jNeuroML to NEURON @@ -1938,7 +1887,7 @@ - + Specifies a time varying conductance density, which is distributed on a region of the **cell,** and whose current is calculated from the Goldman-Hodgkin-Katz equation. Hard coded for Ca only!. The conductance density of the channel is not uniform, but is set using the **variableParameter** . Note, there is no dynamical description of this in LEMS yet, as this type only makes sense for multicompartmental cells. A ComponentType for this needs to be present to enable export of NeuroML 2 multicompartmental cells via LEMS/jNeuroML to NEURON @@ -1974,7 +1923,7 @@ - + Specifies a time varying ohmic conductance density, **gDensity,** which is distributed on an area of the **cell** ( specified in **membraneProperties** ) with fixed reversal potential **erev** producing a current density **iDensity** @@ -1986,7 +1935,7 @@ - + Same as **channelDensity** , but with a **vShift** parameter to change voltage activation of gates. The exact usage of **vShift** in expressions for rates is determined by the individual gates. @@ -2022,7 +1971,7 @@ - + Specifies a time varying conductance density, **gDensity,** which is distributed on an area of the **cell,** producing a current density **iDensity** and whose reversal potential is calculated from the Nernst equation. Hard coded for Ca only! See https://github. com/OpenSourceBrain/ghk-nernst. @@ -2032,7 +1981,7 @@ - + This component is similar to the original component type **channelDensityNernst** but it is changed in order to have a reversal potential that depends on a second independent Ca++ pool ( ca2 ). See https://github. com/OpenSourceBrain/ghk-nernst. @@ -2063,7 +2012,7 @@ - + Specifies a time varying conductance density, **gDensity,** which is distributed on an area of the cell, producing a current density **iDensity** and whose reversal potential is calculated from the Goldman Hodgkin Katz equation. Hard coded for Ca only! See https://github. com/OpenSourceBrain/ghk-nernst. @@ -2097,7 +2046,7 @@ - + Time varying conductance density, **gDensity,** which is distributed on an area of the cell, producing a current density **iDensity.** Modified version of Jaffe et al. 1994 ( used also in Lawrence et al. 2006 ). See https://github. com/OpenSourceBrain/ghk-nernst. @@ -2107,12 +2056,12 @@ - + Specifies a **parameter** ( e. g. condDensity ) which can vary its value across a **segmentGroup.** The value is calculated from **value** attribute of the **inhomogeneousValue** subelement. This element is normally a child of **channelDensityNonUniform** , **channelDensityNonUniformNernst** or **channelDensityNonUniformGHK** and is used to calculate the value of the conductance, etc. which will vary on different parts of the cell. The **segmentGroup** specified here needs to define an **inhomogeneousParameter** ( referenced from **inhomogeneousParameter** in the **inhomogeneousValue** ), which calculates a **variable** ( e. g. p ) varying across the cell ( e. g. 
based on the path length from soma ), which is then used in the **value** attribute of the **inhomogeneousValue** ( so for example condDensity = f( p ) ) - + Specifies the **value** of an **inhomogeneousParameter.** For usage see **variableParameter** - + Specifies a single instance of a component in a **population** ( placed at **location** ). - + Specifies the ( x, y, z ) location of a single **instance** of a component in a **population** @@ -2717,8 +2589,7 @@ - Single explicit connection. Introduced to test connections in LEMS. Will probably be removed in favour of - connections wrapped in projection element + Explicit event connection between named components, which gets processed via a new instance of a **synapse** component which is created on the target component @@ -2741,7 +2612,7 @@ - Projection (set of synaptic connections) between two populations. Chemical/event based synaptic transmission + Projection from one population, **presynapticPopulation** to another, **postsynapticPopulation,** through **synapse.** Contains lists of **connection** or **connectionWD** elements. @@ -2804,7 +2675,7 @@ - Individual chemical (event based) synaptic connection, weight==1 and no delay + Event connection directly between named components, which gets processed via a new instance of a **synapse** component which is created on the target component. Normally contained inside a **projection** element. @@ -2814,7 +2685,7 @@ - Individual synaptic connection with weight and delay + Event connection between named components, which gets processed via a new instance of a synapse component which is created on the target component, includes setting of **weight** and **delay** for the synaptic connection @@ -2827,7 +2698,7 @@ - Projection between two populations consisting of electrical connections (gap junctions) + A projection between **presynapticPopulation** to another **postsynapticPopulation** through gap junctions. @@ -2843,7 +2714,7 @@ - Individual electrical synaptic connection + To enable connections between populations through gap junctions. @@ -2855,7 +2726,7 @@ - Projection between two populations consisting of analog connections (e.g. graded synapses) + To enable connections between populations through gap junctions. Populations need to be of type **populationList** and contain **instance** and **location** elements. @@ -2864,7 +2735,7 @@ - Projection between two populations consisting of analog connections (e.g. graded synapses). Includes setting of weight for the connection + To enable connections between populations through gap junctions. Populations need to be of type **populationList** and contain **instance** and **location** elements. Includes setting of **weight** for the connection @@ -2876,7 +2747,7 @@ - Projection between two populations consisting of analog connections (e.g. graded synapses) + A projection between **presynapticPopulation** and **postsynapticPopulation** through components **preComponent** at the start and **postComponent** at the end of a **continuousConnection** or **continuousConnectionInstance** . Can be used for analog synapses. @@ -2892,7 +2763,7 @@ - Individual continuous/analog synaptic connection + An instance of a connection in a **continuousProjection** between **presynapticPopulation** to another **postsynapticPopulation** through a **preComponent** at the start and **postComponent** at the end. Can be used for analog synapses. 
@@ -2905,7 +2776,7 @@ - Individual continuous/analog synaptic connection - instance based + An instance of a connection in a **continuousProjection** between **presynapticPopulation** to another **postsynapticPopulation** through a **preComponent** at the start and **postComponent** at the end. Populations need to be of type **populationList** and contain **instance** and **location** elements. Can be used for analog synapses. @@ -2915,7 +2786,7 @@ - Individual continuous/analog synaptic connection - instance based. Includes setting of _weight for the connection + An instance of a connection in a **continuousProjection** between **presynapticPopulation** to another **postsynapticPopulation** through a **preComponent** at the start and **postComponent** at the end. Populations need to be of type **populationList** and contain **instance** and **location** elements. Can be used for analog synapses. Includes setting of **weight** for the connection @@ -2927,8 +2798,7 @@ - Single explicit input. Introduced to test inputs in LEMS. Will probably be removed in favour of - inputs wrapped in inputList element + An explicit input ( anything which extends **basePointCurrent** ) to a target cell in a population @@ -2938,7 +2808,7 @@ - List of inputs to a population. Currents will be provided by the specified component. + An explicit list of **input** s to a **population.** @@ -2955,7 +2825,7 @@ - Individual input to the cell specified by target + Specifies a single input to a **target,** optionally giving the **segmentId** ( default 0 ) and **fractionAlong** the segment ( default 0. 5 ). @@ -2967,7 +2837,7 @@ - Individual input to the cell specified by target. Includes setting of _weight for the connection + Specifies input lists. Can set **weight** to scale individual inputs. @@ -2994,7 +2864,7 @@ - + Base type of any PyNN standard cell model. Note: membrane potential **v** has dimensions voltage, but all other parameters are dimensionless. This is to facilitate translation to and from PyNN scripts in Python, where these parameters have implicit units, see http://neuralensemble. org/trac/PyNN/wiki/StandardModels @@ -3008,7 +2878,7 @@ - + Base type of any PyNN standard integrate and fire model @@ -3019,7 +2889,7 @@ - + Base type of conductance based PyNN IaF cell models @@ -3030,7 +2900,7 @@ - + Leaky integrate and fire model with fixed threshold and alpha-function-shaped post-synaptic current @@ -3039,7 +2909,7 @@ - + Leaky integrate and fire model with fixed threshold and decaying-exponential post-synaptic current @@ -3048,7 +2918,7 @@ - + Leaky integrate and fire model with fixed threshold and alpha-function-shaped post-synaptic conductance @@ -3057,7 +2927,7 @@ - + Leaky integrate and fire model with fixed threshold and exponentially-decaying post-synaptic conductance @@ -3070,7 +2940,7 @@ - + Adaptive exponential integrate and fire neuron according to Brette R and Gerstner W ( 2005 ) with exponentially-decaying post-synaptic conductance @@ -3078,7 +2948,7 @@ - + Adaptive exponential integrate and fire neuron according to Brette R and Gerstner W ( 2005 ) with alpha-function-shaped post-synaptic conductance @@ -3096,7 +2966,7 @@ - + Single-compartment Hodgkin-Huxley-type neuron with transient sodium and delayed-rectifier potassium currents using the ion channel models from Traub. @@ -3107,7 +2977,7 @@ - + Base type for all PyNN synapses. 
Note, the current **I** produced is dimensionless, but it requires a membrane potential **v** with dimension voltage @@ -3117,7 +2987,7 @@ - + Conductance based synapse with instantaneous rise and single exponential decay ( with time constant tau_syn ) @@ -3127,7 +2997,7 @@ - + Alpha synapse: rise time and decay time are both tau_syn. Conductance based synapse. @@ -3137,7 +3007,7 @@ - + Current based synapse with instantaneous rise and single exponential decay ( with time constant tau_syn ) @@ -3147,7 +3017,7 @@ - + Alpha synapse: rise time and decay time are both tau_syn. Current based synapse. @@ -3161,7 +3031,7 @@ - + Spike source, generating spikes according to a Poisson process. From 36ef5c739d44096c46dc2f4719dccee1eca493c6 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Wed, 17 Nov 2021 11:11:46 +0000 Subject: [PATCH 111/136] chore: regenerate nml.py to include documentation --- neuroml/nml/nml.py | 327 ++++++++++++++++++++++++++++++++++++--------- 1 file changed, 266 insertions(+), 61 deletions(-) diff --git a/neuroml/nml/nml.py b/neuroml/nml/nml.py index 86ebafe5..ffda0870 100644 --- a/neuroml/nml/nml.py +++ b/neuroml/nml/nml.py @@ -2,7 +2,7 @@ # -*- coding: utf-8 -*- # -# Generated Mon Nov 8 21:00:41 2021 by generateDS.py version 2.40.3. +# Generated Wed Nov 17 11:10:33 2021 by generateDS.py version 2.40.3. # Python 3.10.0 (default, Oct 4 2021, 00:00:00) [GCC 11.2.1 20210728 (Red Hat 11.2.1-1)] # # Command line options: @@ -1204,7 +1204,7 @@ class populationTypes(str, Enum): class Property(GeneratedsSuper): - """Property -- Generic property with a tag and value""" + """Property -- A property ( a **tag** and **value** pair ), which can be on any **baseStandalone** either as a direct child, or within an **Annotation** . Generally something which helps the visual display or facilitates simulation of a Component, but is not a core physiological property. Common examples include: **numberInternalDivisions,** equivalent of nseg in NEURON; **radius,** for a radius to use in graphical displays for abstract cells ( i. e. without defined morphologies ); **color,** the color to use for a **Population** or **populationList** of cells; **recommended_dt_ms,** the recommended timestep to use for simulating a **Network** , **recommended_duration_ms** the recommended duration to use when running a **Network**""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -1360,7 +1360,7 @@ def _buildChildren( class Annotation(GeneratedsSuper): - """Annotation -- Placeholder for MIRIAM related metadata, among others.""" + """Annotation -- A structured annotation containing metadata, specifically RDF or **property** elements""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -4770,6 +4770,8 @@ def _buildChildren( class Q10ConductanceScaling(GeneratedsSuper): + """Q10ConductanceScaling -- A value for the conductance scaling which varies as a standard function of the difference between the current temperature, **temperature,** and the temperature at which the conductance was originally determined, **experimentalTemp**""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_( @@ -7213,7 +7215,7 @@ def _buildChildren( class Point3DWithDiam(GeneratedsSuper): - """Point3DWithDiam -- A 3D point with diameter.""" + """Point3DWithDiam -- Base type for ComponentTypes which specify an ( **x,** **y,** **z** ) coordinate along with a **diameter.** Note: no dimension used in the attributes for these coordinates! 
These are assumed to have dimension micrometer ( 10^-6 m ). This is due to micrometers being the default option for the majority of neuronal morphology formats, and dimensions are omitted here to facilitate reading and writing of morphologies in NeuroML.""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -7754,6 +7756,8 @@ def _buildChildren( class Member(GeneratedsSuper): + """Member -- A single identified **segment** which is part of the **segmentGroup**""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_( @@ -7915,6 +7919,8 @@ def _buildChildren( class Include(GeneratedsSuper): + """Include -- Include all members of another **segmentGroup** in this group""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_( @@ -8092,6 +8098,8 @@ def _buildChildren( class Path(GeneratedsSuper): + """Path -- Include all the **segment** s between those specified by **from** and **to** , inclusive""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_( @@ -8273,6 +8281,8 @@ def _buildChildren( class SubTree(GeneratedsSuper): + """SubTree -- Include all the **segment** s distal to that specified by **from** in the **segmentGroup**""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_( @@ -8620,6 +8630,8 @@ def _buildChildren( class MembraneProperties(GeneratedsSuper): + """MembraneProperties -- Properties specific to the membrane, such as the **populations** of channels, **channelDensities,** **specificCapacitance,** etc.""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_( @@ -9251,6 +9263,8 @@ def _buildChildren( class MembraneProperties2CaPools(MembraneProperties): + """MembraneProperties2CaPools -- Variant of membraneProperties with 2 independent Ca pools""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_( @@ -9477,7 +9491,7 @@ def _buildChildren( class SpikeThresh(GeneratedsSuper): - """SpikeThresh -- Membrane potential at which to emit a spiking event. Note, usually the spiking event will not be emitted again until the membrane potential has fallen below this value and rises again to cross it in a positive direction.""" + """SpikeThresh -- Membrane potential at which to emit a spiking event. Note, usually the spiking event will not be emitted again until the membrane potential has fallen below this value and rises again to cross it in a positive direction""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -10451,6 +10465,8 @@ def _buildChildren( class VariableParameter(GeneratedsSuper): + """VariableParameter -- Specifies a **parameter** ( e. g. condDensity ) which can vary its value across a **segmentGroup.** The value is calculated from **value** attribute of the **inhomogeneousValue** subelement. This element is normally a child of **channelDensityNonUniform** , **channelDensityNonUniformNernst** or **channelDensityNonUniformGHK** and is used to calculate the value of the conductance, etc. which will vary on different parts of the cell. The **segmentGroup** specified here needs to define an **inhomogeneousParameter** ( referenced from **inhomogeneousParameter** in the **inhomogeneousValue** ), which calculates a **variable** ( e. g. p ) varying across the cell ( e. g. 
based on the path length from soma ), which is then used in the **value** attribute of the **inhomogeneousValue** ( so for example condDensity = f( p ) )""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_( @@ -10670,6 +10686,8 @@ def _buildChildren( class InhomogeneousValue(GeneratedsSuper): + """InhomogeneousValue -- Specifies the **value** of an **inhomogeneousParameter.** For usage see **variableParameter**""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_( @@ -10852,6 +10870,8 @@ class Species(GeneratedsSuper): """ion -- Specifying the ion here again is redundant, the ion name should be the same as id. Kept for now until LEMS implementation can select by id. TODO: remove. + * Species -- Description of a chemical species identified by **ion,** which has internal, **concentration,** and external, **extConcentration** values for its concentration + """ __hash__ = GeneratedsSuper.__hash__ @@ -11200,6 +11220,8 @@ def _buildChildren( class IntracellularProperties(GeneratedsSuper): + """IntracellularProperties -- Biophysical properties related to the intracellular space within the **cell** , such as the **resistivity** and the list of ionic **species** present. **caConc** and **caConcExt** are explicitly exposed here to facilitate accessing these values from other Components, even though **caConcExt** is clearly not an intracellular property""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_( @@ -11431,6 +11453,8 @@ def _buildChildren( class IntracellularProperties2CaPools(IntracellularProperties): + """IntracellularProperties2CaPools -- Variant of intracellularProperties with 2 independent Ca pools""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [] subclass = None @@ -12783,6 +12807,8 @@ def _buildChildren( class Instance(GeneratedsSuper): + """Instance -- Specifies a single instance of a component in a **population** ( placed at **location** ).""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_( @@ -13007,6 +13033,8 @@ def __repr__(self): class Location(GeneratedsSuper): + """Location -- Specifies the ( x, y, z ) location of a single **instance** of a component in a **population**""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_("x", "xs:float", 0, 0, {"use": "required", "name": "x"}), @@ -13181,10 +13209,7 @@ def __repr__(self): class SynapticConnection(GeneratedsSuper): - """SynapticConnection -- Single explicit connection. Introduced to test connections in LEMS. Will probably be removed in favour of - connections wrapped in projection element - - """ + """SynapticConnection -- Explicit event connection between named components, which gets processed via a new instance of a **synapse** component which is created on the target component""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -13463,10 +13488,7 @@ def __str__(self): class ExplicitInput(GeneratedsSuper): - """ExplicitInput -- Single explicit input. Introduced to test inputs in LEMS. 
Will probably be removed in favour of - inputs wrapped in inputList element - - """ + """ExplicitInput -- An explicit input ( anything which extends **basePointCurrent** ) to a target cell in a population""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -13723,7 +13745,7 @@ def __str__(self): class Input(GeneratedsSuper): - """Input -- Individual input to the cell specified by target""" + """Input -- Specifies a single input to a **target,** optionally giving the **segmentId** ( default 0 ) and **fractionAlong** the segment ( default 0. 5 ).""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -14098,7 +14120,7 @@ def __str__(self): class InputW(Input): - """InputW -- Individual input to the cell specified by target. Includes setting of _weight for the connection""" + """InputW -- Specifies input lists. Can set **weight** to scale individual inputs.""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -14280,7 +14302,10 @@ def __str__(self): class BaseWithoutId(GeneratedsSuper): - """BaseWithoutId -- Base element without ID specified *yet*, e.g. for an element with a particular requirement on its id which does not comply with NmlId (e.g. Segment needs nonNegativeInteger).""" + """BaseWithoutId -- Base element w + ithout ID specified *yet*, e.g. for an element with a particular requirement on its id which does not comply with NmlId (e.g. Segment needs nonNegativeInteger). + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -15420,6 +15445,8 @@ def _buildChildren( class SpikeSourcePoisson(Standalone): + """SpikeSourcePoisson -- Spike source, generating spikes according to a Poisson process.""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_( @@ -15723,7 +15750,7 @@ def _buildChildren( class InputList(Base): - """InputList -- List of inputs to a population. Currents will be provided by the specified component.""" + """InputList -- An explicit list of **input** s to a **population.**""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -16687,6 +16714,8 @@ def _buildChildren( class Population(Standalone): + """Population -- A population of components, with just one parameter for the **size,** i. e. number of components to create. Note: quite often this is used with type= **populationList** which means the size is determined by the number of **instance** s ( with **location** s ) in the list. The **size** attribute is still set, and there will be a validation error if this does not match the number in the list.""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_( @@ -17130,6 +17159,8 @@ def __str__(self): class Region(Base): + """Region -- Initial attempt to specify 3D region for placing cells. Work in progress. . .""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_("spaces", "NmlId", 0, 1, {"use": "optional", "name": "spaces"}), @@ -17572,6 +17603,8 @@ def _buildChildren( class Network(Standalone): + """Network -- Network containing: **population** s ( potentially of type **populationList** , and so specifying a list of cell **location** s ); **projection** s ( with lists of **connection** s ) and/or **explicitConnection** s; and **inputList** s ( with lists of **input** s ) and/or **explicitInput** s. Note: often in NeuroML this will be of type **networkWithTemperature** if there are temperature dependent elements ( e. g. 
ion channels ).""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_("type", "networkTypes", 0, 1, {"use": "optional", "name": "type"}), @@ -18328,6 +18361,8 @@ def exportHdf5(self, h5file, h5Group): class TransientPoissonFiringSynapse(Standalone): + """TransientPoissonFiringSynapse -- Poisson spike generator firing at **averageRate** after a **delay** and for a **duration,** connected to single **synapse** that is triggered every time a spike is generated, providing an input current. Similar to ComponentType **poissonFiringSynapse** .""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_( @@ -18690,6 +18725,8 @@ def _buildChildren( class PoissonFiringSynapse(Standalone): + """PoissonFiringSynapse -- Poisson spike generator firing at **averageRate,** which is connected to single **synapse** that is triggered every time a spike is generated, producing an input current. See also **transientPoissonFiringSynapse** .""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_( @@ -18961,6 +18998,8 @@ def _buildChildren( class SpikeGeneratorPoisson(Standalone): + """SpikeGeneratorPoisson -- Generator of spikes whose ISI is distributed according to an exponential PDF with scale: 1 / **averageRate**""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_( @@ -19209,6 +19248,8 @@ def _buildChildren( class SpikeGeneratorRandom(Standalone): + """SpikeGeneratorRandom -- Generator of spikes with a random interspike interval of at least **minISI** and at most **maxISI**""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_( @@ -19453,6 +19494,8 @@ def _buildChildren( class SpikeGenerator(Standalone): + """SpikeGenerator -- Simple generator of spikes at a regular interval set by **period**""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_( @@ -19660,6 +19703,8 @@ def _buildChildren( class TimedSynapticInput(Standalone): + """TimedSynapticInput -- Spike array connected to a single **synapse,** producing a current triggered by each **spike** in the array.""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_("synapse", "NmlId", 0, 0, {"use": "required", "name": "synapse"}), @@ -19938,6 +19983,8 @@ def _buildChildren( class SpikeArray(Standalone): + """SpikeArray -- Set of spike ComponentTypes, each emitting one spike at a certain time. Can be used to feed a predetermined spike train into a cell""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_( @@ -20124,6 +20171,8 @@ def _buildChildren( class Spike(BaseNonNegativeIntegerId): + """Spike -- Emits a single spike at the specified **time**""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_( @@ -20315,6 +20364,8 @@ def _buildChildren( class VoltageClampTriple(Standalone): + """VoltageClampTriple -- Voltage clamp with 3 clamp levels. Applies a variable current **i** ( through **simpleSeriesResistance** ) to try to keep parent cell at **conditioningVoltage** until time **delay,** **testingVoltage** until **delay** + **duration,** and **returnVoltage** afterwards. Only enabled if **active** = 1.""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_("active", "ZeroOrOne", 0, 0, {"use": "required", "name": "active"}), @@ -20800,6 +20851,8 @@ def _buildChildren( class VoltageClamp(Standalone): + """VoltageClamp -- Voltage clamp. Applies a variable current **i** to try to keep parent at **targetVoltage.** Not yet fully tested!!! 
Consider using voltageClampTriple!!""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_( @@ -21166,6 +21219,8 @@ def _buildChildren( class CompoundInputDL(Standalone): + """CompoundInputDL -- Generates a current which is the sum of all its child **basePointCurrentDL** elements, e. g. can be a combination of **pulseGeneratorDL** , **sineGeneratorDL** elements producing a single **i.** Scaled by **weight,** if set""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_( @@ -21438,6 +21493,8 @@ def _buildChildren( class CompoundInput(Standalone): + """CompoundInput -- Generates a current which is the sum of all its child **basePointCurrent** element, e. g. can be a combination of **pulseGenerator** , **sineGenerator** elements producing a single **i.** Scaled by **weight,** if set""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_( @@ -21710,6 +21767,8 @@ def _buildChildren( class RampGeneratorDL(Standalone): + """RampGeneratorDL -- Dimensionless equivalent of **rampGenerator** . Generates a ramping current after a time **delay,** for a fixed **duration.** During this time the dimensionless current steadily changes from **startAmplitude** to **finishAmplitude.** Scaled by **weight,** if set""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_( @@ -22077,6 +22136,8 @@ def _buildChildren( class RampGenerator(Standalone): + """RampGenerator -- Generates a ramping current after a time **delay,** for a fixed **duration.** During this time the current steadily changes from **startAmplitude** to **finishAmplitude.** Scaled by **weight,** if set""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_( @@ -22444,6 +22505,8 @@ def _buildChildren( class SineGeneratorDL(Standalone): + """SineGeneratorDL -- Dimensionless equivalent of **sineGenerator** . Generates a sinusoidally varying current after a time **delay,** for a fixed **duration.** The **period** and maximum **amplitude** of the current can be set as well as the **phase** at which to start. Scaled by **weight,** if set""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_( @@ -22791,6 +22854,8 @@ def _buildChildren( class SineGenerator(Standalone): + """SineGenerator -- Generates a sinusoidally varying current after a time **delay,** for a fixed **duration.** The **period** and maximum **amplitude** of the current can be set as well as the **phase** at which to start. Scaled by **weight,** if set""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_( @@ -23170,7 +23235,7 @@ def _buildChildren( class PulseGeneratorDL(Standalone): - """PulseGeneratorDL -- Generates a constant current pulse of a certain amplitude (non dimensional) for a specified duration after a delay.""" + """PulseGeneratorDL -- Dimensionless equivalent of **pulseGenerator** . 
Generates a constant current pulse of a certain **amplitude** for a specified **duration** after a **delay.** Scaled by **weight,** if set""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -23477,7 +23542,7 @@ def _buildChildren( class PulseGenerator(Standalone): - """PulseGenerator -- Generates a constant current pulse of a certain amplitude (with dimensions for current) for a specified duration after a delay.""" + """PulseGenerator -- Generates a constant current pulse of a certain **amplitude** for a specified **duration** after a **delay.** Scaled by **weight,** if set""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -24188,6 +24253,8 @@ class ChannelDensityGHK2(Base): It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel element. TODO: remove. + * ChannelDensityGHK2 -- Time varying conductance density, **gDensity,** which is distributed on an area of the cell, producing a current density **iDensity.** Modified version of Jaffe et al. 1994 ( used also in Lawrence et al. 2006 ). See https://github. com/OpenSourceBrain/ghk-nernst. + """ __hash__ = GeneratedsSuper.__hash__ @@ -24536,6 +24603,8 @@ class ChannelDensityGHK(Base): It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel element. TODO: remove. + * ChannelDensityGHK -- Specifies a time varying conductance density, **gDensity,** which is distributed on an area of the cell, producing a current density **iDensity** and whose reversal potential is calculated from the Goldman Hodgkin Katz equation. Hard coded for Ca only! See https://github. com/OpenSourceBrain/ghk-nernst. + """ __hash__ = GeneratedsSuper.__hash__ @@ -24666,7 +24735,7 @@ def validate_Nml2Quantity_permeability(self, value): validate_Nml2Quantity_permeability_patterns_ = [ [ - "^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(m_per_s| um_per_ms|cm_per_s|cm_per_ms))$" + "^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(m_per_s|um_per_ms|cm_per_s|cm_per_ms))$" ] ] @@ -24882,6 +24951,8 @@ class ChannelDensityNernst(Base): It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel element. TODO: remove. + * ChannelDensityNernst -- Specifies a time varying conductance density, **gDensity,** which is distributed on an area of the **cell,** producing a current density **iDensity** and whose reversal potential is calculated from the Nernst equation. Hard coded for Ca only! See https://github. com/OpenSourceBrain/ghk-nernst. + """ __hash__ = GeneratedsSuper.__hash__ @@ -25291,6 +25362,8 @@ class ChannelDensity(Base): It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel element. TODO: remove. + * ChannelDensity -- Specifies a time varying ohmic conductance density, **gDensity,** which is distributed on an area of the **cell** ( specified in **membraneProperties** ) with fixed reversal potential **erev** producing a current density **iDensity** + """ __hash__ = GeneratedsSuper.__hash__ @@ -25768,6 +25841,8 @@ class ChannelDensityNonUniformGHK(Base): It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel element. TODO: remove. + * ChannelDensityNonUniformGHK -- Specifies a time varying conductance density, which is distributed on a region of the **cell,** and whose current is calculated from the Goldman-Hodgkin-Katz equation. Hard coded for Ca only!. 
The conductance density of the channel is not uniform, but is set using the **variableParameter** . Note, there is no dynamical description of this in LEMS yet, as this type only makes sense for multicompartmental cells. A ComponentType for this needs to be present to enable export of NeuroML 2 multicompartmental cells via LEMS/jNeuroML to NEURON + """ __hash__ = GeneratedsSuper.__hash__ @@ -26056,6 +26131,8 @@ class ChannelDensityNonUniformNernst(Base): It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel element. TODO: remove. + * ChannelDensityNonUniformNernst -- Specifies a time varying conductance density, which is distributed on a region of the **cell,** and whose reversal potential is calculated from the Nernst equation. Hard coded for Ca only!. The conductance density of the channel is not uniform, but is set using the **variableParameter** . Note, there is no dynamical description of this in LEMS yet, as this type only makes sense for multicompartmental cells. A ComponentType for this needs to be present to enable export of NeuroML 2 multicompartmental cells via LEMS/jNeuroML to NEURON + """ __hash__ = GeneratedsSuper.__hash__ @@ -26346,6 +26423,8 @@ class ChannelDensityNonUniform(Base): It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel element. TODO: remove. + * ChannelDensityNonUniform -- Specifies a time varying ohmic conductance density, which is distributed on a region of the **cell.** The conductance density of the channel is not uniform, but is set using the **variableParameter** . Note, there is no dynamical description of this in LEMS yet, as this type only makes sense for multicompartmental cells. A ComponentType for this needs to be present to enable export of NeuroML 2 multicompartmental cells via LEMS/jNeuroML to NEURON + """ __hash__ = GeneratedsSuper.__hash__ @@ -26688,6 +26767,8 @@ class ChannelPopulation(Base): It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel element. TODO: remove. + * ChannelPopulation -- Population of a **number** of ohmic ion channels. These each produce a conductance **channelg** across a reversal potential **erev,** giving a total current **i.** Note that active membrane currents are more frequently specified as a density over an area of the **cell** using **channelDensity** + """ __hash__ = GeneratedsSuper.__hash__ @@ -27106,10 +27187,7 @@ def _buildChildren( class BiophysicalProperties2CaPools(Standalone): - """BiophysicalProperties2CaPools -- Standalone element which is usually inside a single cell, but could be outside and - referenced by id. - - """ + """BiophysicalProperties2CaPools -- The biophysical properties of the **cell** , including the **membraneProperties2CaPools** and the **intracellularProperties2CaPools** for a cell with two Ca pools""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -27389,10 +27467,7 @@ def _buildChildren( class BiophysicalProperties(Standalone): - """BiophysicalProperties -- Standalone element which is usually inside a single cell, but could be outside and - referenced by id. 
- - """ + """BiophysicalProperties -- The biophysical properties of the **cell** , including the **membraneProperties** and the **intracellularProperties**""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -27664,6 +27739,8 @@ def _buildChildren( class InhomogeneousParameter(Base): + """InhomogeneousParameter -- An inhomogeneous parameter specified across the **segmentGroup** ( see **variableParameter** for usage ).""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_( @@ -27961,6 +28038,8 @@ def _buildChildren( class SegmentGroup(Base): + """SegmentGroup -- A method to describe a group of **segment** s in a **morphology** , e. g. soma_group, dendrite_group, axon_group. While a name is useful to describe the group, the **neuroLexId** attribute can be used to explicitly specify the meaning of the group, e. g. sao1044911821 for 'Neuronal Cell Body', sao1211023249 for 'Dendrite'. The **segment** s in this group can be specified as: a list of individual **member** segments; a **path** , all of the segments along which should be included; a **subTree** of the **cell** to include; other segmentGroups to **include** ( so all segments from those get included here ). An **inhomogeneousParameter** can be defined on the region of the cell specified by this group ( see **variableParameter** for usage ).""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_( @@ -28453,6 +28532,8 @@ def __repr__(self): class Segment(BaseNonNegativeIntegerId): + """Segment -- A segment defines the smallest unit within a possibly branching structure ( **morphology** ), such as a dendrite or axon. Its **id** should be a nonnegative integer ( usually soma/root = 0 ). Its end points are given by the **proximal** and **distal** points. The **proximal** point can be omitted, usually because it is the same as a point on the **parent** segment, see **proximal** for details. **parent** specifies the parent segment. The first segment of a **cell** ( with no **parent** ) usually represents the soma. The shape is normally a cylinder ( radii of the **proximal** and **distal** equal, but positions different ) or a conical frustum ( radii and positions different ). If the x, y, x positions of the **proximal** and **distal** are equal, the segment can be interpreted as a sphere, and in this case the radii of these points must be equal. NOTE: LEMS does not yet support multicompartmental modelling, so the Dynamics here is only appropriate for single compartment modelling.""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_("name", "xs:string", 0, 1, {"use": "optional", "name": "name"}), @@ -28837,10 +28918,7 @@ def surface_area(self): class Morphology(Standalone): - """Morphology -- Standalone element which is usually inside a single cell, but could be outside and - referenced by id. - - """ + """Morphology -- The collection of **segment** s which specify the 3D structure of the cell, along with a number of **segmentGroup** s""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -29074,6 +29152,8 @@ def num_segments(self): class BaseCell(Standalone): + """BaseCell -- Base type of any cell ( e. g. point neuron like **izhikevich2007Cell** , or a morphologically detailed **Cell** with **segment** s ) which can be used in a **population**""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [] subclass = None @@ -29243,6 +29323,8 @@ def _buildChildren( class BaseSynapse(Standalone): + """BaseSynapse -- Base type for all synapses, i. e. 
ComponentTypes which produce a current ( dimension current ) and change Dynamics in response to an incoming event. cno_0000009""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [] subclass = None @@ -29417,7 +29499,10 @@ def _buildChildren( class FixedFactorConcentrationModel(Standalone): - """ion -- Should not be required, as it's present on the species element!""" + """ion -- Should not be required, as it's present on the species element! + FixedFactorConcentrationModel -- Model of buffering of concentration of an ion ( currently hard coded to be calcium, due to requirement for **iCa** ) which has a baseline level **restingConc** and tends to this value with time course **decayConstant.** A fixed factor **rho** is used to scale the incoming current *independently of the size of the compartment* to produce a concentration change. + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -29818,7 +29903,10 @@ def _buildChildren( class DecayingPoolConcentrationModel(Standalone): - """ion -- Should not be required, as it's present on the species element!""" + """ion -- Should not be required, as it's present on the species element! + DecayingPoolConcentrationModel -- Model of an intracellular buffering mechanism for **ion** ( currently hard Coded to be calcium, due to requirement for **iCa** ) which has a baseline level **restingConc** and tends to this value with time course **decayConstant.** The ion is assumed to occupy a shell inside the membrane of thickness **shellThickness.** + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -30637,6 +30725,8 @@ def _buildChildren( class GateFractional(Base): + """GateFractional -- Gate composed of subgates contributing with fractional conductance""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_( @@ -30964,6 +31054,8 @@ def _buildChildren( class GateHHInstantaneous(Base): + """GateHHInstantaneous -- Gate which follows the general Hodgkin Huxley formalism but is instantaneous, so tau = 0 and gate follows exactly inf value""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_( @@ -31264,6 +31356,8 @@ def _buildChildren( class GateHHRatesInf(Base): + """GateHHRatesInf -- Gate which follows the general Hodgkin Huxley formalism""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_( @@ -31645,6 +31739,8 @@ def _buildChildren( class GateHHRatesTau(Base): + """GateHHRatesTau -- Gate which follows the general Hodgkin Huxley formalism""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_( @@ -32026,6 +32122,8 @@ def _buildChildren( class GateHHRatesTauInf(Base): + """GateHHRatesTauInf -- Gate which follows the general Hodgkin Huxley formalism""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_( @@ -32446,6 +32544,8 @@ def _buildChildren( class GateHHTauInf(Base): + """GateHHTauInf -- Gate which follows the general Hodgkin Huxley formalism""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_( @@ -32794,6 +32894,8 @@ def _buildChildren( class GateHHRates(Base): + """GateHHRates -- Gate which follows the general Hodgkin Huxley formalism""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_( @@ -33662,6 +33764,8 @@ def _buildChildren( class GateKS(Base): + """GateKS -- A gate which consists of multiple **KSState** s and **KSTransition** s giving the rates of transition between them""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_( @@ 
-34123,6 +34227,8 @@ def _buildChildren( class TauInfTransition(Base): + """TauInfTransition -- KS Transition specified in terms of time constant **tau** and steady state **inf**""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_("from_", "NmlId", 0, 0, {"use": "required", "name": "from_"}), @@ -34414,6 +34520,8 @@ def _buildChildren( class ReverseTransition(Base): + """ReverseTransition -- A reverse only **KSTransition** for a **gateKS** which specifies a **rate** ( type **baseHHRate** ) which follows one of the standard Hodgkin Huxley forms ( e. g. **HHExpRate** , **HHSigmoidRate** , **HHExpLinearRate**""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_("from_", "NmlId", 0, 0, {"use": "required", "name": "from_"}), @@ -34661,6 +34769,8 @@ def _buildChildren( class ForwardTransition(Base): + """ForwardTransition -- A forward only **KSTransition** for a **gateKS** which specifies a **rate** ( type **baseHHRate** ) which follows one of the standard Hodgkin Huxley forms ( e. g. **HHExpRate** , **HHSigmoidRate** , **HHExpLinearRate**""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_("from_", "NmlId", 0, 0, {"use": "required", "name": "from_"}), @@ -34908,6 +35018,8 @@ def _buildChildren( class OpenState(Base): + """OpenState -- A **KSState** with **relativeConductance** of 1""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [] subclass = None @@ -35040,6 +35152,8 @@ def _buildChildren( class ClosedState(Base): + """ClosedState -- A **KSState** with **relativeConductance** of 0""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [] subclass = None @@ -35177,8 +35291,8 @@ def _buildChildren( class IonChannelKS(Standalone): - """Kinetic scheme based ion channel. - IonChannelKS -- Kinetic scheme based ion channel. + """A kinetic scheme based ion channel with multiple **gateKS** s, each of which consists of multiple **KSState** s and **KSTransition** s giving the rates of transition between them + IonChannelKS -- A kinetic scheme based ion channel with multiple **gateKS** s, each of which consists of multiple **KSState** s and **KSTransition** s giving the rates of transition between them """ @@ -38784,6 +38898,8 @@ def append(self, element): class BasePynnSynapse(BaseSynapse): + """BasePynnSynapse -- Base type for all PyNN synapses. Note, the current **I** produced is dimensionless, but it requires a membrane potential **v** with dimension voltage""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_( @@ -38976,6 +39092,8 @@ def _buildChildren( class basePyNNCell(BaseCell): + """basePyNNCell -- Base type of any PyNN standard cell model. Note: membrane potential **v** has dimensions voltage, but all other parameters are dimensionless. This is to facilitate translation to and from PyNN scripts in Python, where these parameters have implicit units, see http://neuralensemble. org/trac/PyNN/wiki/StandardModels""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_("cm", "xs:float", 0, 0, {"use": "required", "name": "cm"}), @@ -39228,7 +39346,7 @@ def _buildChildren( class ContinuousProjection(BaseProjection): - """ContinuousProjection -- Projection between two populations consisting of analog connections (e.g. 
graded synapses)""" + """ContinuousProjection -- A projection between **presynapticPopulation** and **postsynapticPopulation** through components **preComponent** at the start and **postComponent** at the end of a **continuousConnection** or **continuousConnectionInstance** . Can be used for analog synapses.""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -39606,7 +39724,7 @@ def exportHdf5(self, h5file, h5Group): class ElectricalProjection(BaseProjection): - """ElectricalProjection -- Projection between two populations consisting of electrical connections (gap junctions)""" + """ElectricalProjection -- A projection between **presynapticPopulation** to another **postsynapticPopulation** through gap junctions.""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -40748,7 +40866,7 @@ def _buildChildren( class Projection(BaseProjection): - """Projection -- Projection (set of synaptic connections) between two populations. Chemical/event based synaptic transmission""" + """Projection -- Projection from one population, **presynapticPopulation** to another, **postsynapticPopulation,** through **synapse.** Contains lists of **connection** or **connectionWD** elements.""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -41128,6 +41246,8 @@ def __str__(self): class SpikeGeneratorRefPoisson(SpikeGeneratorPoisson): + """SpikeGeneratorRefPoisson -- Generator of spikes whose ISI distribution is the maximum entropy distribution over [ **minimumISI,** +infinity ) with mean: 1 / **averageRate**""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_( @@ -41557,6 +41677,8 @@ def _buildChildren( class ChannelDensityNernstCa2(ChannelDensityNernst): + """ChannelDensityNernstCa2 -- This component is similar to the original component type **channelDensityNernst** but it is changed in order to have a reversal potential that depends on a second independent Ca++ pool ( ca2 ). See https://github. com/OpenSourceBrain/ghk-nernst.""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [] subclass = None @@ -41730,6 +41852,8 @@ def _buildChildren( class ChannelDensityVShift(ChannelDensity): + """ChannelDensityVShift -- Same as **channelDensity** , but with a **vShift** parameter to change voltage activation of gates. The exact usage of **vShift** in expressions for rates is determined by the individual gates.""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_( @@ -41971,6 +42095,8 @@ class Cell(BaseCell): * biophysicalProperties -- Should only be used if biophysicalProperties element is outside the cell. This points to the id of the biophysicalProperties + * Cell -- Cell with **segment** s specified in a **morphology** element along with details on its **biophysicalProperties** . NOTE: this can only be correctly simulated using jLEMS when there is a single segment in the cell, and **v** of this cell represents the membrane potential in that isopotential segment. + """ __hash__ = GeneratedsSuper.__hash__ @@ -42688,6 +42814,8 @@ def summary(self): class PinskyRinzelCA3Cell(BaseCell): + """PinskyRinzelCA3Cell -- Reduced CA3 cell model from Pinsky and Rinzel 1994. See https://github. 
com/OpenSourceBrain/PinskyRinzelModel""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_( @@ -43573,6 +43701,8 @@ def _buildChildren( class FitzHughNagumo1969Cell(BaseCell): + """FitzHughNagumo1969Cell -- The Fitzhugh Nagumo model is a two-dimensional simplification of the Hodgkin-Huxley model of spike generation in squid giant axons. This system was suggested by FitzHugh ( FitzHugh R. [1961]: Impulses and physiological states in theoretical models of nerve membrane. Biophysical J. 1:445-466 ), who called it " Bonhoeffer-van der Pol model ", and the equivalent circuit by Nagumo et al. ( Nagumo J. , Arimoto S. , and Yoshizawa S. [1962] An active pulse transmission line simulating nerve axon. Proc IRE. 50:2061-2070. 1962 ). This version corresponds to the one described in FitzHugh R. [1969]: Mathematical models of excitation and propagation in nerve. Chapter 1 ( pp. 1-85 in H. P. Schwan, ed. Biological Engineering, McGraw-Hill Book Co. , N. Y. )""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_("a", "Nml2Quantity_none", 0, 0, {"use": "required", "name": "a"}), @@ -43885,6 +44015,8 @@ def _buildChildren( class FitzHughNagumoCell(BaseCell): + """FitzHughNagumoCell -- Simple dimensionless model of spiking cell from FitzHugh and Nagumo. Superseded by **fitzHughNagumo1969Cell** ( See https://github. com/NeuroML/NeuroML2/issues/42 )""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_("I", "Nml2Quantity_none", 0, 0, {"use": "required", "name": "I"}), @@ -44096,7 +44228,10 @@ def _buildChildren( class BaseCellMembPotCap(BaseCell): - """C -- This is to prevent it conflicting with attribute c (lowercase) e.g. in izhikevichCell2007""" + """C -- This is to prevent it conflicting with attribute c (lowercase) e.g. in izhikevichCell2007 + BaseCellMembPotCap -- Any cell with a membrane potential **v** with voltage units and a membrane capacitance **C.** Also defines exposed value **iSyn** for current due to external synapses and **iMemb** for total transmembrane current ( usually channel currents plus **iSyn** ) + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -44338,6 +44473,8 @@ def _buildChildren( class IzhikevichCell(BaseCell): + """IzhikevichCell -- Cell based on the 2003 model of Izhikevich, see http://izhikevich. org/publications/spikes. htm""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_( @@ -44680,6 +44817,8 @@ def _buildChildren( class IafCell(BaseCell): + """IafCell -- Integrate and fire cell with capacitance **C,** **leakConductance** and **leakReversal**""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_( @@ -45085,6 +45224,8 @@ def _buildChildren( class IafTauCell(BaseCell): + """IafTauCell -- Integrate and fire cell which returns to its leak reversal potential of **leakReversal** with a time constant **tau**""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_( @@ -45423,7 +45564,10 @@ def _buildChildren( class GradedSynapse(BaseSynapse): - """GradedSynapse -- Based on synapse in Methods of http://www.nature.com/neuro/journal/v7/n12/abs/nn1352.html.""" + """GradedSynapse -- Based on synapse in Methods of http://www.nature.com/neuro/journal/v7/n12/abs/nn1352.html. + Graded/analog synapse. Based on synapse in Methods of http://www. nature. com/neuro/journal/v7/n12/abs/nn1352. 
html + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -45796,7 +45940,10 @@ def _buildChildren( class LinearGradedSynapse(BaseSynapse): - """LinearGradedSynapse -- Behaves just like a one way gap junction.""" + """LinearGradedSynapse -- Behaves just like a one way gap junction. + Behaves just like a one way gap junction. + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -46021,7 +46168,10 @@ def _buildChildren( class SilentSynapse(BaseSynapse): - """SilentSynapse -- Dummy synapse which emits no current. Used as presynaptic endpoint for analog synaptic connection (continuousConnection).""" + """SilentSynapse -- Dummy synapse which emits no current. Used as presynaptic endpoint for analog synaptic connection (continuousConnection). + Dummy synapse which emits no current. Used as presynaptic endpoint for analog synaptic connection. + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [] @@ -46172,7 +46322,10 @@ def _buildChildren( class GapJunction(BaseSynapse): - """GapJunction -- Gap junction/single electrical connection""" + """GapJunction -- Gap junction/single electrical connection + Gap junction/single electrical connection + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -46385,6 +46538,8 @@ def _buildChildren( class BaseCurrentBasedSynapse(BaseSynapse): + """BaseCurrentBasedSynapse -- Synapse model which produces a synaptic current.""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [] subclass = None @@ -46573,6 +46728,8 @@ def _buildChildren( class BaseVoltageDepSynapse(BaseSynapse): + """BaseVoltageDepSynapse -- Base type for synapses with a dependence on membrane potential""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [] subclass = None @@ -46759,10 +46916,7 @@ def _buildChildren( class IonChannel(IonChannelScalable): - """IonChannel -- Note ionChannel and ionChannelHH are currently functionally identical. This is needed since many existing examples use ionChannel, some use ionChannelHH. - One of these should be removed, probably ionChannelHH. - - """ + """IonChannel -- Note **ionChannel** and **ionChannelHH** are currently functionally identical. This is needed since many existing examples use ionChannel, some use ionChannelHH. NeuroML v2beta4 should remove one of these, probably ionChannelHH.""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -47413,6 +47567,8 @@ def _buildChildren( class AlphaCurrSynapse(BasePynnSynapse): + """AlphaCurrSynapse -- Alpha synapse: rise time and decay time are both tau_syn. Current based synapse.""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [] subclass = None @@ -47571,6 +47727,8 @@ def _buildChildren( class ExpCurrSynapse(BasePynnSynapse): + """ExpCurrSynapse -- Current based synapse with instantaneous rise and single exponential decay ( with time constant tau_syn )""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [] subclass = None @@ -47721,6 +47879,8 @@ def _buildChildren( class AlphaCondSynapse(BasePynnSynapse): + """AlphaCondSynapse -- Alpha synapse: rise time and decay time are both tau_syn. 
Conductance based synapse.""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_("e_rev", "xs:float", 0, 0, {"use": "required", "name": "e_rev"}), @@ -47894,6 +48054,8 @@ def _buildChildren( class ExpCondSynapse(BasePynnSynapse): + """ExpCondSynapse -- Conductance based synapse with instantaneous rise and single exponential decay ( with time constant tau_syn )""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_("e_rev", "xs:float", 0, 0, {"use": "required", "name": "e_rev"}), @@ -48059,6 +48221,8 @@ def _buildChildren( class HH_cond_exp(basePyNNCell): + """HH_cond_exp -- Single-compartment Hodgkin-Huxley-type neuron with transient sodium and delayed-rectifier potassium currents using the ion channel models from Traub.""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_( @@ -48372,6 +48536,8 @@ def _buildChildren( class basePyNNIaFCell(basePyNNCell): + """basePyNNIaFCell -- Base type of any PyNN standard integrate and fire model""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_("tau_m", "xs:float", 0, 0, {"use": "required", "name": "tau_m"}), @@ -48636,7 +48802,7 @@ def _buildChildren( class ContinuousConnection(BaseConnectionNewFormat): - """ContinuousConnection -- Individual continuous/analog synaptic connection""" + """ContinuousConnection -- An instance of a connection in a **continuousProjection** between **presynapticPopulation** to another **postsynapticPopulation** through a **preComponent** at the start and **postComponent** at the end. Can be used for analog synapses.""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -49002,7 +49168,7 @@ def __str__(self): class ElectricalConnection(BaseConnectionNewFormat): - """ElectricalConnection -- Individual electrical synaptic connection""" + """ElectricalConnection -- To enable connections between populations through gap junctions.""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -49333,7 +49499,7 @@ def __str__(self): class ConnectionWD(BaseConnectionOldFormat): - """ConnectionWD -- Individual synaptic connection with weight and delay""" + """ConnectionWD -- Event connection between named components, which gets processed via a new instance of a synapse component which is created on the target component, includes setting of **weight** and **delay** for the synaptic connection""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -49681,7 +49847,7 @@ def get_delay_in_ms(self): class Connection(BaseConnectionOldFormat): - """Connection -- Individual chemical (event based) synaptic connection, weight==1 and no delay""" + """Connection -- Event connection directly between named components, which gets processed via a new instance of a **synapse** component which is created on the target component. Normally contained inside a **projection** element.""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [] @@ -49925,6 +50091,8 @@ def __str__(self): class Cell2CaPools(Cell): + """Cell2CaPools -- Variant of cell with two independent Ca2+ pools. Cell with **segment** s specified in a **morphology** element along with details on its **biophysicalProperties** . 
NOTE: this can only be correctly simulated using jLEMS when there is a single segment in the cell, and **v** of this cell represents the membrane potential in that isopotential segment.""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_( @@ -50129,6 +50297,8 @@ def _buildChildren( class AdExIaFCell(BaseCellMembPotCap): + """AdExIaFCell -- Model based on Brette R and Gerstner W ( 2005 ) Adaptive Exponential Integrate-and-Fire Model as an Effective Description of Neuronal Activity. J Neurophysiol 94:3637-3642""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_( @@ -50652,6 +50822,8 @@ def _buildChildren( class Izhikevich2007Cell(BaseCellMembPotCap): + """Izhikevich2007Cell -- Cell based on the modified Izhikevich model in Izhikevich 2007, Dynamical systems in neuroscience, MIT Press""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_( @@ -51186,6 +51358,8 @@ def _buildChildren( class IafRefCell(IafCell): + """IafRefCell -- Integrate and fire cell with capacitance **C,** **leakConductance,** **leakReversal** and refractory period **refract**""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_( @@ -51404,6 +51578,8 @@ def _buildChildren( class IafTauRefCell(IafTauCell): + """IafTauRefCell -- Integrate and fire cell which returns to its leak reversal potential of **leakReversal** with a time course **tau.** It has a refractory period of **refract** after spiking""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_( @@ -51625,6 +51801,8 @@ def _buildChildren( class DoubleSynapse(BaseVoltageDepSynapse): + """DoubleSynapse -- Synapse consisting of two independent synaptic mechanisms ( e. g. AMPA-R and NMDA-R ), which can be easily colocated in connections""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_("synapse1", "NmlId", 0, 0, {"use": "required", "name": "synapse1"}), @@ -51899,6 +52077,8 @@ def _buildChildren( class AlphaCurrentSynapse(BaseCurrentBasedSynapse): + """AlphaCurrentSynapse -- Alpha current synapse: rise time and decay time are both **tau.**""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_( @@ -52171,6 +52351,8 @@ def _buildChildren( class BaseConductanceBasedSynapseTwo(BaseVoltageDepSynapse): + """BaseConductanceBasedSynapseTwo -- Synapse model suited for a sum of two expTwoSynapses which exposes a conductance **g** in addition to producing a current. Not necessarily ohmic!! cno_0000027""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_( @@ -52512,6 +52694,8 @@ def _buildChildren( class BaseConductanceBasedSynapse(BaseVoltageDepSynapse): + """BaseConductanceBasedSynapse -- Synapse model which exposes a conductance **g** in addition to producing a current. Not necessarily ohmic!! cno_0000027""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_( @@ -52822,7 +53006,7 @@ def _buildChildren( class IonChannelVShift(IonChannel): - """IonChannelVShift -- Same as ionChannel, but with a vShift parameter to change voltage activation of gates. The exact usage of vShift in expressions for rates is determined by the individual gates.""" + """IonChannelVShift -- Same as **ionChannel** , but with a **vShift** parameter to change voltage activation of gates. 
The exact usage of **vShift** in expressions for rates is determined by the individual gates.""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -53073,10 +53257,7 @@ def _buildChildren( class IonChannelHH(IonChannel): - """IonChannelHH -- Note ionChannel and ionChannelHH are currently functionally identical. This is needed since many existing examples use ionChannel, some use ionChannelHH. - One of these should be removed, probably ionChannelHH. - - """ + """IonChannelHH -- Note **ionChannel** and **ionChannelHH** are currently functionally identical. This is needed since many existing examples use ionChannel, some use ionChannelHH. NeuroML v2beta4 should remove one of these, probably ionChannelHH.""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [] @@ -53257,6 +53438,8 @@ def _buildChildren( class IF_curr_exp(basePyNNIaFCell): + """IF_curr_exp -- Leaky integrate and fire model with fixed threshold and decaying-exponential post-synaptic current""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [] subclass = None @@ -53432,6 +53615,8 @@ def _buildChildren( class IF_curr_alpha(basePyNNIaFCell): + """IF_curr_alpha -- Leaky integrate and fire model with fixed threshold and alpha-function-shaped post-synaptic current""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [] subclass = None @@ -53607,6 +53792,8 @@ def _buildChildren( class basePyNNIaFCondCell(basePyNNIaFCell): + """basePyNNIaFCondCell -- Base type of conductance based PyNN IaF cell models""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_( @@ -53848,7 +54035,7 @@ def _buildChildren( class ContinuousConnectionInstance(ContinuousConnection): - """ContinuousConnectionInstance -- Individual continuous/analog synaptic connection - instance based""" + """ContinuousConnectionInstance -- An instance of a connection in a **continuousProjection** between **presynapticPopulation** to another **postsynapticPopulation** through a **preComponent** at the start and **postComponent** at the end. Populations need to be of type **populationList** and contain **instance** and **location** elements. Can be used for analog synapses.""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [] @@ -54069,7 +54256,7 @@ def __str__(self): class ElectricalConnectionInstance(ElectricalConnection): - """ElectricalConnectionInstance -- Projection between two populations consisting of analog connections (e.g. graded synapses)""" + """ElectricalConnectionInstance -- To enable connections between populations through gap junctions. 
Populations need to be of type **populationList** and contain **instance** and **location** elements.""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [] @@ -54286,6 +54473,8 @@ def __str__(self): class ExpThreeSynapse(BaseConductanceBasedSynapseTwo): + """ExpThreeSynapse -- Ohmic synapse similar to expTwoSynapse but consisting of two components that can differ in decay times and max conductances but share the same rise time.""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_( @@ -54567,6 +54756,8 @@ def _buildChildren( class ExpTwoSynapse(BaseConductanceBasedSynapse): + """ExpTwoSynapse -- Ohmic synapse model whose conductance waveform on receiving an event has a rise time of **tauRise** and a decay time of **tauDecay.** Max conductance reached during this time ( assuming zero conductance before ) is **gbase** * **weight.**""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_( @@ -54836,6 +55027,8 @@ def _buildChildren( class ExpOneSynapse(BaseConductanceBasedSynapse): + """ExpOneSynapse -- Ohmic synapse model whose conductance rises instantaneously by ( **gbase** * **weight** ) on receiving an event, and which decays exponentially to zero with time course **tauDecay**""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_( @@ -55057,6 +55250,8 @@ def _buildChildren( class AlphaSynapse(BaseConductanceBasedSynapse): + """AlphaSynapse -- Ohmic synapse model where rise time and decay time are both **tau.** Max conductance reached during this time ( assuming zero conductance before ) is **gbase** * **weight.**""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_( @@ -55270,6 +55465,8 @@ def _buildChildren( class EIF_cond_exp_isfa_ista(basePyNNIaFCondCell): + """EIF_cond_exp_isfa_ista -- Adaptive exponential integrate and fire neuron according to Brette R and Gerstner W ( 2005 ) with exponentially-decaying post-synaptic conductance""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_("a", "xs:float", 0, 0, {"use": "required", "name": "a"}), @@ -55555,6 +55752,8 @@ def _buildChildren( class IF_cond_exp(basePyNNIaFCondCell): + """IF_cond_exp -- Leaky integrate and fire model with fixed threshold and exponentially-decaying post-synaptic conductance""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [] subclass = None @@ -55734,6 +55933,8 @@ def _buildChildren( class IF_cond_alpha(basePyNNIaFCondCell): + """IF_cond_alpha -- Leaky integrate and fire model with fixed threshold and alpha-function-shaped post-synaptic conductance""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [] subclass = None @@ -55913,7 +56114,7 @@ def _buildChildren( class ContinuousConnectionInstanceW(ContinuousConnectionInstance): - """ContinuousConnectionInstanceW -- Individual continuous/analog synaptic connection - instance based. Includes setting of _weight for the connection""" + """ContinuousConnectionInstanceW -- An instance of a connection in a **continuousProjection** between **presynapticPopulation** to another **postsynapticPopulation** through a **preComponent** at the start and **postComponent** at the end. Populations need to be of type **populationList** and contain **instance** and **location** elements. Can be used for analog synapses. 
Includes setting of **weight** for the connection""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -56134,7 +56335,7 @@ def __str__(self): class ElectricalConnectionInstanceW(ElectricalConnectionInstance): - """ElectricalConnectionInstanceW -- Projection between two populations consisting of analog connections (e.g. graded synapses). Includes setting of weight for the connection""" + """ElectricalConnectionInstanceW -- To enable connections between populations through gap junctions. Populations need to be of type **populationList** and contain **instance** and **location** elements. Includes setting of **weight** for the connection""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -56355,6 +56556,8 @@ def __str__(self): class BlockingPlasticSynapse(ExpTwoSynapse): + """BlockingPlasticSynapse -- Biexponential synapse that allows for optional block and plasticity mechanisms, which can be expressed as child elements.""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_( @@ -56604,6 +56807,8 @@ def _buildChildren( class EIF_cond_alpha_isfa_ista(EIF_cond_exp_isfa_ista): + """EIF_cond_alpha_isfa_ista -- Adaptive exponential integrate and fire neuron according to Brette R and Gerstner W ( 2005 ) with alpha-function-shaped post-synaptic conductance""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [] subclass = None From e655b5eba22fd6d2a268f35f3edbd4f8fb8ff1b8 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Wed, 17 Nov 2021 14:27:14 +0000 Subject: [PATCH 112/136] chore: sync XSD --- neuroml/nml/NeuroML_v2.2.xsd | 6196 +++++++++++++++++----------------- 1 file changed, 3141 insertions(+), 3055 deletions(-) diff --git a/neuroml/nml/NeuroML_v2.2.xsd b/neuroml/nml/NeuroML_v2.2.xsd index 85a0db7b..65131696 100644 --- a/neuroml/nml/NeuroML_v2.2.xsd +++ b/neuroml/nml/NeuroML_v2.2.xsd @@ -1,3110 +1,3196 @@ - - - - - - - - An id attribute for elements which need to be identified uniquely (normally just within their parent element). - - - - - - - - - - - A value for a physical quantity in NeuroML 2, e.g. 20, -60.0mV or 5nA - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - An id string for pointing to an entry in an annotation element related to a MIRIAM resource. Based on metaid of SBML - - - - - - - - - - - - An id string for pointing to an entry in the NeuroLex ontology. Use of this attribute is a shorthand for a full + + + + + + An id attribute for elements which need to be identified uniquely (normally just within their parent element). + + + + + + + + A value for a physical quantity in NeuroML 2, e.g. 20, -60.0mV or 5nA + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + An id string for pointing to an entry in an annotation element related to a MIRIAM resource. Based on metaid of SBML + + + + + + + + An id string for pointing to an entry in the NeuroLex ontology. 
Simple types and metadata elements: NeuroLexId (a shorthand for a full RDF reference to the MIRIAM resource urn:miriam:neurolex with a bqbiol:is qualifier), NonNegativeInteger (ids of segments, connections, etc., integer >= 0), PositiveInteger (integer >= 1), DoubleGreaterThanZero, ZeroOrOne, notes (human readable notes kept in the NeuroML file rather than in XML comments, so they can be carried over to the formats the NeuroML is mapped to), property (a tag/value pair allowed on any baseStandalone element or inside an annotation; typical uses include numberInternalDivisions, radius, color, recommended_dt_ms and recommended_duration_ms) and annotation (structured metadata, specifically RDF or property elements).

LEMS extension elements, letting a NeuroML file define custom ComponentTypes: componentType, Constant, Exposure, Dynamics, DerivedVariable and ConditionalDerivedVariable, plus the ZeroToOne float restriction.

The root neuroml element and the groups of elements it may contain: cells, PyNN-based cells, synapses, PyNN-based synapses, inputs, PyNN-based inputs and concentration models (each list "will be expanded...").

Ion channel and gate definitions: ionChannelKS (kinetic scheme based channel built from gateKS elements, each made of KSStates and KSTransitions giving the rates of transition between them), ionChannel and ionChannelHH (currently functionally identical, kept because many existing examples use ionChannel and some use ionChannelHH; one of them, probably ionChannelHH, should eventually be removed), ionChannelVShift (adds a vShift parameter shifting the voltage activation of the gates) and q10ConductanceScaling (conductance scaling as a standard function of the difference between the simulation temperature and experimentalTemp). Kinetic scheme building blocks: closedState and openState (KSStates with relativeConductance 0 and 1), forward, reverse and tau/inf transitions whose rates follow the standard Hodgkin-Huxley forms (HHExpRate, HHSigmoidRate, HHExpLinearRate), and gateKS itself. Hodgkin-Huxley gates: the generic gate element (all sub elements for gateHHrates, gateHHratesTau, gateFractional etc. are allowed, with validity constrained by its type attribute), the gate variants defined in terms of rates, rates plus tau, rates plus inf, tau plus inf, an instantaneous gate (tau = 0, so the gate follows the inf value exactly) and gateFractional (subgates contributing fractional conductances).

Concentration models: a decaying pool model (the ion, currently hard coded to calcium because of the iCa requirement, relaxes to restingConc with time course decayConstant within a shell of thickness shellThickness inside the membrane) and a fixed factor model (a factor rho scales the incoming current, independently of compartment size, into a concentration change).

Base synapse types: baseSynapse (produces a current and changes Dynamics in response to an incoming event; cno_0000009), baseVoltageDepSynapse, baseCurrentBasedSynapse and baseConductanceBasedSynapse (exposes a conductance g in addition to producing a current; not necessarily ohmic; cno_0000027).
baseConductanceBasedSynapseTwo (the base for a synapse built as a sum of two expTwoSynapses, also exposing a conductance g) and the concrete synapse models: gapJunction (gap junction / single electrical connection), silentSynapse (a dummy synapse emitting no current, used as the presynaptic endpoint for analog/continuous connections), linearGradedSynapse (behaves just like a one way gap junction), gradedSynapse (graded/analog synapse based on the synapse in the Methods of http://www.nature.com/neuro/journal/v7/n12/abs/nn1352.html), alphaCurrentSynapse (rise and decay time both tau), alphaSynapse (ohmic; rise and decay time both tau, peak conductance gbase * weight), expOneSynapse (conductance rises instantaneously by gbase * weight on an event and decays exponentially with time course tauDecay), expTwoSynapse (rise time tauRise, decay time tauDecay, peak conductance gbase * weight), expThreeSynapse (like expTwoSynapse but with two components that can differ in decay time and maximum conductance while sharing the same rise time), doubleSynapse (two independent synaptic mechanisms, e.g. AMPA-R and NMDA-R, that can be colocated in connections) and blockingPlasticSynapse (biexponential synapse with optional block and plasticity child mechanisms).

Abstract point cells: baseCell (base type of any cell, whether a point neuron like izhikevich2007Cell or a morphologically detailed Cell, usable in a population), iafTauCell and iafTauRefCell (integrate and fire cells relaxing to leakReversal with time constant tau, the latter with a refractory period refract), iafCell and iafRefCell (integrate and fire with capacitance C, leakConductance and leakReversal, without or with refract) and izhikevichCell (the 2003 Izhikevich model, see http://izhikevich.org/publications/spikes.htm).
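These synapse and integrate-and-fire definitions are what the generated Python API in this repository (neuroml.nml.nml, re-exported by the neuroml package) is built from. A minimal sketch of constructing a couple of them follows; the class names (IafCell, ExpOneSynapse) and the pluralised list members on NeuroMLDocument (iaf_cells, exp_one_synapses) are assumed to follow the usual mapping used when nml.py is regenerated from the schema, so treat the exact identifiers as illustrative.

    from neuroml import NeuroMLDocument, IafCell, ExpOneSynapse

    nml_doc = NeuroMLDocument(id="sample_components")

    # Integrate and fire cell: capacitance C, leakConductance, leakReversal, thresh, reset
    iaf0 = IafCell(id="iaf0", C="1.0 nF", leak_conductance="10 nS",
                   leak_reversal="-65mV", thresh="-50mV", reset="-65mV")
    nml_doc.iaf_cells.append(iaf0)

    # expOneSynapse: conductance jumps by gbase on each event and decays with tauDecay
    syn0 = ExpOneSynapse(id="syn0", gbase="5nS", erev="0mV", tau_decay="3ms")
    nml_doc.exp_one_synapses.append(syn0)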
baseCellMembPotCap (any cell with a membrane potential v and a membrane capacitance C, exposing iSyn for the current due to external synapses and iMemb for the total transmembrane current; the capacitance attribute is uppercase C to avoid clashing with the lowercase c of izhikevich2007Cell), izhikevich2007Cell (the modified Izhikevich model of Izhikevich 2007, Dynamical Systems in Neuroscience, MIT Press), adExIaFCell (adaptive exponential integrate and fire, Brette and Gerstner 2005, J Neurophysiol 94:3637-3642), fitzHughNagumoCell (simple dimensionless FitzHugh-Nagumo spiking cell, superseded by fitzHughNagumo1969Cell, see https://github.com/NeuroML/NeuroML2/issues/42), fitzHughNagumo1969Cell (the two-dimensional simplification of the Hodgkin-Huxley model; FitzHugh 1961, Nagumo et al. 1962, in the form described in FitzHugh 1969) and pinskyRinzelCA3Cell (reduced CA3 cell model from Pinsky and Rinzel 1994, see https://github.com/OpenSourceBrain/PinskyRinzelModel).

Morphologically detailed cells: cell (segments specified in a morphology element together with its biophysicalProperties; jLEMS can only simulate this correctly when there is a single segment, in which case v is the membrane potential of that isopotential segment) and cell2CaPools (the same with two independent Ca2+ pools); morphology (the collection of segments giving the 3D structure of the cell, plus its segmentGroups) and segment (the smallest unit of a possibly branching structure such as a dendrite or axon: a nonNegativeInteger id, usually 0 for the soma/root, proximal and distal end points, and a parent segment, the proximal point being omissible when it coincides with a point on the parent; the shape is a cylinder or conical frustum, or a sphere when the proximal and distal positions coincide and the radii are equal).
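As a sketch of how one of these abstract cells would be instantiated through the generated API (attribute names assumed to mirror the schema's v0, C, k, vr, vt, vpeak, a, b, c, d; the parameter values are a typical regular-spiking set, not taken from this patch):

    from neuroml import NeuroMLDocument, Izhikevich2007Cell

    nml_doc = NeuroMLDocument(id="izh_cells")

    # izhikevich2007Cell: the 2007 'simple model', with capacitance C and conductance-like k, b
    izh0 = Izhikevich2007Cell(id="izh2007RS", v0="-60mV", C="100pF", k="0.7nS_per_mV",
                              vr="-60mV", vt="-40mV", vpeak="35mV",
                              a="0.03per_ms", b="-2nS", c="-50mV", d="100pA")
    nml_doc.izhikevich2007_cells.append(izh0)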
point3DWithDiam: the base for ComponentTypes carrying an (x, y, z) coordinate and a diameter; no dimension is attached to the coordinates, which are assumed to be in micrometres, matching the default of most neuronal morphology formats. segmentGroup: a named group of segments (e.g. soma_group, dendrite_group, axon_group), whose meaning can be pinned down with a neuroLexId such as sao1044911821 ('Neuronal Cell Body') or sao1211023249 ('Dendrite'); its members can be listed individually (member), as a path or subTree of segments, or by including other segmentGroups, and an inhomogeneousParameter (with one of the allowed metrics) can be defined across it for use by variableParameter. biophysicalProperties and biophysicalProperties2CaPools bundle the membraneProperties and intracellularProperties (or their 2CaPools variants) of a cell; membraneProperties holds the channel populations, channelDensities, specificCapacitance and related elements, and spikeThresh is the membrane potential at which a spiking event is emitted.
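A sketch of how the morphology elements above fit together in the generated API (a one-segment soma plus a dendrite, grouped into a segmentGroup); class and member names such as SegmentParent(segments=...), Member(segments=...) and neuro_lex_id are assumptions based on the schema attribute names:

    from neuroml import (Cell, Morphology, Segment, SegmentParent, SegmentGroup,
                         Member, Point3DWithDiam)

    cell = Cell(id="simple_cell")
    cell.morphology = Morphology(id="morphology_simple_cell")

    # Spherical soma: proximal and distal share the same position and diameter
    soma = Segment(id=0, name="Soma",
                   proximal=Point3DWithDiam(x=0.0, y=0.0, z=0.0, diameter=10.0),
                   distal=Point3DWithDiam(x=0.0, y=0.0, z=0.0, diameter=10.0))

    # Cylindrical dendrite attached to the soma (parent segment id 0)
    dend = Segment(id=1, name="Dendrite1", parent=SegmentParent(segments=0),
                   proximal=Point3DWithDiam(x=0.0, y=5.0, z=0.0, diameter=2.0),
                   distal=Point3DWithDiam(x=0.0, y=105.0, z=0.0, diameter=2.0))

    cell.morphology.segments.append(soma)
    cell.morphology.segments.append(dend)

    # Group the soma so biophysical parameters can target it; neuroLexId marks its meaning
    soma_group = SegmentGroup(id="soma_group", neuro_lex_id="sao1044911821")
    soma_group.members.append(Member(segments=soma.id))
    cell.morphology.segment_groups.append(soma_group)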
The spiking event will usually not be emitted again until the membrane potential has fallen below spikeThresh and rises to cross it again in the positive direction. specificCapacitance (capacitance per unit area), initMembPotential (explicitly set initial membrane potential) and resistivity (the specific axial resistance of the cytoplasm). channelPopulation: a population of a number of ohmic ion channels, each contributing a conductance channelg across a reversal potential erev to a total current i (active membrane currents are more often specified as a density over an area of the cell with channelDensity). The channel density family: channelDensityNonUniform, channelDensityNonUniformNernst and channelDensityNonUniformGHK (conductance density distributed over a region of the cell and set through a variableParameter rather than being uniform, with a fixed reversal potential, a reversal potential from the Nernst equation, or a current from the Goldman-Hodgkin-Katz equation respectively, the latter two hard coded for Ca; there is no LEMS dynamics for these yet since they only make sense for multicompartmental cells, and a ComponentType is needed to export such cells via LEMS/jNeuroML to NEURON); channelDensity (uniform ohmic conductance density gDensity over an area of the cell with fixed erev, producing a current density iDensity); channelDensityVShift (the same, plus a vShift parameter whose use in rate expressions is decided by the individual gates); channelDensityNernst and channelDensityNernstCa2 (reversal potential from the Nernst equation, hard coded for Ca, the Ca2 variant depending on a second independent Ca++ pool; see https://github.com/OpenSourceBrain/ghk-nernst) and channelDensityGHK (current density from the Goldman-Hodgkin-Katz equation). Each of these repeats the implementation note that specifying the ion is redundant (it is already set in the ionChannel definition) but is kept temporarily so that, for example, all Ca or Na conducting densities in a cell can be selected and the correct native simulator value can be set (e.g. ek for ion = k in NEURON); it should be removed in the longer term (TODO).
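A sketch of a membraneProperties block using these elements, continuing the cell built above; the list member names (channel_densities, spike_threshes, specific_capacitances, init_memb_potentials) and the segment_groups keyword are assumed from the usual generated naming rather than confirmed by this patch:

    from neuroml import (MembraneProperties, ChannelDensity, SpikeThresh,
                         SpecificCapacitance, InitMembPotential)

    mp = MembraneProperties()

    # Uniform ohmic conductance density with fixed reversal potential, on the soma group
    mp.channel_densities.append(
        ChannelDensity(id="na_channels", ion_channel="na_chan", ion="na",
                       cond_density="120.0 mS_per_cm2", erev="50.0 mV",
                       segment_groups="soma_group"))

    mp.spike_threshes.append(SpikeThresh(value="0 mV"))
    mp.specific_capacitances.append(SpecificCapacitance(value="1.0 uF_per_cm2"))
    mp.init_memb_potentials.append(InitMembPotential(value="-65 mV"))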
channelDensityGHK2: a conductance density over an area of the cell producing a current density iDensity, a modified version of Jaffe et al. 1994 (also used in Lawrence et al. 2006). variableParameter: a parameter such as condDensity that varies across a segmentGroup, its value computed from the value attribute of its inhomogeneousValue child; it is normally a child of channelDensityNonUniform, channelDensityNonUniformNernst or channelDensityNonUniformGHK, with the referenced segmentGroup defining the inhomogeneousParameter that supplies the variable (e.g. p, varying across the cell based on the path length from the soma), so that for example condDensity = f(p). inhomogeneousValue: the value of an inhomogeneousParameter (see variableParameter for usage). species: a chemical species identified by ion, with an internal concentration and an external extConcentration (the ion attribute duplicates the id and is kept only until the LEMS implementation can select by id; TODO: remove).
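And the matching intracellular side, assembled into biophysicalProperties on the cell from the earlier sketch; again the member names (resistivities, species, initial_concentration, initial_ext_concentration, biophysical_properties) are assumed rather than taken from this patch:

    from neuroml import (IntracellularProperties, Resistivity, Species,
                         BiophysicalProperties)

    ip = IntracellularProperties()
    ip.resistivities.append(Resistivity(value="0.03 kohm_cm"))

    # A calcium species handled by a (hypothetical) concentration model with id "CaPool"
    ip.species.append(Species(id="ca", ion="ca", concentration_model="CaPool",
                              initial_concentration="1.0e-4 mM",
                              initial_ext_concentration="2.0 mM"))

    cell.biophysical_properties = BiophysicalProperties(
        id="biophys", membrane_properties=mp, intracellular_properties=ip)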
intracellularProperties and intracellularProperties2CaPools: properties of the intracellular space of the cell, such as the resistivity and the list of ionic species present; caConc and caConcExt are exposed here so that other Components can access them, even though caConcExt is clearly not an intracellular property. This is followed by a second, slightly differently worded copy of the documentation for the synapse base types (baseSynapse, baseVoltageDepSynapse, baseCurrentBasedSynapse, baseConductanceBasedSynapse and baseConductanceBasedSynapseTwo).
The second copy continues with gapJunction, silentSynapse (a dummy synapse emitting no current, used as the presynaptic endpoint for analog synaptic connections / continuousConnection), linearGradedSynapse (behaves just like a one way gap junction) and gradedSynapse (based on the synapse in the Methods of http://www.nature.com/neuro/journal/v7/n12/abs/nn1352.html).
It then runs through the remaining synapse models (alphaCurrentSynapse, alphaSynapse, expOneSynapse, expTwoSynapse, expThreeSynapse, doubleSynapse, blockingPlasticSynapse), the abstract cells (baseCell, iafTauCell, iafTauRefCell, iafCell, iafRefCell, izhikevichCell, baseCellMembPotCap, izhikevich2007Cell, adExIaFCell, fitzHughNagumoCell, fitzHughNagumo1969Cell, pinskyRinzelCA3Cell), the morphologically detailed cell and cell2CaPools (including the notes that the morphology and biophysicalProperties attributes should only be used when those elements live outside the cell, in which case they point to the ids of the external morphology and biophysicalProperties), morphology, segment, point3DWithDiam, segmentGroup with its member/include/path/subTree children, inhomogeneousParameter and its allowed metrics, biophysicalProperties and biophysicalProperties2CaPools, membraneProperties and membraneProperties2CaPools, spikeThresh, specificCapacitance, initMembPotential, resistivity, channelPopulation and the start of the channelDensityNonUniform family, each channel element again carrying the "ion is redundant / TODO: remove" implementation note.
It continues with the uniform channel densities: channelDensity, channelDensityVShift, channelDensityNernst, channelDensityNernstCa2 and channelDensityGHK, with the same notes on gDensity, iDensity, erev, the Nernst and Goldman-Hodgkin-Katz calculations (hard coded for Ca) and the redundant ion attribute.
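The non-uniform densities are where variableParameter and inhomogeneousValue come into play. A heavily hedged sketch follows, assuming member names like inhomogeneous_parameters, variable_parameters and channel_density_non_uniforms and reusing the cell and membraneProperties from the earlier sketches; the value expression is written in terms of the variable p defined by the inhomogeneousParameter:

    from neuroml import (SegmentGroup, Member, InhomogeneousParameter, ProximalDetails,
                         ChannelDensityNonUniform, VariableParameter, InhomogeneousValue)

    # Group the dendrite and define p = path length from the root over that group
    dend_group = SegmentGroup(id="dendrite_group")
    dend_group.members.append(Member(segments=1))
    dend_group.inhomogeneous_parameters.append(
        InhomogeneousParameter(id="path_over_dends", variable="p",
                               metric="Path Length from root",
                               proximal=ProximalDetails(translation_start="0")))
    cell.morphology.segment_groups.append(dend_group)

    # Conductance density that decays exponentially with distance from the soma
    cd_k = ChannelDensityNonUniform(id="k_nonuniform", ion_channel="k_chan",
                                    ion="k", erev="-77mV")
    cd_k.variable_parameters.append(
        VariableParameter(parameter="condDensity", segment_groups="dendrite_group",
                          inhomogeneous_value=InhomogeneousValue(
                              inhomogeneous_parameters="path_over_dends",
                              value="1e-7 * exp(-p/200)")))
    mp.channel_density_non_uniforms.append(cd_k)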
The second copy then covers channelDensityGHK2 (the modified version of Jaffe et al. 1994), variableParameter, inhomogeneousValue, species, intracellularProperties and intracellularProperties2CaPools, and the documentation moves on to the inputs and network structure.

Inputs: pulseGenerator (a constant current pulse of a given amplitude for a duration after a delay, scaled by weight if set), sineGenerator (sinusoidal current with period, amplitude and phase), rampGenerator (current changing steadily from startAmplitude to finishAmplitude), their dimensionless equivalents pulseGeneratorDL, sineGeneratorDL and rampGeneratorDL, compoundInput and compoundInputDL (the sum of child basePointCurrent / basePointCurrentDL elements), voltageClamp (applies a current i to try to hold the parent at targetVoltage; not yet fully tested, voltageClampTriple is preferred), voltageClampTriple (three clamp levels: conditioningVoltage until delay, testingVoltage until delay + duration, then returnVoltage, applied through simpleSeriesResistance and only enabled if active = 1), spike (a single spike at a specified time), spikeArray (a set of spikes that can feed a predetermined spike train into a cell), timedSynapticInput (a spike array connected to a synapse, producing a current for each spike), spikeGenerator (regular interval period), spikeGeneratorRandom (interspike interval between minISI and maxISI), spikeGeneratorPoisson and spikeGeneratorRefPoisson (ISI from an exponential or maximum entropy distribution with mean 1/averageRate), and poissonFiringSynapse and transientPoissonFiringSynapse (Poisson spike generators driving a synapse, the transient variant firing only after a delay and for a duration).

Networks: network (containing populations, possibly of type populationList with explicit instance locations, projections with lists of connections, explicitConnections, and inputLists with lists of inputs; usually of type networkWithTemperature when temperature dependent elements such as ion channels are present), region (an initial attempt at a 3D region for placing cells, work in progress), population (whose size attribute must, for populationList, match the number of instances or a validation error results), instance and location, explicitConnection, projection (from presynapticPopulation to postsynapticPopulation through a synapse, holding connection or connectionWD elements), connection and connectionWD (the latter adding weight and delay), electricalProjection with electricalConnection, electricalConnectionInstance and electricalConnectionInstanceW (gap junction connections, the instance variants for populationList populations, the W variant adding a weight), continuousProjection with continuousConnection, continuousConnectionInstance and continuousConnectionInstanceW (analog connections through a preComponent and postComponent), explicitInput, inputList, input (with optional segmentId, default 0, and fractionAlong, default 0.5) and inputW (adding a weight).

PyNN cell types: basePyNNCell (membrane potential v has dimension voltage but all other parameters are dimensionless, to ease translation to and from PyNN scripts where parameters have implicit units, see http://neuralensemble.org/trac/PyNN/wiki/StandardModels), basePyNNIaFCell and basePyNNIaFCondCell, IF_curr_alpha, IF_curr_exp, IF_cond_alpha and IF_cond_exp (leaky integrate and fire models with fixed threshold and alpha- or exponential-shaped post-synaptic current or conductance), EIF_cond_exp_isfa_ista and EIF_cond_alpha_isfa_ista (adaptive exponential integrate and fire after Brette and Gerstner 2005) and HH_cond_exp (single-compartment Hodgkin-Huxley type neuron with transient sodium and delayed-rectifier potassium currents using the ion channel models from Traub).
Note, the current **I** produced is dimensionless, but it requires a membrane potential **v** with dimension voltage + + + + + + + + + + + + + Conductance based synapse with instantaneous rise and single exponential decay ( with time constant tau_syn ) + + + + + + + + + + + + + Alpha synapse: rise time and decay time are both tau_syn. Conductance based synapse. + + + + + + + + + + + + + Current based synapse with instantaneous rise and single exponential decay ( with time constant tau_syn ) + + + + + + + + + + + + + Alpha synapse: rise time and decay time are both tau_syn. Current based synapse. + + + + + + + + + + + + + Spike source, generating spikes according to a Poisson process. + + + + + + + + + + + + + + + + + + + Base element without ID specified *yet*, e.g. for an element with a particular requirement on its id which does not comply with NmlId (e.g. Segment needs nonNegativeInteger). + + + + + + Anything which can have a unique (within its parent) id, which must be an integer zero or greater. + + + + + + + + + + Anything which can have a unique (within its parent) id of the form NmlId (spaceless combination of letters, numbers and underscore). + + + + + + + + + + Elements which can stand alone and be referenced by id, e.g. cell, morphology. + + + - + + + + + + - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Generates a constant current pulse of a certain **amplitude** for a specified **duration** after a **delay.** Scaled by **weight,** if set - - - - - - - - - - - - - Dimensionless equivalent of **pulseGenerator** . Generates a constant current pulse of a certain **amplitude** for a specified **duration** after a **delay.** Scaled by **weight,** if set - - - - - - - - - - - - - - - - - - - - - - Generates a sinusoidally varying current after a time **delay,** for a fixed **duration.** The **period** and maximum **amplitude** of the current can be set as well as the **phase** at which to start. Scaled by **weight,** if set - - - - - - - - - - - - Dimensionless equivalent of **sineGenerator** . Generates a sinusoidally varying current after a time **delay,** for a fixed **duration.** The **period** and maximum **amplitude** of the current can be set as well as the **phase** at which to start. Scaled by **weight,** if set - - - - - - - - - - - - - Generates a ramping current after a time **delay,** for a fixed **duration.** During this time the current steadily changes from **startAmplitude** to **finishAmplitude.** Scaled by **weight,** if set - - - - - - - - - - - - Dimensionless equivalent of **rampGenerator** . Generates a ramping current after a time **delay,** for a fixed **duration.** During this time the dimensionless current steadily changes from **startAmplitude** to **finishAmplitude.** Scaled by **weight,** if set - - - - - - - - - - - - - Generates a current which is the sum of all its child **basePointCurrent** element, e. g. can be a combination of **pulseGenerator** , **sineGenerator** elements producing a single **i.** Scaled by **weight,** if set - - - - - - - - - - - - - Generates a current which is the sum of all its child **basePointCurrentDL** elements, e. g. can be a combination of **pulseGeneratorDL** , **sineGeneratorDL** elements producing a single **i.** Scaled by **weight,** if set - - - - - - - - - - - - Voltage clamp. Applies a variable current **i** to try to keep parent at **targetVoltage.** Not yet fully tested!!! Consider using voltageClampTriple!! - - - - - - - - - - - - - - - Voltage clamp with 3 clamp levels. 
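These input types map onto classes of the same names in the generated neuroml/nml/nml.py. A minimal sketch of a pulseGenerator (ids, delay, duration and amplitude values are illustrative; the snake_case container names follow libNeuroML's generated API):

    from neuroml import NeuroMLDocument, PulseGenerator
    import neuroml.writers as writers

    nml_doc = NeuroMLDocument(id="input_example")

    # Constant current pulse: 0.07 nA, switched on 100 ms after the start
    # of the simulation and lasting 300 ms.
    pg = PulseGenerator(id="pg0", delay="100ms", duration="300ms",
                        amplitude="0.07nA")
    nml_doc.pulse_generators.append(pg)

    # Serialise; the file can then be validated against NeuroML_v2.2.xsd.
    writers.NeuroMLWriter.write(nml_doc, "input_example.nml")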
[The schema documentation continues with the spike and rate sources: spike (a single spike at a specified time), spikeArray, spikeGenerator (regular inter-spike interval), spikeGeneratorRandom, spikeGeneratorPoisson and its refractory variant, poissonFiringSynapse and transientPoissonFiringSynapse (Poisson spiking connected to a synapse to give an input current); and then the network-level types: network (populations, projections and input lists, often of type networkWithTemperature), region, population (optionally of type populationList with instance/location elements, whose size attribute must match the number of instances) and instance.]
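Populations and networks are assembled the same way from the generated classes. A sketch that puts a small population of simple integrate-and-fire cells into a network (parameter values are illustrative and follow the style of the libNeuroML examples):

    from neuroml import NeuroMLDocument, IafCell, Network, Population

    nml_doc = NeuroMLDocument(id="net_example")

    # A basic integrate-and-fire cell used as the population's component.
    iaf = IafCell(id="iaf0", C="1.0 nF", thresh="-50mV", reset="-65mV",
                  leak_conductance="10 nS", leak_reversal="-65mV")
    nml_doc.iaf_cells.append(iaf)

    # One network containing a population of five instances of that cell.
    net = Network(id="net0")
    nml_doc.networks.append(net)
    pop0 = Population(id="pop0", component=iaf.id, size=5)
    net.populations.append(pop0)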
[Connectivity types follow: location, explicitConnection, baseProjection and projection (containing connection or connectionWD elements, the latter adding weight and delay), the base connection types in old and new attribute formats, electricalProjection with electricalConnection / electricalConnectionInstance and its weighted form (gap junctions), and continuousProjection with continuousConnection / continuousConnectionInstance and its weighted form (analog synapses, requiring populationList populations); then explicitInput, inputList and input again, followed by a second pass over the PyNN cell and synapse documentation and the base identifier types (elements without an id yet, nonnegative-integer ids, NmlId ids, and standalone elements that can be referenced by id such as cell and morphology).]
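A self-contained, illustrative sketch of a projection between two such populations, plus an explicit current input; the ../population/index/cell path form used in the Connection is the one used with populationList-style populations in the NeuroML examples, while simple populations are addressed as pop0[0]:

    from neuroml import (NeuroMLDocument, IafCell, Network, Population,
                         PulseGenerator, ExpOneSynapse, Projection,
                         Connection, ExplicitInput)
    import neuroml.writers as writers

    nml_doc = NeuroMLDocument(id="proj_example")
    iaf = IafCell(id="iaf0", C="1.0 nF", thresh="-50mV", reset="-65mV",
                  leak_conductance="10 nS", leak_reversal="-65mV")
    nml_doc.iaf_cells.append(iaf)

    net = Network(id="net0")
    nml_doc.networks.append(net)
    pop0 = Population(id="pop0", component=iaf.id, size=1)
    pop1 = Population(id="pop1", component=iaf.id, size=1)
    net.populations.extend([pop0, pop1])

    # Single-exponential conductance-based synapse used by the projection.
    syn = ExpOneSynapse(id="syn0", gbase="5nS", erev="0mV", tau_decay="3ms")
    nml_doc.exp_one_synapses.append(syn)

    # One chemical connection from the only cell of pop0 to the only cell of pop1.
    proj = Projection(id="proj0", presynaptic_population=pop0.id,
                      postsynaptic_population=pop1.id, synapse=syn.id)
    net.projections.append(proj)
    proj.connections.append(Connection(id=0,
                                       pre_cell_id="../pop0/0/iaf0",
                                       post_cell_id="../pop1/0/iaf0"))

    # Current input to the first cell of pop0.
    pg = PulseGenerator(id="pg0", delay="50ms", duration="200ms",
                        amplitude="0.05nA")
    nml_doc.pulse_generators.append(pg)
    net.explicit_inputs.append(ExplicitInput(target="pop0[0]", input=pg.id))

    writers.NeuroMLWriter.write(nml_doc, "proj_example.nml")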
- - - - - - - - - - - - - - - - - + + + + From 71c209c4d88fbc05b7a29462c4a8eb3d42f0c70d Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Wed, 17 Nov 2021 14:29:11 +0000 Subject: [PATCH 113/136] chore: regenerate nml.py --- neuroml/nml/nml.py | 78 ++++++++++++++++++++-------------------------- 1 file changed, 33 insertions(+), 45 deletions(-) diff --git a/neuroml/nml/nml.py b/neuroml/nml/nml.py index ffda0870..569f6f30 100644 --- a/neuroml/nml/nml.py +++ b/neuroml/nml/nml.py @@ -2,7 +2,7 @@ # -*- coding: utf-8 -*- # -# Generated Wed Nov 17 11:10:33 2021 by generateDS.py version 2.40.3. +# Generated Wed Nov 17 14:27:36 2021 by generateDS.py version 2.40.3. # Python 3.10.0 (default, Oct 4 2021, 00:00:00) [GCC 11.2.1 20210728 (Red Hat 11.2.1-1)] # # Command line options: @@ -10867,11 +10867,10 @@ def _buildChildren( class Species(GeneratedsSuper): - """ion -- Specifying the ion here again is redundant, the ion name should be the same as id. Kept for now + """Species -- Description of a chemical species identified by **ion,** which has internal, **concentration,** and external, **extConcentration** values for its concentration + ion -- Specifying the ion here again is redundant, the ion name should be the same as id. Kept for now until LEMS implementation can select by id. TODO: remove. - * Species -- Description of a chemical species identified by **ion,** which has internal, **concentration,** and external, **extConcentration** values for its concentration - """ __hash__ = GeneratedsSuper.__hash__ @@ -14302,10 +14301,7 @@ def __str__(self): class BaseWithoutId(GeneratedsSuper): - """BaseWithoutId -- Base element w - ithout ID specified *yet*, e.g. for an element with a particular requirement on its id which does not comply with NmlId (e.g. Segment needs nonNegativeInteger). - - """ + """BaseWithoutId -- Base element without ID specified *yet*, e.g. for an element with a particular requirement on its id which does not comply with NmlId (e.g. Segment needs nonNegativeInteger).""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -24246,15 +24242,14 @@ def _buildChildren( class ChannelDensityGHK2(Base): - """ion -- Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here + """ChannelDensityGHK2 -- Time varying conductance density, **gDensity,** which is distributed on an area of the cell, producing a current density **iDensity.** Modified version of Jaffe et al. 1994 ( used also in Lawrence et al. 2006 ). See https://github. com/OpenSourceBrain/ghk-nernst. + ion -- Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here TEMPORARILY since selecting all ca or na conducting channel populations/densities in a cell would be difficult otherwise. Also, it will make it easier to set the correct native simulator value for erev (e.g. ek for ion = k in NEURON). Currently a required attribute. It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel element. TODO: remove. - * ChannelDensityGHK2 -- Time varying conductance density, **gDensity,** which is distributed on an area of the cell, producing a current density **iDensity.** Modified version of Jaffe et al. 1994 ( used also in Lawrence et al. 2006 ). See https://github. com/OpenSourceBrain/ghk-nernst. 
- """ __hash__ = GeneratedsSuper.__hash__ @@ -24596,15 +24591,14 @@ def _buildChildren( class ChannelDensityGHK(Base): - """ion -- Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here + """ChannelDensityGHK -- Specifies a time varying conductance density, **gDensity,** which is distributed on an area of the cell, producing a current density **iDensity** and whose reversal potential is calculated from the Goldman Hodgkin Katz equation. Hard coded for Ca only! See https://github. com/OpenSourceBrain/ghk-nernst. + ion -- Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here TEMPORARILY since selecting all ca or na conducting channel populations/densities in a cell would be difficult otherwise. Also, it will make it easier to set the correct native simulator value for erev (e.g. ek for ion = k in NEURON). Currently a required attribute. It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel element. TODO: remove. - * ChannelDensityGHK -- Specifies a time varying conductance density, **gDensity,** which is distributed on an area of the cell, producing a current density **iDensity** and whose reversal potential is calculated from the Goldman Hodgkin Katz equation. Hard coded for Ca only! See https://github. com/OpenSourceBrain/ghk-nernst. - """ __hash__ = GeneratedsSuper.__hash__ @@ -24944,15 +24938,14 @@ def _buildChildren( class ChannelDensityNernst(Base): - """ion -- Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here + """ChannelDensityNernst -- Specifies a time varying conductance density, **gDensity,** which is distributed on an area of the **cell,** producing a current density **iDensity** and whose reversal potential is calculated from the Nernst equation. Hard coded for Ca only! See https://github. com/OpenSourceBrain/ghk-nernst. + ion -- Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here TEMPORARILY since selecting all ca or na conducting channel populations/densities in a cell would be difficult otherwise. Also, it will make it easier to set the correct native simulator value for erev (e.g. ek for ion = k in NEURON). Currently a required attribute. It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel element. TODO: remove. - * ChannelDensityNernst -- Specifies a time varying conductance density, **gDensity,** which is distributed on an area of the **cell,** producing a current density **iDensity** and whose reversal potential is calculated from the Nernst equation. Hard coded for Ca only! See https://github. com/OpenSourceBrain/ghk-nernst. - """ __hash__ = GeneratedsSuper.__hash__ @@ -25355,15 +25348,14 @@ def _buildChildren( class ChannelDensity(Base): - """ion -- Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here + """ChannelDensity -- Specifies a time varying ohmic conductance density, **gDensity,** which is distributed on an area of the **cell** ( specified in **membraneProperties** ) with fixed reversal potential **erev** producing a current density **iDensity** + ion -- Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here TEMPORARILY since selecting all ca or na conducting channel populations/densities in a cell would be difficult otherwise. 
Also, it will make it easier to set the correct native simulator value for erev (e.g. ek for ion = k in NEURON). Currently a required attribute. It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel element. TODO: remove. - * ChannelDensity -- Specifies a time varying ohmic conductance density, **gDensity,** which is distributed on an area of the **cell** ( specified in **membraneProperties** ) with fixed reversal potential **erev** producing a current density **iDensity** - """ __hash__ = GeneratedsSuper.__hash__ @@ -25834,15 +25826,14 @@ def _buildChildren( class ChannelDensityNonUniformGHK(Base): - """ion -- Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here + """ChannelDensityNonUniformGHK -- Specifies a time varying conductance density, which is distributed on a region of the **cell,** and whose current is calculated from the Goldman-Hodgkin-Katz equation. Hard coded for Ca only!. The conductance density of the channel is not uniform, but is set using the **variableParameter** . Note, there is no dynamical description of this in LEMS yet, as this type only makes sense for multicompartmental cells. A ComponentType for this needs to be present to enable export of NeuroML 2 multicompartmental cells via LEMS/jNeuroML to NEURON + ion -- Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here TEMPORARILY since selecting all ca or na conducting channel populations/densities in a cell would be difficult otherwise. Also, it will make it easier to set the correct native simulator value for erev (e.g. ek for ion = k in NEURON). Currently a required attribute. It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel element. TODO: remove. - * ChannelDensityNonUniformGHK -- Specifies a time varying conductance density, which is distributed on a region of the **cell,** and whose current is calculated from the Goldman-Hodgkin-Katz equation. Hard coded for Ca only!. The conductance density of the channel is not uniform, but is set using the **variableParameter** . Note, there is no dynamical description of this in LEMS yet, as this type only makes sense for multicompartmental cells. A ComponentType for this needs to be present to enable export of NeuroML 2 multicompartmental cells via LEMS/jNeuroML to NEURON - """ __hash__ = GeneratedsSuper.__hash__ @@ -26124,15 +26115,14 @@ def _buildChildren( class ChannelDensityNonUniformNernst(Base): - """ion -- Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here + """ChannelDensityNonUniformNernst -- Specifies a time varying conductance density, which is distributed on a region of the **cell,** and whose reversal potential is calculated from the Nernst equation. Hard coded for Ca only!. The conductance density of the channel is not uniform, but is set using the **variableParameter** . Note, there is no dynamical description of this in LEMS yet, as this type only makes sense for multicompartmental cells. A ComponentType for this needs to be present to enable export of NeuroML 2 multicompartmental cells via LEMS/jNeuroML to NEURON + ion -- Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here TEMPORARILY since selecting all ca or na conducting channel populations/densities in a cell would be difficult otherwise. 
Also, it will make it easier to set the correct native simulator value for erev (e.g. ek for ion = k in NEURON). Currently a required attribute. It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel element. TODO: remove. - * ChannelDensityNonUniformNernst -- Specifies a time varying conductance density, which is distributed on a region of the **cell,** and whose reversal potential is calculated from the Nernst equation. Hard coded for Ca only!. The conductance density of the channel is not uniform, but is set using the **variableParameter** . Note, there is no dynamical description of this in LEMS yet, as this type only makes sense for multicompartmental cells. A ComponentType for this needs to be present to enable export of NeuroML 2 multicompartmental cells via LEMS/jNeuroML to NEURON - """ __hash__ = GeneratedsSuper.__hash__ @@ -26416,15 +26406,14 @@ def _buildChildren( class ChannelDensityNonUniform(Base): - """ion -- Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here + """ChannelDensityNonUniform -- Specifies a time varying ohmic conductance density, which is distributed on a region of the **cell.** The conductance density of the channel is not uniform, but is set using the **variableParameter** . Note, there is no dynamical description of this in LEMS yet, as this type only makes sense for multicompartmental cells. A ComponentType for this needs to be present to enable export of NeuroML 2 multicompartmental cells via LEMS/jNeuroML to NEURON + ion -- Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here TEMPORARILY since selecting all ca or na conducting channel populations/densities in a cell would be difficult otherwise. Also, it will make it easier to set the correct native simulator value for erev (e.g. ek for ion = k in NEURON). Currently a required attribute. It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel element. TODO: remove. - * ChannelDensityNonUniform -- Specifies a time varying ohmic conductance density, which is distributed on a region of the **cell.** The conductance density of the channel is not uniform, but is set using the **variableParameter** . Note, there is no dynamical description of this in LEMS yet, as this type only makes sense for multicompartmental cells. A ComponentType for this needs to be present to enable export of NeuroML 2 multicompartmental cells via LEMS/jNeuroML to NEURON - """ __hash__ = GeneratedsSuper.__hash__ @@ -26760,15 +26749,14 @@ def _buildChildren( class ChannelPopulation(Base): - """ion -- Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here + """ChannelPopulation -- Population of a **number** of ohmic ion channels. These each produce a conductance **channelg** across a reversal potential **erev,** giving a total current **i.** Note that active membrane currents are more frequently specified as a density over an area of the **cell** using **channelDensity** + ion -- Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here TEMPORARILY since selecting all ca or na conducting channel populations/densities in a cell would be difficult otherwise. Also, it will make it easier to set the correct native simulator value for erev (e.g. ek for ion = k in NEURON). Currently a required attribute. 
It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel element. TODO: remove. - * ChannelPopulation -- Population of a **number** of ohmic ion channels. These each produce a conductance **channelg** across a reversal potential **erev,** giving a total current **i.** Note that active membrane currents are more frequently specified as a density over an area of the **cell** using **channelDensity** - """ __hash__ = GeneratedsSuper.__hash__ @@ -29499,8 +29487,9 @@ def _buildChildren( class FixedFactorConcentrationModel(Standalone): - """ion -- Should not be required, as it's present on the species element! - FixedFactorConcentrationModel -- Model of buffering of concentration of an ion ( currently hard coded to be calcium, due to requirement for **iCa** ) which has a baseline level **restingConc** and tends to this value with time course **decayConstant.** A fixed factor **rho** is used to scale the incoming current *independently of the size of the compartment* to produce a concentration change. + """FixedFactorConcentrationModel -- Model of buffering of concentration of an ion ( currently hard coded to be calcium, due to requirement for **iCa** ) which has a baseline level **restingConc** and tends to this value with time course * + *decayConstant.** A fixed factor **rho** is used to scale the incoming current *independently of the size of the compartment* to produce a concentration change. + ion -- Should not be required, as it's present on the species element! """ @@ -29903,8 +29892,8 @@ def _buildChildren( class DecayingPoolConcentrationModel(Standalone): - """ion -- Should not be required, as it's present on the species element! - DecayingPoolConcentrationModel -- Model of an intracellular buffering mechanism for **ion** ( currently hard Coded to be calcium, due to requirement for **iCa** ) which has a baseline level **restingConc** and tends to this value with time course **decayConstant.** The ion is assumed to occupy a shell inside the membrane of thickness **shellThickness.** + """DecayingPoolConcentrationModel -- Model of an intracellular buffering mechanism for **ion** ( currently hard Coded to be calcium, due to requirement for **iCa** ) which has a baseline level **restingConc** and tends to this value with time course **decayConstant.** The ion is assumed to occupy a shell inside the membrane of thickness **shellThickness.** + ion -- Should not be required, as it's present on the species element! """ @@ -42089,14 +42078,13 @@ def _buildChildren( class Cell(BaseCell): - """morphology -- Should only be used if morphology element is outside the cell. + """Cell -- Cell with **segment** s specified in a **morphology** element along with details on its **biophysicalProperties** . NOTE: this can only be correctly simulated using jLEMS when there is a single segment in the cell, and **v** of this cell represents the membrane potential in that isopotential segment. + morphology -- Should only be used if morphology element is outside the cell. This points to the id of the morphology * biophysicalProperties -- Should only be used if biophysicalProperties element is outside the cell. This points to the id of the biophysicalProperties - * Cell -- Cell with **segment** s specified in a **morphology** element along with details on its **biophysicalProperties** . 
NOTE: this can only be correctly simulated using jLEMS when there is a single segment in the cell, and **v** of this cell represents the membrane potential in that isopotential segment. - """ __hash__ = GeneratedsSuper.__hash__ @@ -44228,8 +44216,8 @@ def _buildChildren( class BaseCellMembPotCap(BaseCell): - """C -- This is to prevent it conflicting with attribute c (lowercase) e.g. in izhikevichCell2007 - BaseCellMembPotCap -- Any cell with a membrane potential **v** with voltage units and a membrane capacitance **C.** Also defines exposed value **iSyn** for current due to external synapses and **iMemb** for total transmembrane current ( usually channel currents plus **iSyn** ) + """BaseCellMembPotCap -- Any cell with a membrane potential **v** with voltage units and a membrane capacitance **C.** Also defines exposed value **iSyn** for current due to external synapses and **iMemb** for total transmembrane current ( usually channel currents plus **iSyn** ) + C -- This is to prevent it conflicting with attribute c (lowercase) e.g. in izhikevichCell2007 """ @@ -45564,8 +45552,8 @@ def _buildChildren( class GradedSynapse(BaseSynapse): - """GradedSynapse -- Based on synapse in Methods of http://www.nature.com/neuro/journal/v7/n12/abs/nn1352.html. - Graded/analog synapse. Based on synapse in Methods of http://www. nature. com/neuro/journal/v7/n12/abs/nn1352. html + """GradedSynapse -- Graded/analog synapse. Based on synapse in Methods of http://www. nature. com/neuro/journal/v7/n12/abs/nn1352. html + Based on synapse in Methods of http://www.nature.com/neuro/journal/v7/n12/abs/nn1352.html. """ @@ -46168,8 +46156,8 @@ def _buildChildren( class SilentSynapse(BaseSynapse): - """SilentSynapse -- Dummy synapse which emits no current. Used as presynaptic endpoint for analog synaptic connection (continuousConnection). - Dummy synapse which emits no current. Used as presynaptic endpoint for analog synaptic connection. + """SilentSynapse -- Dummy synapse which emits no current. Used as presynaptic endpoint for analog synaptic connection. + Dummy synapse which emits no current. Used as presynaptic endpoint for analog synaptic connection (continuousConnection). """ From 26864c772cefc5f986df435b5309d650dd210d4c Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Wed, 17 Nov 2021 15:11:21 +0000 Subject: [PATCH 114/136] chore: sync XSD --- neuroml/nml/NeuroML_v2.2.xsd | 700 ++++++++++++++++++++++++++++------- 1 file changed, 560 insertions(+), 140 deletions(-) diff --git a/neuroml/nml/NeuroML_v2.2.xsd b/neuroml/nml/NeuroML_v2.2.xsd index 65131696..ad02f40b 100644 --- a/neuroml/nml/NeuroML_v2.2.xsd +++ b/neuroml/nml/NeuroML_v2.2.xsd @@ -198,8 +198,11 @@
[NeuroML_v2.2.xsd diff, continued; the XML markup is again missing and only the documentation text with its +/- markers survives. The hunks in this stretch touch the annotation blocks of: property and annotation (metadata on any standalone element); ionChannelKS, ionChannel/ionChannelHH and ionChannelVShift; q10ConductanceScaling; the kinetic-scheme building blocks closedState, openState, forwardTransition, reverseTransition, tauInfTransition and gateKS; the Hodgkin-Huxley gate variants (rates, tau/inf combinations, instantaneous and fractional gates); decayingPoolConcentrationModel and fixedFactorConcentrationModel; and the synapse base types together with gapJunction, silentSynapse, linearGradedSynapse and gradedSynapse.]
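These channel and gate types correspond to classes of the same names in the generated nml.py. A rough sketch of an HH-style channel with one gate; the rate parameters are placeholders, and the gate_hh_rates / ion_channel_hhs container names are assumed from the generated API rather than taken from this patch:

    from neuroml import NeuroMLDocument, IonChannelHH, GateHHRates, HHRate

    nml_doc = NeuroMLDocument(id="kchan_example")

    # Delayed-rectifier-like channel with one gate raised to the 4th power.
    k_chan = IonChannelHH(id="k_chan", conductance="10pS", species="k")
    n_gate = GateHHRates(id="n", instances=4)
    n_gate.forward_rate = HHRate(type="HHExpLinearRate", rate="0.1per_ms",
                                 midpoint="-55mV", scale="10mV")
    n_gate.reverse_rate = HHRate(type="HHExpRate", rate="0.125per_ms",
                                 midpoint="-65mV", scale="-80mV")
    k_chan.gate_hh_rates.append(n_gate)

    # Container attribute name assumed; check NeuroMLDocument's generated members.
    nml_doc.ion_channel_hhs.append(k_chan)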
[The diff then moves through the synapse models: alphaCurrentSynapse, alphaSynapse, expOneSynapse, expTwoSynapse, expThreeSynapse, doubleSynapse (two independent mechanisms, e.g. AMPA and NMDA receptors, colocated in one connection) and blockingPlasticSynapse (a biexponential synapse with optional block and plasticity child mechanisms).]
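As a small illustration of this family, a double-exponential conductance-based synapse can be sketched as follows (values are illustrative; the exp_two_synapses container name is assumed from the generated API):

    from neuroml import NeuroMLDocument, ExpTwoSynapse

    nml_doc = NeuroMLDocument(id="syn_example")

    # Conductance rises with tau_rise and decays with tau_decay; gbase is
    # the peak conductance for a single event of weight 1.
    ampa = ExpTwoSynapse(id="ampa", gbase="0.5nS", erev="0mV",
                         tau_rise="0.5ms", tau_decay="5ms")
    nml_doc.exp_two_synapses.append(ampa)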
[Next come the cell types: baseCell (any cell usable in a population), the integrate-and-fire family (with and without explicit capacitance and refractory period), izhikevichCell and izhikevich2007Cell, baseCellMembPotCap, adExIaFCell, the FitzHugh-Nagumo cells (the original dimensionless form and the fitzHughNagumo1969Cell variant), the reduced two-compartment pinskyRinzelCA3Cell, and the morphologically detailed cell and cell2CaPools together with morphology and segment (proximal/distal end points, parent segment, the convention that the first, parentless segment is usually the soma, and the note that jLEMS can only simulate such cells correctly when there is a single segment).]
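The point-neuron types are again one-to-one with classes in nml.py; a sketch of the 2007 Izhikevich formulation with regular-spiking-like parameters (values indicative, in the spirit of the NeuroML examples):

    from neuroml import NeuroMLDocument, Izhikevich2007Cell

    nml_doc = NeuroMLDocument(id="izh_example")

    # Regular-spiking parameter set for the 2007 Izhikevich model.
    izh = Izhikevich2007Cell(id="izh2007RS", v0="-60mV", C="100pF",
                             k="0.7nS_per_mV", vr="-60mV", vt="-40mV",
                             vpeak="35mV", a="0.03per_ms", b="-2nS",
                             c="-50.0mV", d="100pA")
    nml_doc.izhikevich2007_cells.append(izh)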
[The morphology documentation continues: segment shapes (cylinder, conical frustum, or sphere when proximal and distal coincide), point3DWithDiam (coordinates implicitly in micrometres), segmentGroup (with member, include, path and subTree children, optional inhomogeneousParameter, and neuroLexId annotations such as 'Neuronal Cell Body' and 'Dendrite'); then biophysicalProperties and biophysicalProperties2CaPools, membraneProperties and membraneProperties2CaPools, spikeThresh, specificCapacitance, initMembPotential, resistivity, and the start of channelPopulation.]
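Morphologies are built from Segment and SegmentGroup objects; a sketch of a soma plus one dendritic segment (coordinates in micrometres; the SegmentParent and Member keyword name, segments=, is assumed from the generated API):

    from neuroml import (NeuroMLDocument, Cell, Morphology, Segment,
                         SegmentParent, Point3DWithDiam, SegmentGroup, Member)

    nml_doc = NeuroMLDocument(id="cell_example")
    cell = Cell(id="simple_cell")
    cell.morphology = Morphology(id="morph0")
    nml_doc.cells.append(cell)

    # Spherical soma: proximal and distal coincide and share one diameter.
    soma = Segment(id=0, name="Soma",
                   proximal=Point3DWithDiam(x=0, y=0, z=0, diameter=10),
                   distal=Point3DWithDiam(x=0, y=0, z=0, diameter=10))
    # Cylindrical dendrite attached to the soma.
    dend = Segment(id=1, name="Dendrite",
                   parent=SegmentParent(segments=soma.id),
                   proximal=Point3DWithDiam(x=0, y=5, z=0, diameter=2),
                   distal=Point3DWithDiam(x=0, y=55, z=0, diameter=2))
    cell.morphology.segments.extend([soma, dend])

    # Group the soma so channel densities can later be restricted to it.
    soma_group = SegmentGroup(id="soma_group")
    soma_group.members.append(Member(segments=soma.id))
    cell.morphology.segment_groups.append(soma_group)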
These each produce a conductance **channelg** across a reversal potential **erev,** giving a total current **i.** Note that active membrane currents are more frequently specified as a density over an area of the **cell** using **channelDensity** + Population of a **number** of ohmic ion channels. These each produce a conductance **channelg** across a reversal potential **erev,** giving a total current **i.** Note that active membrane currents are more frequently specified as a density over an area of the **cell** using **channelDensity** + + + @@ -1801,8 +2011,11 @@ - Specifies a time varying ohmic conductance density, which is distributed on a region of the **cell.** The conductance density of the channel is not uniform, but is set using the **variableParameter** . Note, there is no dynamical description of this in LEMS yet, as this type only makes sense for multicompartmental cells. A ComponentType for this needs to be present to enable export of NeuroML 2 multicompartmental cells via LEMS/jNeuroML to NEURON + Specifies a time varying ohmic conductance density, which is distributed on a region of the **cell.** The conductance density of the channel is not uniform, but is set using the **variableParameter** . Note, there is no dynamical description of this in LEMS yet, as this type only makes sense for multicompartmental cells. A ComponentType for this needs to be present to enable export of NeuroML 2 multicompartmental cells via LEMS/jNeuroML to NEURON + + + @@ -1829,8 +2042,11 @@ - Specifies a time varying conductance density, which is distributed on a region of the **cell,** and whose reversal potential is calculated from the Nernst equation. Hard coded for Ca only!. The conductance density of the channel is not uniform, but is set using the **variableParameter** . Note, there is no dynamical description of this in LEMS yet, as this type only makes sense for multicompartmental cells. A ComponentType for this needs to be present to enable export of NeuroML 2 multicompartmental cells via LEMS/jNeuroML to NEURON + Specifies a time varying conductance density, which is distributed on a region of the **cell,** and whose reversal potential is calculated from the Nernst equation. Hard coded for Ca only!. The conductance density of the channel is not uniform, but is set using the **variableParameter** . Note, there is no dynamical description of this in LEMS yet, as this type only makes sense for multicompartmental cells. A ComponentType for this needs to be present to enable export of NeuroML 2 multicompartmental cells via LEMS/jNeuroML to NEURON + + + @@ -1856,8 +2072,11 @@ - Specifies a time varying conductance density, which is distributed on a region of the **cell,** and whose current is calculated from the Goldman-Hodgkin-Katz equation. Hard coded for Ca only!. The conductance density of the channel is not uniform, but is set using the **variableParameter** . Note, there is no dynamical description of this in LEMS yet, as this type only makes sense for multicompartmental cells. A ComponentType for this needs to be present to enable export of NeuroML 2 multicompartmental cells via LEMS/jNeuroML to NEURON + Specifies a time varying conductance density, which is distributed on a region of the **cell,** and whose current is calculated from the Goldman-Hodgkin-Katz equation. Hard coded for Ca only!. The conductance density of the channel is not uniform, but is set using the **variableParameter** . 
Note, there is no dynamical description of this in LEMS yet, as this type only makes sense for multicompartmental cells. A ComponentType for this needs to be present to enable export of NeuroML 2 multicompartmental cells via LEMS/jNeuroML to NEURON + + + @@ -1883,8 +2102,11 @@ - Specifies a time varying ohmic conductance density, **gDensity,** which is distributed on an area of the **cell** ( specified in **membraneProperties** ) with fixed reversal potential **erev** producing a current density **iDensity** + Specifies a time varying ohmic conductance density, **gDensity,** which is distributed on an area of the **cell** ( specified in **membraneProperties** ) with fixed reversal potential **erev** producing a current density **iDensity** + + + @@ -1915,8 +2137,11 @@ - Same as **channelDensity** , but with a **vShift** parameter to change voltage activation of gates. The exact usage of **vShift** in expressions for rates is determined by the individual gates. + Same as **channelDensity** , but with a **vShift** parameter to change voltage activation of gates. The exact usage of **vShift** in expressions for rates is determined by the individual gates. + + + @@ -1928,8 +2153,11 @@ - Specifies a time varying conductance density, **gDensity,** which is distributed on an area of the **cell,** producing a current density **iDensity** and whose reversal potential is calculated from the Nernst equation. Hard coded for Ca only! See https://github. com/OpenSourceBrain/ghk-nernst. + Specifies a time varying conductance density, **gDensity,** which is distributed on an area of the **cell,** producing a current density **iDensity** and whose reversal potential is calculated from the Nernst equation. Hard coded for Ca only! See https://github. com/OpenSourceBrain/ghk-nernst. + + + @@ -1959,8 +2187,11 @@ - This component is similar to the original component type **channelDensityNernst** but it is changed in order to have a reversal potential that depends on a second independent Ca++ pool ( ca2 ). See https://github. com/OpenSourceBrain/ghk-nernst. + This component is similar to the original component type **channelDensityNernst** but it is changed in order to have a reversal potential that depends on a second independent Ca++ pool ( ca2 ). See https://github. com/OpenSourceBrain/ghk-nernst. + + + @@ -1972,8 +2203,11 @@ - Specifies a time varying conductance density, **gDensity,** which is distributed on an area of the cell, producing a current density **iDensity** and whose reversal potential is calculated from the Goldman Hodgkin Katz equation. Hard coded for Ca only! See https://github. com/OpenSourceBrain/ghk-nernst. + Specifies a time varying conductance density, **gDensity,** which is distributed on an area of the cell, producing a current density **iDensity** and whose reversal potential is calculated from the Goldman Hodgkin Katz equation. Hard coded for Ca only! See https://github. com/OpenSourceBrain/ghk-nernst. + + + @@ -2000,8 +2234,11 @@ - Time varying conductance density, **gDensity,** which is distributed on an area of the cell, producing a current density **iDensity.** Modified version of Jaffe et al. 1994 ( used also in Lawrence et al. 2006 ). See https://github. com/OpenSourceBrain/ghk-nernst. + Time varying conductance density, **gDensity,** which is distributed on an area of the cell, producing a current density **iDensity.** Modified version of Jaffe et al. 1994 ( used also in Lawrence et al. 2006 ). See https://github. com/OpenSourceBrain/ghk-nernst. 
+ + + - Description of a chemical species identified by **ion,** which has internal, **concentration,** and external, **extConcentration** values for its concentration + Description of a chemical species identified by **ion,** which has internal, **concentration,** and external, **extConcentration** values for its concentration + + + @@ -2092,8 +2338,11 @@ - Biophysical properties related to the intracellular space within the **cell** , such as the **resistivity** and the list of ionic **species** present. **caConc** and **caConcExt** are explicitly exposed here to facilitate accessing these values from other Components, even though **caConcExt** is clearly not an intracellular property + Biophysical properties related to the intracellular space within the **cell** , such as the **resistivity** and the list of ionic **species** present. **caConc** and **caConcExt** are explicitly exposed here to facilitate accessing these values from other Components, even though **caConcExt** is clearly not an intracellular property + + + @@ -2104,8 +2353,11 @@ - Variant of intracellularProperties with 2 independent Ca pools + Variant of intracellularProperties with 2 independent Ca pools + + + @@ -2151,8 +2403,11 @@ - Generates a constant current pulse of a certain **amplitude** for a specified **duration** after a **delay.** Scaled by **weight,** if set + Generates a constant current pulse of a certain **amplitude** for a specified **duration** after a **delay.** Scaled by **weight,** if set + + + @@ -2167,8 +2422,11 @@ - Dimensionless equivalent of **pulseGenerator** . Generates a constant current pulse of a certain **amplitude** for a specified **duration** after a **delay.** Scaled by **weight,** if set + Dimensionless equivalent of **pulseGenerator** . Generates a constant current pulse of a certain **amplitude** for a specified **duration** after a **delay.** Scaled by **weight,** if set + + + @@ -2183,8 +2441,11 @@ - Generates a sinusoidally varying current after a time **delay,** for a fixed **duration.** The **period** and maximum **amplitude** of the current can be set as well as the **phase** at which to start. Scaled by **weight,** if set + Generates a sinusoidally varying current after a time **delay,** for a fixed **duration.** The **period** and maximum **amplitude** of the current can be set as well as the **phase** at which to start. Scaled by **weight,** if set + + + @@ -2200,8 +2461,11 @@ - Dimensionless equivalent of **sineGenerator** . Generates a sinusoidally varying current after a time **delay,** for a fixed **duration.** The **period** and maximum **amplitude** of the current can be set as well as the **phase** at which to start. Scaled by **weight,** if set + Dimensionless equivalent of **sineGenerator** . Generates a sinusoidally varying current after a time **delay,** for a fixed **duration.** The **period** and maximum **amplitude** of the current can be set as well as the **phase** at which to start. Scaled by **weight,** if set + + + @@ -2217,8 +2481,11 @@ - Generates a ramping current after a time **delay,** for a fixed **duration.** During this time the current steadily changes from **startAmplitude** to **finishAmplitude.** Scaled by **weight,** if set + Generates a ramping current after a time **delay,** for a fixed **duration.** During this time the current steadily changes from **startAmplitude** to **finishAmplitude.** Scaled by **weight,** if set + + + @@ -2234,8 +2501,11 @@ - Dimensionless equivalent of **rampGenerator** . 
Generates a ramping current after a time **delay,** for a fixed **duration.** During this time the dimensionless current steadily changes from **startAmplitude** to **finishAmplitude.** Scaled by **weight,** if set + Dimensionless equivalent of **rampGenerator** . Generates a ramping current after a time **delay,** for a fixed **duration.** During this time the dimensionless current steadily changes from **startAmplitude** to **finishAmplitude.** Scaled by **weight,** if set + + + @@ -2251,8 +2521,11 @@ - Generates a current which is the sum of all its child **basePointCurrent** element, e. g. can be a combination of **pulseGenerator** , **sineGenerator** elements producing a single **i.** Scaled by **weight,** if set + Generates a current which is the sum of all its child **basePointCurrent** element, e. g. can be a combination of **pulseGenerator** , **sineGenerator** elements producing a single **i.** Scaled by **weight,** if set + + + @@ -2268,8 +2541,11 @@ - Generates a current which is the sum of all its child **basePointCurrentDL** elements, e. g. can be a combination of **pulseGeneratorDL** , **sineGeneratorDL** elements producing a single **i.** Scaled by **weight,** if set + Generates a current which is the sum of all its child **basePointCurrentDL** elements, e. g. can be a combination of **pulseGeneratorDL** , **sineGeneratorDL** elements producing a single **i.** Scaled by **weight,** if set + + + @@ -2285,8 +2561,11 @@ - Voltage clamp. Applies a variable current **i** to try to keep parent at **targetVoltage.** Not yet fully tested!!! Consider using voltageClampTriple!! + Voltage clamp. Applies a variable current **i** to try to keep parent at **targetVoltage.** Not yet fully tested!!! Consider using voltageClampTriple!! + + + @@ -2301,8 +2580,11 @@ - Voltage clamp with 3 clamp levels. Applies a variable current **i** ( through **simpleSeriesResistance** ) to try to keep parent cell at **conditioningVoltage** until time **delay,** **testingVoltage** until **delay** + **duration,** and **returnVoltage** afterwards. Only enabled if **active** = 1. + Voltage clamp with 3 clamp levels. Applies a variable current **i** ( through **simpleSeriesResistance** ) to try to keep parent cell at **conditioningVoltage** until time **delay,** **testingVoltage** until **delay** + **duration,** and **returnVoltage** afterwards. Only enabled if **active** = 1. + + + @@ -2320,8 +2602,11 @@ - Emits a single spike at the specified **time** + Emits a single spike at the specified **time** + + + @@ -2333,8 +2618,11 @@ - Set of spike ComponentTypes, each emitting one spike at a certain time. Can be used to feed a predetermined spike train into a cell + Set of spike ComponentTypes, each emitting one spike at a certain time. Can be used to feed a predetermined spike train into a cell + + + @@ -2348,8 +2636,11 @@ - Spike array connected to a single **synapse,** producing a current triggered by each **spike** in the array. + Spike array connected to a single **synapse,** producing a current triggered by each **spike** in the array. 
+ + + @@ -2365,8 +2656,11 @@ - Simple generator of spikes at a regular interval set by **period** + Simple generator of spikes at a regular interval set by **period** + + + @@ -2378,8 +2672,11 @@ - Generator of spikes with a random interspike interval of at least **minISI** and at most **maxISI** + Generator of spikes with a random interspike interval of at least **minISI** and at most **maxISI** + + + @@ -2392,8 +2689,11 @@ - Generator of spikes whose ISI is distributed according to an exponential PDF with scale: 1 / **averageRate** + Generator of spikes whose ISI is distributed according to an exponential PDF with scale: 1 / **averageRate** + + + @@ -2405,8 +2705,11 @@ - Generator of spikes whose ISI distribution is the maximum entropy distribution over [ **minimumISI,** +infinity ) with mean: 1 / **averageRate** + Generator of spikes whose ISI distribution is the maximum entropy distribution over [ **minimumISI,** +infinity ) with mean: 1 / **averageRate** + + + @@ -2418,8 +2721,11 @@ - Poisson spike generator firing at **averageRate,** which is connected to single **synapse** that is triggered every time a spike is generated, producing an input current. See also **transientPoissonFiringSynapse** . + Poisson spike generator firing at **averageRate,** which is connected to single **synapse** that is triggered every time a spike is generated, producing an input current. See also **transientPoissonFiringSynapse** . + + + @@ -2433,8 +2739,11 @@ - Poisson spike generator firing at **averageRate** after a **delay** and for a **duration,** connected to single **synapse** that is triggered every time a spike is generated, providing an input current. Similar to ComponentType **poissonFiringSynapse** . + Poisson spike generator firing at **averageRate** after a **delay** and for a **duration,** connected to single **synapse** that is triggered every time a spike is generated, providing an input current. Similar to ComponentType **poissonFiringSynapse** . + + + @@ -2453,8 +2762,11 @@ - Network containing: **population** s ( potentially of type **populationList** , and so specifying a list of cell **location** s ); **projection** s ( with lists of **connection** s ) and/or **explicitConnection** s; and **inputList** s ( with lists of **input** s ) and/or **explicitInput** s. Note: often in NeuroML this will be of type **networkWithTemperature** if there are temperature dependent elements ( e. g. ion channels ). + Network containing: **population** s ( potentially of type **populationList** , and so specifying a list of cell **location** s ); **projection** s ( with lists of **connection** s ) and/or **explicitConnection** s; and **inputList** s ( with lists of **input** s ) and/or **explicitInput** s. Note: often in NeuroML this will be of type **networkWithTemperature** if there are temperature dependent elements ( e. g. ion channels ). + + + @@ -2524,8 +2836,11 @@ - Initial attempt to specify 3D region for placing cells. Work in progress. . . + Initial attempt to specify 3D region for placing cells. Work in progress. . . + + + @@ -2541,8 +2856,11 @@ - A population of components, with just one parameter for the **size,** i. e. number of components to create. Note: quite often this is used with type= **populationList** which means the size is determined by the number of **instance** s ( with **location** s ) in the list. The **size** attribute is still set, and there will be a validation error if this does not match the number in the list. 
+ A population of components, with just one parameter for the **size,** i. e. number of components to create. Note: quite often this is used with type= **populationList** which means the size is determined by the number of **instance** s ( with **location** s ) in the list. The **size** attribute is still set, and there will be a validation error if this does not match the number in the list. + + + @@ -2589,8 +2907,11 @@ - Specifies a single instance of a component in a **population** ( placed at **location** ). + Specifies a single instance of a component in a **population** ( placed at **location** ). + + + @@ -2607,8 +2928,11 @@ - Specifies the ( x, y, z ) location of a single **instance** of a component in a **population** + Specifies the ( x, y, z ) location of a single **instance** of a component in a **population** + + + @@ -2629,8 +2953,11 @@ - Explicit event connection between named components, which gets processed via a new instance of a **synapse** component which is created on the target component + Explicit event connection between named components, which gets processed via a new instance of a **synapse** component which is created on the target component + + + @@ -2653,8 +2980,11 @@ - Projection from one population, **presynapticPopulation** to another, **postsynapticPopulation,** through **synapse.** Contains lists of **connection** or **connectionWD** elements. + Projection from one population, **presynapticPopulation** to another, **postsynapticPopulation,** through **synapse.** Contains lists of **connection** or **connectionWD** elements. + + + @@ -2713,8 +3043,11 @@ - Event connection directly between named components, which gets processed via a new instance of a **synapse** component which is created on the target component. Normally contained inside a **projection** element. + Event connection directly between named components, which gets processed via a new instance of a **synapse** component which is created on the target component. Normally contained inside a **projection** element. + + + @@ -2726,8 +3059,11 @@ - Event connection between named components, which gets processed via a new instance of a synapse component which is created on the target component, includes setting of **weight** and **delay** for the synaptic connection + Event connection between named components, which gets processed via a new instance of a synapse component which is created on the target component, includes setting of **weight** and **delay** for the synaptic connection + + + @@ -2741,8 +3077,11 @@ - A projection between **presynapticPopulation** to another **postsynapticPopulation** through gap junctions. + A projection between **presynapticPopulation** to another **postsynapticPopulation** through gap junctions. + + + @@ -2759,8 +3098,11 @@ - To enable connections between populations through gap junctions. + To enable connections between populations through gap junctions. + + + @@ -2773,8 +3115,11 @@ - To enable connections between populations through gap junctions. Populations need to be of type **populationList** and contain **instance** and **location** elements. + To enable connections between populations through gap junctions. Populations need to be of type **populationList** and contain **instance** and **location** elements. + + + @@ -2785,8 +3130,11 @@ - To enable connections between populations through gap junctions. Populations need to be of type **populationList** and contain **instance** and **location** elements. 
Includes setting of **weight** for the connection + To enable connections between populations through gap junctions. Populations need to be of type **populationList** and contain **instance** and **location** elements. Includes setting of **weight** for the connection + + + @@ -2799,8 +3147,11 @@ - A projection between **presynapticPopulation** and **postsynapticPopulation** through components **preComponent** at the start and **postComponent** at the end of a **continuousConnection** or **continuousConnectionInstance** . Can be used for analog synapses. + A projection between **presynapticPopulation** and **postsynapticPopulation** through components **preComponent** at the start and **postComponent** at the end of a **continuousConnection** or **continuousConnectionInstance** . Can be used for analog synapses. + + + @@ -2817,8 +3168,11 @@ - An instance of a connection in a **continuousProjection** between **presynapticPopulation** to another **postsynapticPopulation** through a **preComponent** at the start and **postComponent** at the end. Can be used for analog synapses. + An instance of a connection in a **continuousProjection** between **presynapticPopulation** to another **postsynapticPopulation** through a **preComponent** at the start and **postComponent** at the end. Can be used for analog synapses. + + + @@ -2832,8 +3186,11 @@ - An instance of a connection in a **continuousProjection** between **presynapticPopulation** to another **postsynapticPopulation** through a **preComponent** at the start and **postComponent** at the end. Populations need to be of type **populationList** and contain **instance** and **location** elements. Can be used for analog synapses. + An instance of a connection in a **continuousProjection** between **presynapticPopulation** to another **postsynapticPopulation** through a **preComponent** at the start and **postComponent** at the end. Populations need to be of type **populationList** and contain **instance** and **location** elements. Can be used for analog synapses. + + + @@ -2844,8 +3201,11 @@ - An instance of a connection in a **continuousProjection** between **presynapticPopulation** to another **postsynapticPopulation** through a **preComponent** at the start and **postComponent** at the end. Populations need to be of type **populationList** and contain **instance** and **location** elements. Can be used for analog synapses. Includes setting of **weight** for the connection + An instance of a connection in a **continuousProjection** between **presynapticPopulation** to another **postsynapticPopulation** through a **preComponent** at the start and **postComponent** at the end. Populations need to be of type **populationList** and contain **instance** and **location** elements. Can be used for analog synapses. Includes setting of **weight** for the connection + + + @@ -2858,8 +3218,11 @@ - An explicit input ( anything which extends **basePointCurrent** ) to a target cell in a population + An explicit input ( anything which extends **basePointCurrent** ) to a target cell in a population + + + @@ -2870,8 +3233,11 @@ - An explicit list of **input** s to a **population.** + An explicit list of **input** s to a **population.** + + + @@ -2889,8 +3255,11 @@ - Specifies a single input to a **target,** optionally giving the **segmentId** ( default 0 ) and **fractionAlong** the segment ( default 0. 5 ). + Specifies a single input to a **target,** optionally giving the **segmentId** ( default 0 ) and **fractionAlong** the segment ( default 0. 5 ). 
+ + + @@ -2903,8 +3272,11 @@ - Specifies input lists. Can set **weight** to scale individual inputs. + Specifies input lists. Can set **weight** to scale individual inputs. + + + @@ -2920,8 +3292,11 @@ - Base type of any PyNN standard cell model. Note: membrane potential **v** has dimensions voltage, but all other parameters are dimensionless. This is to facilitate translation to and from PyNN scripts in Python, where these parameters have implicit units, see http://neuralensemble. org/trac/PyNN/wiki/StandardModels + Base type of any PyNN standard cell model. Note: membrane potential **v** has dimensions voltage, but all other parameters are dimensionless. This is to facilitate translation to and from PyNN scripts in Python, where these parameters have implicit units, see http://neuralensemble. org/trac/PyNN/wiki/StandardModels + + + @@ -2937,8 +3312,11 @@ - Base type of any PyNN standard integrate and fire model + Base type of any PyNN standard integrate and fire model + + + @@ -2954,8 +3332,11 @@ - Base type of conductance based PyNN IaF cell models + Base type of conductance based PyNN IaF cell models + + + @@ -2968,8 +3349,11 @@ - Leaky integrate and fire model with fixed threshold and alpha-function-shaped post-synaptic current + Leaky integrate and fire model with fixed threshold and alpha-function-shaped post-synaptic current + + + @@ -2980,8 +3364,11 @@ - Leaky integrate and fire model with fixed threshold and decaying-exponential post-synaptic current + Leaky integrate and fire model with fixed threshold and decaying-exponential post-synaptic current + + + @@ -2992,8 +3379,11 @@ - Leaky integrate and fire model with fixed threshold and alpha-function-shaped post-synaptic conductance + Leaky integrate and fire model with fixed threshold and alpha-function-shaped post-synaptic conductance + + + @@ -3004,8 +3394,11 @@ - Leaky integrate and fire model with fixed threshold and exponentially-decaying post-synaptic conductance + Leaky integrate and fire model with fixed threshold and exponentially-decaying post-synaptic conductance + + + @@ -3016,8 +3409,11 @@ - Adaptive exponential integrate and fire neuron according to Brette R and Gerstner W ( 2005 ) with exponentially-decaying post-synaptic conductance + Adaptive exponential integrate and fire neuron according to Brette R and Gerstner W ( 2005 ) with exponentially-decaying post-synaptic conductance + + + @@ -3033,8 +3429,11 @@ - Adaptive exponential integrate and fire neuron according to Brette R and Gerstner W ( 2005 ) with alpha-function-shaped post-synaptic conductance + Adaptive exponential integrate and fire neuron according to Brette R and Gerstner W ( 2005 ) with alpha-function-shaped post-synaptic conductance + + + @@ -3045,8 +3444,11 @@ - Single-compartment Hodgkin-Huxley-type neuron with transient sodium and delayed-rectifier potassium currents using the ion channel models from Traub. + Single-compartment Hodgkin-Huxley-type neuron with transient sodium and delayed-rectifier potassium currents using the ion channel models from Traub. + + + @@ -3066,8 +3468,11 @@ - Base type for all PyNN synapses. Note, the current **I** produced is dimensionless, but it requires a membrane potential **v** with dimension voltage + Base type for all PyNN synapses. 
Note, the current **I** produced is dimensionless, but it requires a membrane potential **v** with dimension voltage + + + @@ -3079,8 +3484,11 @@ - Conductance based synapse with instantaneous rise and single exponential decay ( with time constant tau_syn ) + Conductance based synapse with instantaneous rise and single exponential decay ( with time constant tau_syn ) + + + @@ -3092,8 +3500,11 @@ - Alpha synapse: rise time and decay time are both tau_syn. Conductance based synapse. + Alpha synapse: rise time and decay time are both tau_syn. Conductance based synapse. + + + @@ -3105,8 +3516,11 @@ - Current based synapse with instantaneous rise and single exponential decay ( with time constant tau_syn ) + Current based synapse with instantaneous rise and single exponential decay ( with time constant tau_syn ) + + + @@ -3118,8 +3532,11 @@ - Alpha synapse: rise time and decay time are both tau_syn. Current based synapse. + Alpha synapse: rise time and decay time are both tau_syn. Current based synapse. + + + @@ -3131,8 +3548,11 @@ - Spike source, generating spikes according to a Poisson process. + Spike source, generating spikes according to a Poisson process. + + + From 70807201d38b5b0b0e6f2e70f832e6804e74d92a Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Wed, 17 Nov 2021 15:11:56 +0000 Subject: [PATCH 115/136] chore: regenerate nml.py --- neuroml/nml/nml.py | 133 +++++++++++++++++++++++++-------------------- 1 file changed, 74 insertions(+), 59 deletions(-) diff --git a/neuroml/nml/nml.py b/neuroml/nml/nml.py index 569f6f30..33c4ce1b 100644 --- a/neuroml/nml/nml.py +++ b/neuroml/nml/nml.py @@ -2,7 +2,7 @@ # -*- coding: utf-8 -*- # -# Generated Wed Nov 17 14:27:36 2021 by generateDS.py version 2.40.3. +# Generated Wed Nov 17 15:09:41 2021 by generateDS.py version 2.40.3. # Python 3.10.0 (default, Oct 4 2021, 00:00:00) [GCC 11.2.1 20210728 (Red Hat 11.2.1-1)] # # Command line options: @@ -10868,8 +10868,9 @@ def _buildChildren( class Species(GeneratedsSuper): """Species -- Description of a chemical species identified by **ion,** which has internal, **concentration,** and external, **extConcentration** values for its concentration - ion -- Specifying the ion here again is redundant, the ion name should be the same as id. Kept for now - until LEMS implementation can select by id. TODO: remove. + + * ion -- Specifying the ion here again is redundant, the ion name should be the same as id. Kept for now + until LEMS implementation can select by id. TODO: remove. """ @@ -24243,12 +24244,13 @@ def _buildChildren( class ChannelDensityGHK2(Base): """ChannelDensityGHK2 -- Time varying conductance density, **gDensity,** which is distributed on an area of the cell, producing a current density **iDensity.** Modified version of Jaffe et al. 1994 ( used also in Lawrence et al. 2006 ). See https://github. com/OpenSourceBrain/ghk-nernst. - ion -- Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here - TEMPORARILY since selecting all ca or na conducting channel populations/densities in a cell would be difficult otherwise. - Also, it will make it easier to set the correct native simulator value for erev (e.g. ek for ion = k in NEURON). - Currently a required attribute. - It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel - element. TODO: remove. + + * ion -- Specifying the ion here again is redundant, this will be set in ionChannel definition. 
It is added here + TEMPORARILY since selecting all ca or na conducting channel populations/densities in a cell would be difficult otherwise. + Also, it will make it easier to set the correct native simulator value for erev (e.g. ek for ion = k in NEURON). + Currently a required attribute. + It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel + element. TODO: remove. """ @@ -24592,12 +24594,13 @@ def _buildChildren( class ChannelDensityGHK(Base): """ChannelDensityGHK -- Specifies a time varying conductance density, **gDensity,** which is distributed on an area of the cell, producing a current density **iDensity** and whose reversal potential is calculated from the Goldman Hodgkin Katz equation. Hard coded for Ca only! See https://github. com/OpenSourceBrain/ghk-nernst. - ion -- Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here - TEMPORARILY since selecting all ca or na conducting channel populations/densities in a cell would be difficult otherwise. - Also, it will make it easier to set the correct native simulator value for erev (e.g. ek for ion = k in NEURON). - Currently a required attribute. - It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel - element. TODO: remove. + + * ion -- Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here + TEMPORARILY since selecting all ca or na conducting channel populations/densities in a cell would be difficult otherwise. + Also, it will make it easier to set the correct native simulator value for erev (e.g. ek for ion = k in NEURON). + Currently a required attribute. + It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel + element. TODO: remove. """ @@ -24939,12 +24942,13 @@ def _buildChildren( class ChannelDensityNernst(Base): """ChannelDensityNernst -- Specifies a time varying conductance density, **gDensity,** which is distributed on an area of the **cell,** producing a current density **iDensity** and whose reversal potential is calculated from the Nernst equation. Hard coded for Ca only! See https://github. com/OpenSourceBrain/ghk-nernst. - ion -- Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here - TEMPORARILY since selecting all ca or na conducting channel populations/densities in a cell would be difficult otherwise. - Also, it will make it easier to set the correct native simulator value for erev (e.g. ek for ion = k in NEURON). - Currently a required attribute. - It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel - element. TODO: remove. + + * ion -- Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here + TEMPORARILY since selecting all ca or na conducting channel populations/densities in a cell would be difficult otherwise. + Also, it will make it easier to set the correct native simulator value for erev (e.g. ek for ion = k in NEURON). + Currently a required attribute. + It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel + element. TODO: remove. 
""" @@ -25349,12 +25353,13 @@ def _buildChildren( class ChannelDensity(Base): """ChannelDensity -- Specifies a time varying ohmic conductance density, **gDensity,** which is distributed on an area of the **cell** ( specified in **membraneProperties** ) with fixed reversal potential **erev** producing a current density **iDensity** - ion -- Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here - TEMPORARILY since selecting all ca or na conducting channel populations/densities in a cell would be difficult otherwise. - Also, it will make it easier to set the correct native simulator value for erev (e.g. ek for ion = k in NEURON). - Currently a required attribute. - It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel - element. TODO: remove. + + * ion -- Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here + TEMPORARILY since selecting all ca or na conducting channel populations/densities in a cell would be difficult otherwise. + Also, it will make it easier to set the correct native simulator value for erev (e.g. ek for ion = k in NEURON). + Currently a required attribute. + It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel + element. TODO: remove. """ @@ -25827,12 +25832,13 @@ def _buildChildren( class ChannelDensityNonUniformGHK(Base): """ChannelDensityNonUniformGHK -- Specifies a time varying conductance density, which is distributed on a region of the **cell,** and whose current is calculated from the Goldman-Hodgkin-Katz equation. Hard coded for Ca only!. The conductance density of the channel is not uniform, but is set using the **variableParameter** . Note, there is no dynamical description of this in LEMS yet, as this type only makes sense for multicompartmental cells. A ComponentType for this needs to be present to enable export of NeuroML 2 multicompartmental cells via LEMS/jNeuroML to NEURON - ion -- Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here - TEMPORARILY since selecting all ca or na conducting channel populations/densities in a cell would be difficult otherwise. - Also, it will make it easier to set the correct native simulator value for erev (e.g. ek for ion = k in NEURON). - Currently a required attribute. - It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel - element. TODO: remove. + + * ion -- Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here + TEMPORARILY since selecting all ca or na conducting channel populations/densities in a cell would be difficult otherwise. + Also, it will make it easier to set the correct native simulator value for erev (e.g. ek for ion = k in NEURON). + Currently a required attribute. + It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel + element. TODO: remove. """ @@ -26116,12 +26122,13 @@ def _buildChildren( class ChannelDensityNonUniformNernst(Base): """ChannelDensityNonUniformNernst -- Specifies a time varying conductance density, which is distributed on a region of the **cell,** and whose reversal potential is calculated from the Nernst equation. Hard coded for Ca only!. The conductance density of the channel is not uniform, but is set using the **variableParameter** . 
Note, there is no dynamical description of this in LEMS yet, as this type only makes sense for multicompartmental cells. A ComponentType for this needs to be present to enable export of NeuroML 2 multicompartmental cells via LEMS/jNeuroML to NEURON - ion -- Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here - TEMPORARILY since selecting all ca or na conducting channel populations/densities in a cell would be difficult otherwise. - Also, it will make it easier to set the correct native simulator value for erev (e.g. ek for ion = k in NEURON). - Currently a required attribute. - It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel - element. TODO: remove. + + * ion -- Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here + TEMPORARILY since selecting all ca or na conducting channel populations/densities in a cell would be difficult otherwise. + Also, it will make it easier to set the correct native simulator value for erev (e.g. ek for ion = k in NEURON). + Currently a required attribute. + It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel + element. TODO: remove. """ @@ -26407,12 +26414,13 @@ def _buildChildren( class ChannelDensityNonUniform(Base): """ChannelDensityNonUniform -- Specifies a time varying ohmic conductance density, which is distributed on a region of the **cell.** The conductance density of the channel is not uniform, but is set using the **variableParameter** . Note, there is no dynamical description of this in LEMS yet, as this type only makes sense for multicompartmental cells. A ComponentType for this needs to be present to enable export of NeuroML 2 multicompartmental cells via LEMS/jNeuroML to NEURON - ion -- Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here - TEMPORARILY since selecting all ca or na conducting channel populations/densities in a cell would be difficult otherwise. - Also, it will make it easier to set the correct native simulator value for erev (e.g. ek for ion = k in NEURON). - Currently a required attribute. - It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel - element. TODO: remove. + + * ion -- Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here + TEMPORARILY since selecting all ca or na conducting channel populations/densities in a cell would be difficult otherwise. + Also, it will make it easier to set the correct native simulator value for erev (e.g. ek for ion = k in NEURON). + Currently a required attribute. + It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel + element. TODO: remove. """ @@ -26750,12 +26758,13 @@ def _buildChildren( class ChannelPopulation(Base): """ChannelPopulation -- Population of a **number** of ohmic ion channels. These each produce a conductance **channelg** across a reversal potential **erev,** giving a total current **i.** Note that active membrane currents are more frequently specified as a density over an area of the **cell** using **channelDensity** - ion -- Specifying the ion here again is redundant, this will be set in ionChannel definition. 
It is added here - TEMPORARILY since selecting all ca or na conducting channel populations/densities in a cell would be difficult otherwise. - Also, it will make it easier to set the correct native simulator value for erev (e.g. ek for ion = k in NEURON). - Currently a required attribute. - It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel - element. TODO: remove. + + * ion -- Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here + TEMPORARILY since selecting all ca or na conducting channel populations/densities in a cell would be difficult otherwise. + Also, it will make it easier to set the correct native simulator value for erev (e.g. ek for ion = k in NEURON). + Currently a required attribute. + It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel + element. TODO: remove. """ @@ -29487,9 +29496,9 @@ def _buildChildren( class FixedFactorConcentrationModel(Standalone): - """FixedFactorConcentrationModel -- Model of buffering of concentration of an ion ( currently hard coded to be calcium, due to requirement for **iCa** ) which has a baseline level **restingConc** and tends to this value with time course * - *decayConstant.** A fixed factor **rho** is used to scale the incoming current *independently of the size of the compartment* to produce a concentration change. - ion -- Should not be required, as it's present on the species element! + """FixedFactorConcentrationModel -- Model of buffering of concentration of an ion ( currently hard coded to be calcium, due to requirement for **iCa** ) which has a baseline level **restingConc** and tends to this value with time course **decayConstant.** A fixed factor **rho** is used to scale the incoming current *independently of the size of the compartment* to produce a concentration change. + + * ion -- Should not be required, as it's present on the species element! """ @@ -29893,7 +29902,8 @@ def _buildChildren( class DecayingPoolConcentrationModel(Standalone): """DecayingPoolConcentrationModel -- Model of an intracellular buffering mechanism for **ion** ( currently hard Coded to be calcium, due to requirement for **iCa** ) which has a baseline level **restingConc** and tends to this value with time course **decayConstant.** The ion is assumed to occupy a shell inside the membrane of thickness **shellThickness.** - ion -- Should not be required, as it's present on the species element! + + * ion -- Should not be required, as it's present on the species element! """ @@ -41666,7 +41676,10 @@ def _buildChildren( class ChannelDensityNernstCa2(ChannelDensityNernst): - """ChannelDensityNernstCa2 -- This component is similar to the original component type **channelDensityNernst** but it is changed in order to have a reversal potential that depends on a second independent Ca++ pool ( ca2 ). See https://github. com/OpenSourceBrain/ghk-nernst.""" + """ChannelDensityNernstCa2 -- This component is similar to the original compon + ent type **channelDensityNernst** but it is changed in order to have a reversal potential that depends on a second independent Ca++ pool ( ca2 ). See https://github. com/OpenSourceBrain/ghk-nernst. + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [] @@ -42079,8 +42092,9 @@ def _buildChildren( class Cell(BaseCell): """Cell -- Cell with **segment** s specified in a **morphology** element along with details on its **biophysicalProperties** . 
NOTE: this can only be correctly simulated using jLEMS when there is a single segment in the cell, and **v** of this cell represents the membrane potential in that isopotential segment. - morphology -- Should only be used if morphology element is outside the cell. - This points to the id of the morphology + + * morphology -- Should only be used if morphology element is outside the cell. + This points to the id of the morphology * biophysicalProperties -- Should only be used if biophysicalProperties element is outside the cell. This points to the id of the biophysicalProperties @@ -44217,7 +44231,8 @@ def _buildChildren( class BaseCellMembPotCap(BaseCell): """BaseCellMembPotCap -- Any cell with a membrane potential **v** with voltage units and a membrane capacitance **C.** Also defines exposed value **iSyn** for current due to external synapses and **iMemb** for total transmembrane current ( usually channel currents plus **iSyn** ) - C -- This is to prevent it conflicting with attribute c (lowercase) e.g. in izhikevichCell2007 + + * C -- This is to prevent it conflicting with attribute c (lowercase) e.g. in izhikevichCell2007 """ From 492d59f48cbd3e2b9df9914efa958f2af0c87dea Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Wed, 17 Nov 2021 17:21:51 +0000 Subject: [PATCH 116/136] chore: sync XSD --- neuroml/nml/NeuroML_v2.2.xsd | 1162 ++++++++++++++++++++++++++++------ 1 file changed, 968 insertions(+), 194 deletions(-) diff --git a/neuroml/nml/NeuroML_v2.2.xsd b/neuroml/nml/NeuroML_v2.2.xsd index ad02f40b..e719f3f1 100644 --- a/neuroml/nml/NeuroML_v2.2.xsd +++ b/neuroml/nml/NeuroML_v2.2.xsd @@ -593,7 +593,11 @@ - + + + (dimension: voltage) + + @@ -613,8 +617,16 @@ - - + + + (dimension: none) + + + + + (dimension: temperature) + + @@ -1000,9 +1012,21 @@ Should not be required, as it's present on the species element! - - - + + + (dimension: concentration) + + + + + (dimension: time) + + + + + (dimension: length) + + @@ -1023,9 +1047,21 @@ Should not be required, as it's present on the species element! - - - + + + (dimension: concentration) + + + + + (dimension: time) + + + + + (dimension: rho_factor) + + @@ -1092,8 +1128,16 @@ - - + + + (dimension: conductance): Baseline conductance, generally the maximum conductance following a single spike + + + + + (dimension: voltage): Reversal potential of the synapse + + @@ -1109,9 +1153,21 @@ - - - + + + (dimension: conductance): Baseline conductance 1 + + + + + (dimension: conductance): Baseline conductance 2 + + + + + (dimension: voltage): Reversal potential of the synapse + + @@ -1130,7 +1186,11 @@ Gap junction/single electrical connection - + + + (dimension: conductance) + + @@ -1168,7 +1228,11 @@ Behaves just like a one way gap junction. - + + + (dimension: conductance) + + @@ -1187,11 +1251,31 @@ Based on synapse in Methods of http://www.nature.com/neuro/journal/v7/n12/abs/nn1352.html. 
- - - - - + + + (dimension: conductance) + + + + + (dimension: voltage): Slope of the activation curve + + + + + (dimension: voltage): The half-activation voltage of the synapse + + + + + (dimension: per_time): Rate constant for transmitter-receptor dissociation rate + + + + + (dimension: voltage): The reversal potential of the synapse + + @@ -1207,8 +1291,16 @@ - - + + + (dimension: time): Time course for rise and decay + + + + + (dimension: current): Baseline current increase after receiving a spike + + @@ -1224,7 +1316,11 @@ - + + + (dimension: time): Time course of rise/decay + + @@ -1240,7 +1336,11 @@ - + + + (dimension: time): Time course of decay + + @@ -1256,8 +1356,16 @@ - - + + + (dimension: time) + + + + + (dimension: time) + + @@ -1273,9 +1381,21 @@ - - - + + + (dimension: time) + + + + + (dimension: time) + + + + + (dimension: time) + + @@ -1371,10 +1491,18 @@ - + + + (dimension: voltage) + + - + + + (dimension: time) + + @@ -1390,7 +1518,11 @@ - + + + (dimension: time) + + @@ -1406,11 +1538,19 @@ - + + + (dimension: voltage) + + - + + + (dimension: conductance) + + @@ -1426,7 +1566,11 @@ - + + + (dimension: time) + + @@ -1442,12 +1586,36 @@ - - - - - - + + + (dimension: voltage): Initial membrane potential + + + + + (dimension: voltage): Spike threshold + + + + + (dimension: none): Time scale of the recovery variable U + + + + + (dimension: none): Sensitivity of U to the subthreshold fluctuations of the membrane potential v + + + + + (dimension: none): After-spike reset value of v + + + + + (dimension: none): After-spike reset of u + + @@ -1465,7 +1633,9 @@ - This is to prevent it conflicting with attribute c (lowercase) e.g. in izhikevichCell2007 + (dimension: capacitance): Total capacitance of the cell membrane + + @@ -1486,15 +1656,51 @@ - - - - - - - - - + + + (dimension: voltage) + + + + + (dimension: conductance_per_voltage) + + + + + (dimension: voltage) + + + + + (dimension: voltage) + + + + + (dimension: voltage) + + + + + (dimension: per_time) + + + + + (dimension: conductance) + + + + + (dimension: voltage) + + + + + (dimension: current) + + @@ -1510,16 +1716,56 @@ - - - - - - - - - - + + + (dimension: conductance) + + + + + (dimension: voltage) + + + + + (dimension: voltage) + + + + + (dimension: voltage) + + + + + (dimension: voltage) + + + + + (dimension: voltage) + + + + + (dimension: time) + + + + + (dimension: time) + + + + + (dimension: conductance) + + + + + (dimension: current) + + @@ -1535,7 +1781,11 @@ - + + + (dimension: none) + + @@ -1551,12 +1801,36 @@ - - - - - - + + + (dimension: none) + + + + + (dimension: none) + + + + + (dimension: none): plays the role of an external injected current + + + + + (dimension: none) + + + + + (dimension: none) + + + + + (dimension: none) + + @@ -1572,27 +1846,111 @@ - - - - - - - - - - - - - - - - - - - - - + + + (dimension: currentDensity) + + + + + (dimension: currentDensity) + + + + + (dimension: conductanceDensity) + + + + + (dimension: conductanceDensity) + + + + + (dimension: conductanceDensity) + + + + + (dimension: conductanceDensity) + + + + + (dimension: conductanceDensity) + + + + + (dimension: conductanceDensity) + + + + + (dimension: conductanceDensity) + + + + + (dimension: conductanceDensity) + + + + + (dimension: conductanceDensity) + + + + + (dimension: conductanceDensity) + + + + + (dimension: voltage) + + + + + (dimension: voltage) + + + + + (dimension: voltage) + + + + + (dimension: voltage) + + + + + (dimension: none) + + + + + (dimension: none) + + + + + (dimension: none) + + + + + (dimension: 
none) + + + + + (dimension: specificCapacitance) + + @@ -1705,10 +2063,26 @@ - - - - + + + (dimension: none): x coordinate of the point. Note: no dimension used, see description of **point3DWithDiam** for details. + + + + + (dimension: none): y coordinate of the ppoint. Note: no dimension used, see description of **point3DWithDiam** for details. + + + + + (dimension: none): z coordinate of the ppoint. Note: no dimension used, see description of **point3DWithDiam** for details. + + + + + (dimension: none): Diameter of the ppoint. Note: no dimension used, see description of **point3DWithDiam** for details. + + @@ -1929,7 +2303,11 @@ - + + + (dimension: voltage) + + @@ -1943,7 +2321,11 @@ - + + + (dimension: specificCapacitance) + + @@ -1957,7 +2339,11 @@ - + + + (dimension: voltage) + + @@ -1971,7 +2357,11 @@ - + + + (dimension: resistivity) + + @@ -1989,8 +2379,16 @@ - - + + + (dimension: none): The number of channels present. This will be multiplied by the time varying conductance of the individual ion channel ( which extends **baseIonChannel** ) to produce the total conductance + + + + + (dimension: voltage): The reversal potential of the current produced + + @@ -2024,7 +2422,11 @@ - + + + (dimension: voltage): The reversal potential of the current produced + + Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here @@ -2116,7 +2518,11 @@ - + + + (dimension: voltage): The reversal potential of the current produced + + @@ -2146,7 +2552,11 @@ - + + + (dimension: voltage) + + @@ -2213,7 +2623,11 @@ - + + + (dimension: permeability) + + @@ -2323,8 +2737,16 @@ - - + + + (dimension: concentration) + + + + + (dimension: concentration) + + @@ -2414,9 +2836,21 @@ - - - + + + (dimension: time): Delay before change in current. Current is zero prior to this. + + + + + (dimension: time): Duration for holding current at amplitude. Current is zero after delay + duration. + + + + + (dimension: current): Amplitude of current pulse + + @@ -2433,9 +2867,21 @@ - - - + + + (dimension: time): Delay before change in current. Current is zero prior to this. + + + + + (dimension: time): Duration for holding current at amplitude. Current is zero after delay + duration. + + + + + (dimension: none): Amplitude of current pulse + + @@ -2450,11 +2896,31 @@ - - - - - + + + (dimension: time): Delay before change in current. Current is zero prior to this. + + + + + (dimension: none): Phase ( between 0 and 2*pi ) at which to start the varying current ( i. e. at time given by delay ) + + + + + (dimension: time): Duration for holding current at amplitude. Current is zero after delay + duration. + + + + + (dimension: current): Maximum amplitude of current + + + + + (dimension: time): Time period of oscillation + + @@ -2470,11 +2936,31 @@ - - - - - + + + (dimension: time): Delay before change in current. Current is zero prior to this. + + + + + (dimension: none): Phase ( between 0 and 2*pi ) at which to start the varying current ( i. e. at time given by delay ) + + + + + (dimension: time): Duration for holding current at amplitude. Current is zero after delay + duration. + + + + + (dimension: none): Maximum amplitude of current + + + + + (dimension: time): Time period of oscillation + + @@ -2490,11 +2976,31 @@ - - - - - + + + (dimension: time): Delay before change in current. Current is baselineAmplitude prior to this. + + + + + (dimension: time): Duration for holding current at amplitude. Current is baselineAmplitude after delay + duration. 
+ + + + + (dimension: current): Amplitude of linearly varying current at time delay + + + + + (dimension: current): Amplitude of linearly varying current at time delay + duration + + + + + (dimension: current): Amplitude of current before time delay, and after time delay + duration + + @@ -2510,11 +3016,31 @@ - - - - - + + + (dimension: time): Delay before change in current. Current is baselineAmplitude prior to this. + + + + + (dimension: time): Duration for holding current at amplitude. Current is baselineAmplitude after delay + duration. + + + + + (dimension: none): Amplitude of linearly varying current at time delay + + + + + (dimension: none): Amplitude of linearly varying current at time delay + duration + + + + + (dimension: none): Amplitude of current before time delay, and after time delay + duration + + @@ -2570,10 +3096,26 @@ - - - - + + + (dimension: time): Delay before change in current. Current is zero prior to this. + + + + + (dimension: time): Duration for attempting to keep parent at targetVoltage. Current is zero after delay + duration. + + + + + (dimension: voltage): Current will be applied to try to get parent to this target voltage + + + + + (dimension: resistance): Current will be calculated by the difference in voltage between the target and parent, divided by this value + + @@ -2589,13 +3131,41 @@ - - - - - - - + + + (dimension: none): Whether the voltage clamp is active ( 1 ) or inactive ( 0 ). + + + + + (dimension: time): Delay before switching from conditioningVoltage to testingVoltage. + + + + + (dimension: time): Duration to hold at testingVoltage. + + + + + (dimension: voltage): Target voltage before time delay + + + + + (dimension: voltage): Target voltage between times delay and delay + duration + + + + + (dimension: voltage): Target voltage after time duration + + + + + (dimension: resistance): Current will be calculated by the difference in voltage between the target and parent, divided by this value + + @@ -2611,7 +3181,11 @@ - + + + (dimension: time): Time at which to emit one spike event + + @@ -2665,7 +3239,11 @@ - + + + (dimension: time): Time between spikes. The first spike will be emitted after this time. + + @@ -2681,8 +3259,16 @@ - - + + + (dimension: time): Maximum interspike interval + + + + + (dimension: time): Minimum interspike interval + + @@ -2698,7 +3284,11 @@ - + + + (dimension: per_time): The average rate at which spikes are emitted + + @@ -2714,7 +3304,11 @@ - + + + (dimension: time): The minimum interspike interval + + @@ -2730,7 +3324,11 @@ - + + + (dimension: per_time): The average rate at which spikes are emitted + + @@ -2748,9 +3346,21 @@ - - - + + + (dimension: per_time) + + + + + (dimension: time) + + + + + (dimension: time) + + @@ -2870,7 +3480,11 @@ - + + + (dimension: none): Number of instances of this Component to create when the population is instantiated + + @@ -2935,9 +3549,21 @@ - - - + + + (dimension: none) + + + + + (dimension: none) + + + + + (dimension: none) + + @@ -3070,8 +3696,16 @@ - - + + + (dimension: none) + + + + + (dimension: time) + + @@ -3141,7 +3775,11 @@ - + + + (dimension: none) + + @@ -3212,7 +3850,11 @@ - + + + (dimension: none) + + @@ -3283,7 +3925,11 @@ - + + + (dimension: none) + + @@ -3301,11 +3947,31 @@ - - - - - + + + (dimension: none) + + + + + (dimension: none) + + + + + (dimension: none): This parameter is never used in the NeuroML2 description of this cell! 
Any synapse producing a current can be placed on this cell + + + + + (dimension: none): This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell + + + + + (dimension: none) + + @@ -3321,11 +3987,31 @@ - - - - - + + + (dimension: none) + + + + + (dimension: none) + + + + + (dimension: none) + + + + + (dimension: none) + + + + + (dimension: none) + + @@ -3341,8 +4027,16 @@ - - + + + (dimension: none): This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell + + + + + (dimension: none): This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell + + @@ -3418,11 +4112,31 @@ - - - - - + + + (dimension: none) + + + + + (dimension: none) + + + + + (dimension: none) + + + + + (dimension: none) + + + + + (dimension: none) + + @@ -3453,15 +4167,51 @@ - - - - - - - - - + + + (dimension: none) + + + + + (dimension: none) + + + + + (dimension: none) + + + + + (dimension: none) + + + + + (dimension: none) + + + + + (dimension: none) + + + + + (dimension: none) + + + + + (dimension: none) + + + + + (dimension: none) + + @@ -3477,7 +4227,11 @@ - + + + (dimension: none) + + @@ -3493,7 +4247,11 @@ - + + + (dimension: none) + + @@ -3509,7 +4267,11 @@ - + + + (dimension: none) + + @@ -3557,9 +4319,21 @@ - - - + + + (dimension: time) + + + + + (dimension: time) + + + + + (dimension: per_time) + + From 6b095036dc2383b9757dd4a471ecea1316e749da Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Wed, 17 Nov 2021 17:22:07 +0000 Subject: [PATCH 117/136] chore: regenerate nml.py --- neuroml/nml/nml.py | 481 +++++++++++++++++++++++++++++++++++++++------ 1 file changed, 419 insertions(+), 62 deletions(-) diff --git a/neuroml/nml/nml.py b/neuroml/nml/nml.py index 33c4ce1b..4921c3e6 100644 --- a/neuroml/nml/nml.py +++ b/neuroml/nml/nml.py @@ -2,7 +2,7 @@ # -*- coding: utf-8 -*- # -# Generated Wed Nov 17 15:09:41 2021 by generateDS.py version 2.40.3. +# Generated Wed Nov 17 17:20:08 2021 by generateDS.py version 2.40.3. # Python 3.10.0 (default, Oct 4 2021, 00:00:00) [GCC 11.2.1 20210728 (Red Hat 11.2.1-1)] # # Command line options: @@ -4770,7 +4770,12 @@ def _buildChildren( class Q10ConductanceScaling(GeneratedsSuper): - """Q10ConductanceScaling -- A value for the conductance scaling which varies as a standard function of the difference between the current temperature, **temperature,** and the temperature at which the conductance was originally determined, **experimentalTemp**""" + """Q10ConductanceScaling -- A value for the conductance scaling which varies as a standard function of the difference between the current temperature, **temperature,** and the temperature at which the conductance was originally determined, **experimentalTemp** + + * q10Factor -- (dimension: none) + * experimentalTemp -- (dimension: temperature) + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -7215,7 +7220,14 @@ def _buildChildren( class Point3DWithDiam(GeneratedsSuper): - """Point3DWithDiam -- Base type for ComponentTypes which specify an ( **x,** **y,** **z** ) coordinate along with a **diameter.** Note: no dimension used in the attributes for these coordinates! These are assumed to have dimension micrometer ( 10^-6 m ). 
This is due to micrometers being the default option for the majority of neuronal morphology formats, and dimensions are omitted here to facilitate reading and writing of morphologies in NeuroML.""" + """Point3DWithDiam -- Base type for ComponentTypes which specify an ( **x,** **y,** **z** ) coordinate along with a **diameter.** Note: no dimension used in the attributes for these coordinates! These are assumed to have dimension micrometer ( 10^-6 m ). This is due to micrometers being the default option for the majority of neuronal morphology formats, and dimensions are omitted here to facilitate reading and writing of morphologies in NeuroML. + + * x -- (dimension: none): x coordinate of the point. Note: no dimension used, see description of **point3DWithDiam** for details. + * y -- (dimension: none): y coordinate of the ppoint. Note: no dimension used, see description of **point3DWithDiam** for details. + * z -- (dimension: none): z coordinate of the ppoint. Note: no dimension used, see description of **point3DWithDiam** for details. + * diameter -- (dimension: none): Diameter of the ppoint. Note: no dimension used, see description of **point3DWithDiam** for details. + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -9491,7 +9503,11 @@ def _buildChildren( class SpikeThresh(GeneratedsSuper): - """SpikeThresh -- Membrane potential at which to emit a spiking event. Note, usually the spiking event will not be emitted again until the membrane potential has fallen below this value and rises again to cross it in a positive direction""" + """SpikeThresh -- Membrane potential at which to emit a spiking event. Note, usually the spiking event will not be emitted again until the membrane potential has fallen below this value and rises again to cross it in a positive direction + + * value -- (dimension: voltage) + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -9730,7 +9746,11 @@ def _buildChildren( class SpecificCapacitance(GeneratedsSuper): - """SpecificCapacitance -- Capacitance per unit area""" + """SpecificCapacitance -- Capacitance per unit area + + * value -- (dimension: specificCapacitance) + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -9979,7 +9999,11 @@ def _buildChildren( class InitMembPotential(GeneratedsSuper): - """InitMembPotential -- Explicitly set initial membrane potential for the cell""" + """InitMembPotential -- Explicitly set initial membrane potential for the cell + + * value -- (dimension: voltage) + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -10222,7 +10246,11 @@ def _buildChildren( class Resistivity(GeneratedsSuper): - """Resistivity -- The resistivity, or specific axial resistance, of the cytoplasm""" + """Resistivity -- The resistivity, or specific axial resistance, of the cytoplasm + + * value -- (dimension: resistivity) + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -10872,6 +10900,9 @@ class Species(GeneratedsSuper): * ion -- Specifying the ion here again is redundant, the ion name should be the same as id. Kept for now until LEMS implementation can select by id. TODO: remove. 
+ * initialConcentration -- (dimension: concentration) + * initialExtConcentration -- (dimension: concentration) + """ __hash__ = GeneratedsSuper.__hash__ @@ -13033,7 +13064,13 @@ def __repr__(self): class Location(GeneratedsSuper): - """Location -- Specifies the ( x, y, z ) location of a single **instance** of a component in a **population**""" + """Location -- Specifies the ( x, y, z ) location of a single **instance** of a component in a **population** + + * x -- (dimension: none) + * y -- (dimension: none) + * z -- (dimension: none) + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -14120,7 +14157,11 @@ def __str__(self): class InputW(Input): - """InputW -- Specifies input lists. Can set **weight** to scale individual inputs.""" + """InputW -- Specifies input lists. Can set **weight** to scale individual inputs. + + * weight -- (dimension: none) + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -15442,7 +15483,13 @@ def _buildChildren( class SpikeSourcePoisson(Standalone): - """SpikeSourcePoisson -- Spike source, generating spikes according to a Poisson process.""" + """SpikeSourcePoisson -- Spike source, generating spikes according to a Poisson process. + + * start -- (dimension: time) + * duration -- (dimension: time) + * rate -- (dimension: per_time) + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -16711,7 +16758,11 @@ def _buildChildren( class Population(Standalone): - """Population -- A population of components, with just one parameter for the **size,** i. e. number of components to create. Note: quite often this is used with type= **populationList** which means the size is determined by the number of **instance** s ( with **location** s ) in the list. The **size** attribute is still set, and there will be a validation error if this does not match the number in the list.""" + """Population -- A population of components, with just one parameter for the **size,** i. e. number of components to create. Note: quite often this is used with type= **populationList** which means the size is determined by the number of **instance** s ( with **location** s ) in the list. The **size** attribute is still set, and there will be a validation error if this does not match the number in the list. + + * size -- (dimension: none): Number of instances of this Component to create when the population is instantiated + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -18358,7 +18409,13 @@ def exportHdf5(self, h5file, h5Group): class TransientPoissonFiringSynapse(Standalone): - """TransientPoissonFiringSynapse -- Poisson spike generator firing at **averageRate** after a **delay** and for a **duration,** connected to single **synapse** that is triggered every time a spike is generated, providing an input current. Similar to ComponentType **poissonFiringSynapse** .""" + """TransientPoissonFiringSynapse -- Poisson spike generator firing at **averageRate** after a **delay** and for a **duration,** connected to single **synapse** that is triggered every time a spike is generated, providing an input current. Similar to ComponentType **poissonFiringSynapse** . 
+ + * averageRate -- (dimension: per_time) + * delay -- (dimension: time) + * duration -- (dimension: time) + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -18722,7 +18779,11 @@ def _buildChildren( class PoissonFiringSynapse(Standalone): - """PoissonFiringSynapse -- Poisson spike generator firing at **averageRate,** which is connected to single **synapse** that is triggered every time a spike is generated, producing an input current. See also **transientPoissonFiringSynapse** .""" + """PoissonFiringSynapse -- Poisson spike generator firing at **averageRate,** which is connected to single **synapse** that is triggered every time a spike is generated, producing an input current. See also **transientPoissonFiringSynapse** . + + * averageRate -- (dimension: per_time): The average rate at which spikes are emitted + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -18995,7 +19056,11 @@ def _buildChildren( class SpikeGeneratorPoisson(Standalone): - """SpikeGeneratorPoisson -- Generator of spikes whose ISI is distributed according to an exponential PDF with scale: 1 / **averageRate**""" + """SpikeGeneratorPoisson -- Generator of spikes whose ISI is distributed according to an exponential PDF with scale: 1 / **averageRate** + + * averageRate -- (dimension: per_time): The average rate at which spikes are emitted + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -19245,7 +19310,12 @@ def _buildChildren( class SpikeGeneratorRandom(Standalone): - """SpikeGeneratorRandom -- Generator of spikes with a random interspike interval of at least **minISI** and at most **maxISI**""" + """SpikeGeneratorRandom -- Generator of spikes with a random interspike interval of at least **minISI** and at most **maxISI** + + * maxISI -- (dimension: time): Maximum interspike interval + * minISI -- (dimension: time): Minimum interspike interval + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -19491,7 +19561,11 @@ def _buildChildren( class SpikeGenerator(Standalone): - """SpikeGenerator -- Simple generator of spikes at a regular interval set by **period**""" + """SpikeGenerator -- Simple generator of spikes at a regular interval set by **period** + + * period -- (dimension: time): Time between spikes. The first spike will be emitted after this time. + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -20168,7 +20242,11 @@ def _buildChildren( class Spike(BaseNonNegativeIntegerId): - """Spike -- Emits a single spike at the specified **time**""" + """Spike -- Emits a single spike at the specified **time** + + * time -- (dimension: time): Time at which to emit one spike event + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -20361,7 +20439,17 @@ def _buildChildren( class VoltageClampTriple(Standalone): - """VoltageClampTriple -- Voltage clamp with 3 clamp levels. Applies a variable current **i** ( through **simpleSeriesResistance** ) to try to keep parent cell at **conditioningVoltage** until time **delay,** **testingVoltage** until **delay** + **duration,** and **returnVoltage** afterwards. Only enabled if **active** = 1.""" + """VoltageClampTriple -- Voltage clamp with 3 clamp levels. Applies a variable current **i** ( through **simpleSeriesResistance** ) to try to keep parent cell at **conditioningVoltage** until time **delay,** **testingVoltage** until **delay** + **duration,** and **returnVoltage** afterwards. Only enabled if **active** = 1. 
+ + * active -- (dimension: none): Whether the voltage clamp is active ( 1 ) or inactive ( 0 ). + * delay -- (dimension: time): Delay before switching from conditioningVoltage to testingVoltage. + * duration -- (dimension: time): Duration to hold at testingVoltage. + * conditioningVoltage -- (dimension: voltage): Target voltage before time delay + * testingVoltage -- (dimension: voltage): Target voltage between times delay and delay + duration + * returnVoltage -- (dimension: voltage): Target voltage after time duration + * simpleSeriesResistance -- (dimension: resistance): Current will be calculated by the difference in voltage between the target and parent, divided by this value + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -20848,7 +20936,14 @@ def _buildChildren( class VoltageClamp(Standalone): - """VoltageClamp -- Voltage clamp. Applies a variable current **i** to try to keep parent at **targetVoltage.** Not yet fully tested!!! Consider using voltageClampTriple!!""" + """VoltageClamp -- Voltage clamp. Applies a variable current **i** to try to keep parent at **targetVoltage.** Not yet fully tested!!! Consider using voltageClampTriple!! + + * delay -- (dimension: time): Delay before change in current. Current is zero prior to this. + * duration -- (dimension: time): Duration for attempting to keep parent at targetVoltage. Current is zero after delay + duration. + * targetVoltage -- (dimension: voltage): Current will be applied to try to get parent to this target voltage + * simpleSeriesResistance -- (dimension: resistance): Current will be calculated by the difference in voltage between the target and parent, divided by this value + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -21764,7 +21859,15 @@ def _buildChildren( class RampGeneratorDL(Standalone): - """RampGeneratorDL -- Dimensionless equivalent of **rampGenerator** . Generates a ramping current after a time **delay,** for a fixed **duration.** During this time the dimensionless current steadily changes from **startAmplitude** to **finishAmplitude.** Scaled by **weight,** if set""" + """RampGeneratorDL -- Dimensionless equivalent of **rampGenerator** . Generates a ramping current after a time **delay,** for a fixed **duration.** During this time the dimensionless current steadily changes from **startAmplitude** to **finishAmplitude.** Scaled by **weight,** if set + + * delay -- (dimension: time): Delay before change in current. Current is baselineAmplitude prior to this. + * duration -- (dimension: time): Duration for holding current at amplitude. Current is baselineAmplitude after delay + duration. 
+ * startAmplitude -- (dimension: none): Amplitude of linearly varying current at time delay + * finishAmplitude -- (dimension: none): Amplitude of linearly varying current at time delay + duration + * baselineAmplitude -- (dimension: none): Amplitude of current before time delay, and after time delay + duration + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -22133,7 +22236,15 @@ def _buildChildren( class RampGenerator(Standalone): - """RampGenerator -- Generates a ramping current after a time **delay,** for a fixed **duration.** During this time the current steadily changes from **startAmplitude** to **finishAmplitude.** Scaled by **weight,** if set""" + """RampGenerator -- Generates a ramping current after a time **delay,** for a fixed **duration.** During this time the current steadily changes from **startAmplitude** to **finishAmplitude.** Scaled by **weight,** if set + + * delay -- (dimension: time): Delay before change in current. Current is baselineAmplitude prior to this. + * duration -- (dimension: time): Duration for holding current at amplitude. Current is baselineAmplitude after delay + duration. + * startAmplitude -- (dimension: current): Amplitude of linearly varying current at time delay + * finishAmplitude -- (dimension: current): Amplitude of linearly varying current at time delay + duration + * baselineAmplitude -- (dimension: current): Amplitude of current before time delay, and after time delay + duration + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -22502,7 +22613,15 @@ def _buildChildren( class SineGeneratorDL(Standalone): - """SineGeneratorDL -- Dimensionless equivalent of **sineGenerator** . Generates a sinusoidally varying current after a time **delay,** for a fixed **duration.** The **period** and maximum **amplitude** of the current can be set as well as the **phase** at which to start. Scaled by **weight,** if set""" + """SineGeneratorDL -- Dimensionless equivalent of **sineGenerator** . Generates a sinusoidally varying current after a time **delay,** for a fixed **duration.** The **period** and maximum **amplitude** of the current can be set as well as the **phase** at which to start. Scaled by **weight,** if set + + * delay -- (dimension: time): Delay before change in current. Current is zero prior to this. + * phase -- (dimension: none): Phase ( between 0 and 2*pi ) at which to start the varying current ( i. e. at time given by delay ) + * duration -- (dimension: time): Duration for holding current at amplitude. Current is zero after delay + duration. + * amplitude -- (dimension: none): Maximum amplitude of current + * period -- (dimension: time): Time period of oscillation + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -22851,7 +22970,15 @@ def _buildChildren( class SineGenerator(Standalone): - """SineGenerator -- Generates a sinusoidally varying current after a time **delay,** for a fixed **duration.** The **period** and maximum **amplitude** of the current can be set as well as the **phase** at which to start. Scaled by **weight,** if set""" + """SineGenerator -- Generates a sinusoidally varying current after a time **delay,** for a fixed **duration.** The **period** and maximum **amplitude** of the current can be set as well as the **phase** at which to start. Scaled by **weight,** if set + + * delay -- (dimension: time): Delay before change in current. Current is zero prior to this. + * phase -- (dimension: none): Phase ( between 0 and 2*pi ) at which to start the varying current ( i. 
e. at time given by delay ) + * duration -- (dimension: time): Duration for holding current at amplitude. Current is zero after delay + duration. + * amplitude -- (dimension: current): Maximum amplitude of current + * period -- (dimension: time): Time period of oscillation + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -23232,7 +23359,13 @@ def _buildChildren( class PulseGeneratorDL(Standalone): - """PulseGeneratorDL -- Dimensionless equivalent of **pulseGenerator** . Generates a constant current pulse of a certain **amplitude** for a specified **duration** after a **delay.** Scaled by **weight,** if set""" + """PulseGeneratorDL -- Dimensionless equivalent of **pulseGenerator** . Generates a constant current pulse of a certain **amplitude** for a specified **duration** after a **delay.** Scaled by **weight,** if set + + * delay -- (dimension: time): Delay before change in current. Current is zero prior to this. + * duration -- (dimension: time): Duration for holding current at amplitude. Current is zero after delay + duration. + * amplitude -- (dimension: none): Amplitude of current pulse + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -23539,7 +23672,13 @@ def _buildChildren( class PulseGenerator(Standalone): - """PulseGenerator -- Generates a constant current pulse of a certain **amplitude** for a specified **duration** after a **delay.** Scaled by **weight,** if set""" + """PulseGenerator -- Generates a constant current pulse of a certain **amplitude** for a specified **duration** after a **delay.** Scaled by **weight,** if set + + * delay -- (dimension: time): Delay before change in current. Current is zero prior to this. + * duration -- (dimension: time): Duration for holding current at amplitude. Current is zero after delay + duration. + * amplitude -- (dimension: current): Amplitude of current pulse + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -24595,6 +24734,7 @@ def _buildChildren( class ChannelDensityGHK(Base): """ChannelDensityGHK -- Specifies a time varying conductance density, **gDensity,** which is distributed on an area of the cell, producing a current density **iDensity** and whose reversal potential is calculated from the Goldman Hodgkin Katz equation. Hard coded for Ca only! See https://github. com/OpenSourceBrain/ghk-nernst. + * permeability -- (dimension: permeability) * ion -- Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here TEMPORARILY since selecting all ca or na conducting channel populations/densities in a cell would be difficult otherwise. Also, it will make it easier to set the correct native simulator value for erev (e.g. ek for ion = k in NEURON). @@ -25354,6 +25494,7 @@ def _buildChildren( class ChannelDensity(Base): """ChannelDensity -- Specifies a time varying ohmic conductance density, **gDensity,** which is distributed on an area of the **cell** ( specified in **membraneProperties** ) with fixed reversal potential **erev** producing a current density **iDensity** + * erev -- (dimension: voltage): The reversal potential of the current produced * ion -- Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here TEMPORARILY since selecting all ca or na conducting channel populations/densities in a cell would be difficult otherwise. Also, it will make it easier to set the correct native simulator value for erev (e.g. ek for ion = k in NEURON). 
@@ -26415,6 +26556,7 @@ def _buildChildren( class ChannelDensityNonUniform(Base): """ChannelDensityNonUniform -- Specifies a time varying ohmic conductance density, which is distributed on a region of the **cell.** The conductance density of the channel is not uniform, but is set using the **variableParameter** . Note, there is no dynamical description of this in LEMS yet, as this type only makes sense for multicompartmental cells. A ComponentType for this needs to be present to enable export of NeuroML 2 multicompartmental cells via LEMS/jNeuroML to NEURON + * erev -- (dimension: voltage): The reversal potential of the current produced * ion -- Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here TEMPORARILY since selecting all ca or na conducting channel populations/densities in a cell would be difficult otherwise. Also, it will make it easier to set the correct native simulator value for erev (e.g. ek for ion = k in NEURON). @@ -26759,6 +26901,8 @@ def _buildChildren( class ChannelPopulation(Base): """ChannelPopulation -- Population of a **number** of ohmic ion channels. These each produce a conductance **channelg** across a reversal potential **erev,** giving a total current **i.** Note that active membrane currents are more frequently specified as a density over an area of the **cell** using **channelDensity** + * number -- (dimension: none): The number of channels present. This will be multiplied by the time varying conductance of the individual ion channel ( which extends **baseIonChannel** ) to produce the total conductance + * erev -- (dimension: voltage): The reversal potential of the current produced * ion -- Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here TEMPORARILY since selecting all ca or na conducting channel populations/densities in a cell would be difficult otherwise. Also, it will make it easier to set the correct native simulator value for erev (e.g. ek for ion = k in NEURON). @@ -29499,6 +29643,9 @@ class FixedFactorConcentrationModel(Standalone): """FixedFactorConcentrationModel -- Model of buffering of concentration of an ion ( currently hard coded to be calcium, due to requirement for **iCa** ) which has a baseline level **restingConc** and tends to this value with time course **decayConstant.** A fixed factor **rho** is used to scale the incoming current *independently of the size of the compartment* to produce a concentration change. * ion -- Should not be required, as it's present on the species element! 
+ * restingConc -- (dimension: concentration) + * decayConstant -- (dimension: time) + * rho -- (dimension: rho_factor) """ @@ -29901,9 +30048,13 @@ def _buildChildren( class DecayingPoolConcentrationModel(Standalone): - """DecayingPoolConcentrationModel -- Model of an intracellular buffering mechanism for **ion** ( currently hard Coded to be calcium, due to requirement for **iCa** ) which has a baseline level **restingConc** and tends to this value with time course **decayConstant.** The ion is assumed to occupy a shell inside the membrane of thickness **shellThickness.** + """DecayingPoolConcentrationModel -- Model of an intracellular buffering mechanism for **ion** ( currently hard Coded to be calcium, due to requirement for **iCa** ) which has a baseline level **restingConc** and tends to this value with time course * + *decayConstant.** The ion is assumed to occupy a shell inside the membrane of thickness **shellThickness.** * ion -- Should not be required, as it's present on the species element! + * restingConc -- (dimension: concentration) + * decayConstant -- (dimension: time) + * shellThickness -- (dimension: length) """ @@ -38897,7 +39048,11 @@ def append(self, element): class BasePynnSynapse(BaseSynapse): - """BasePynnSynapse -- Base type for all PyNN synapses. Note, the current **I** produced is dimensionless, but it requires a membrane potential **v** with dimension voltage""" + """BasePynnSynapse -- Base type for all PyNN synapses. Note, the current **I** produced is dimensionless, but it requires a membrane potential **v** with dimension voltage + + * tau_syn -- (dimension: none) + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -39091,7 +39246,15 @@ def _buildChildren( class basePyNNCell(BaseCell): - """basePyNNCell -- Base type of any PyNN standard cell model. Note: membrane potential **v** has dimensions voltage, but all other parameters are dimensionless. This is to facilitate translation to and from PyNN scripts in Python, where these parameters have implicit units, see http://neuralensemble. org/trac/PyNN/wiki/StandardModels""" + """basePyNNCell -- Base type of any PyNN standard cell model. Note: membrane potential **v** has dimensions voltage, but all other parameters are dimensionless. This is to facilitate translation to and from PyNN scripts in Python, where these parameters have implicit units, see http://neuralensemble. org/trac/PyNN/wiki/StandardModels + + * cm -- (dimension: none) + * i_offset -- (dimension: none) + * tau_syn_E -- (dimension: none): This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell + * tau_syn_I -- (dimension: none): This parameter is never used in the NeuroML2 description of this cell! 
Any synapse producing a current can be placed on this cell + * v_init -- (dimension: none) + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -41245,7 +41408,11 @@ def __str__(self): class SpikeGeneratorRefPoisson(SpikeGeneratorPoisson): - """SpikeGeneratorRefPoisson -- Generator of spikes whose ISI distribution is the maximum entropy distribution over [ **minimumISI,** +infinity ) with mean: 1 / **averageRate**""" + """SpikeGeneratorRefPoisson -- Generator of spikes whose ISI distribution is the maximum entropy distribution over [ **minimumISI,** +infinity ) with mean: 1 / **averageRate** + + * minimumISI -- (dimension: time): The minimum interspike interval + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -41676,10 +41843,7 @@ def _buildChildren( class ChannelDensityNernstCa2(ChannelDensityNernst): - """ChannelDensityNernstCa2 -- This component is similar to the original compon - ent type **channelDensityNernst** but it is changed in order to have a reversal potential that depends on a second independent Ca++ pool ( ca2 ). See https://github. com/OpenSourceBrain/ghk-nernst. - - """ + """ChannelDensityNernstCa2 -- This component is similar to the original component type **channelDensityNernst** but it is changed in order to have a reversal potential that depends on a second independent Ca++ pool ( ca2 ). See https://github. com/OpenSourceBrain/ghk-nernst.""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [] @@ -41854,7 +42018,11 @@ def _buildChildren( class ChannelDensityVShift(ChannelDensity): - """ChannelDensityVShift -- Same as **channelDensity** , but with a **vShift** parameter to change voltage activation of gates. The exact usage of **vShift** in expressions for rates is determined by the individual gates.""" + """ChannelDensityVShift -- Same as **channelDensity** , but with a **vShift** parameter to change voltage activation of gates. The exact usage of **vShift** in expressions for rates is determined by the individual gates. + + * vShift -- (dimension: voltage) + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -42816,7 +42984,31 @@ def summary(self): class PinskyRinzelCA3Cell(BaseCell): - """PinskyRinzelCA3Cell -- Reduced CA3 cell model from Pinsky and Rinzel 1994. See https://github. com/OpenSourceBrain/PinskyRinzelModel""" + """PinskyRinzelCA3Cell -- Reduced CA3 cell model from Pinsky and Rinzel 1994. See https://github. 
com/OpenSourceBrain/PinskyRinzelModel + + * iSoma -- (dimension: currentDensity) + * iDend -- (dimension: currentDensity) + * gc -- (dimension: conductanceDensity) + * gLs -- (dimension: conductanceDensity) + * gLd -- (dimension: conductanceDensity) + * gNa -- (dimension: conductanceDensity) + * gKdr -- (dimension: conductanceDensity) + * gCa -- (dimension: conductanceDensity) + * gKahp -- (dimension: conductanceDensity) + * gKC -- (dimension: conductanceDensity) + * gNmda -- (dimension: conductanceDensity) + * gAmpa -- (dimension: conductanceDensity) + * eNa -- (dimension: voltage) + * eCa -- (dimension: voltage) + * eK -- (dimension: voltage) + * eL -- (dimension: voltage) + * qd0 -- (dimension: none) + * pp -- (dimension: none) + * alphac -- (dimension: none) + * betac -- (dimension: none) + * cm -- (dimension: specificCapacitance) + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -43703,7 +43895,16 @@ def _buildChildren( class FitzHughNagumo1969Cell(BaseCell): - """FitzHughNagumo1969Cell -- The Fitzhugh Nagumo model is a two-dimensional simplification of the Hodgkin-Huxley model of spike generation in squid giant axons. This system was suggested by FitzHugh ( FitzHugh R. [1961]: Impulses and physiological states in theoretical models of nerve membrane. Biophysical J. 1:445-466 ), who called it " Bonhoeffer-van der Pol model ", and the equivalent circuit by Nagumo et al. ( Nagumo J. , Arimoto S. , and Yoshizawa S. [1962] An active pulse transmission line simulating nerve axon. Proc IRE. 50:2061-2070. 1962 ). This version corresponds to the one described in FitzHugh R. [1969]: Mathematical models of excitation and propagation in nerve. Chapter 1 ( pp. 1-85 in H. P. Schwan, ed. Biological Engineering, McGraw-Hill Book Co. , N. Y. )""" + """FitzHughNagumo1969Cell -- The Fitzhugh Nagumo model is a two-dimensional simplification of the Hodgkin-Huxley model of spike generation in squid giant axons. This system was suggested by FitzHugh ( FitzHugh R. [1961]: Impulses and physiological states in theoretical models of nerve membrane. Biophysical J. 1:445-466 ), who called it " Bonhoeffer-van der Pol model ", and the equivalent circuit by Nagumo et al. ( Nagumo J. , Arimoto S. , and Yoshizawa S. [1962] An active pulse transmission line simulating nerve axon. Proc IRE. 50:2061-2070. 1962 ). This version corresponds to the one described in FitzHugh R. [1969]: Mathematical models of excitation and propagation in nerve. Chapter 1 ( pp. 1-85 in H. P. Schwan, ed. Biological Engineering, McGraw-Hill Book Co. , N. Y. ) + + * a -- (dimension: none) + * b -- (dimension: none) + * I -- (dimension: none): plays the role of an external injected current + * phi -- (dimension: none) + * V0 -- (dimension: none) + * W0 -- (dimension: none) + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -44017,7 +44218,11 @@ def _buildChildren( class FitzHughNagumoCell(BaseCell): - """FitzHughNagumoCell -- Simple dimensionless model of spiking cell from FitzHugh and Nagumo. Superseded by **fitzHughNagumo1969Cell** ( See https://github. com/NeuroML/NeuroML2/issues/42 )""" + """FitzHughNagumoCell -- Simple dimensionless model of spiking cell from FitzHugh and Nagumo. Superseded by **fitzHughNagumo1969Cell** ( See https://github. 
com/NeuroML/NeuroML2/issues/42 ) + + * I -- (dimension: none) + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -44232,7 +44437,7 @@ def _buildChildren( class BaseCellMembPotCap(BaseCell): """BaseCellMembPotCap -- Any cell with a membrane potential **v** with voltage units and a membrane capacitance **C.** Also defines exposed value **iSyn** for current due to external synapses and **iMemb** for total transmembrane current ( usually channel currents plus **iSyn** ) - * C -- This is to prevent it conflicting with attribute c (lowercase) e.g. in izhikevichCell2007 + * C -- (dimension: capacitance): Total capacitance of the cell membrane """ @@ -44476,7 +44681,16 @@ def _buildChildren( class IzhikevichCell(BaseCell): - """IzhikevichCell -- Cell based on the 2003 model of Izhikevich, see http://izhikevich. org/publications/spikes. htm""" + """IzhikevichCell -- Cell based on the 2003 model of Izhikevich, see http://izhikevich. org/publications/spikes. htm + + * v0 -- (dimension: voltage): Initial membrane potential + * thresh -- (dimension: voltage): Spike threshold + * a -- (dimension: none): Time scale of the recovery variable U + * b -- (dimension: none): Sensitivity of U to the subthreshold fluctuations of the membrane potential v + * c -- (dimension: none): After-spike reset value of v + * d -- (dimension: none): After-spike reset of u + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -44820,7 +45034,12 @@ def _buildChildren( class IafCell(BaseCell): - """IafCell -- Integrate and fire cell with capacitance **C,** **leakConductance** and **leakReversal**""" + """IafCell -- Integrate and fire cell with capacitance **C,** **leakConductance** and **leakReversal** + + * leakReversal -- (dimension: voltage) + * leakConductance -- (dimension: conductance) + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -45227,7 +45446,12 @@ def _buildChildren( class IafTauCell(BaseCell): - """IafTauCell -- Integrate and fire cell which returns to its leak reversal potential of **leakReversal** with a time constant **tau**""" + """IafTauCell -- Integrate and fire cell which returns to its leak reversal potential of **leakReversal** with a time constant **tau** + + * leakReversal -- (dimension: voltage) + * tau -- (dimension: time) + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -45569,6 +45793,11 @@ def _buildChildren( class GradedSynapse(BaseSynapse): """GradedSynapse -- Graded/analog synapse. Based on synapse in Methods of http://www. nature. com/neuro/journal/v7/n12/abs/nn1352. html Based on synapse in Methods of http://www.nature.com/neuro/journal/v7/n12/abs/nn1352.html. + conductance -- (dimension: conductance) + delta -- (dimension: voltage): Slope of the activation curve + Vth -- (dimension: voltage): The half-activation voltage of the synapse + k -- (dimension: per_time): Rate constant for transmitter-receptor dissociation rate + erev -- (dimension: voltage): The reversal potential of the synapse """ @@ -45945,6 +46174,7 @@ def _buildChildren( class LinearGradedSynapse(BaseSynapse): """LinearGradedSynapse -- Behaves just like a one way gap junction. Behaves just like a one way gap junction. 
+ conductance -- (dimension: conductance) """ @@ -46327,6 +46557,7 @@ def _buildChildren( class GapJunction(BaseSynapse): """GapJunction -- Gap junction/single electrical connection Gap junction/single electrical connection + conductance -- (dimension: conductance) """ @@ -47882,7 +48113,11 @@ def _buildChildren( class AlphaCondSynapse(BasePynnSynapse): - """AlphaCondSynapse -- Alpha synapse: rise time and decay time are both tau_syn. Conductance based synapse.""" + """AlphaCondSynapse -- Alpha synapse: rise time and decay time are both tau_syn. Conductance based synapse. + + * e_rev -- (dimension: none) + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -48057,7 +48292,11 @@ def _buildChildren( class ExpCondSynapse(BasePynnSynapse): - """ExpCondSynapse -- Conductance based synapse with instantaneous rise and single exponential decay ( with time constant tau_syn )""" + """ExpCondSynapse -- Conductance based synapse with instantaneous rise and single exponential decay ( with time constant tau_syn ) + + * e_rev -- (dimension: none) + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -48224,7 +48463,19 @@ def _buildChildren( class HH_cond_exp(basePyNNCell): - """HH_cond_exp -- Single-compartment Hodgkin-Huxley-type neuron with transient sodium and delayed-rectifier potassium currents using the ion channel models from Traub.""" + """HH_cond_exp -- Single-compartment Hodgkin-Huxley-type neuron with transient sodium and delayed-rectifier potassium currents using the ion channel models from Traub. + + * v_offset -- (dimension: none) + * e_rev_E -- (dimension: none) + * e_rev_I -- (dimension: none) + * e_rev_K -- (dimension: none) + * e_rev_Na -- (dimension: none) + * e_rev_leak -- (dimension: none) + * g_leak -- (dimension: none) + * gbar_K -- (dimension: none) + * gbar_Na -- (dimension: none) + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -48539,7 +48790,15 @@ def _buildChildren( class basePyNNIaFCell(basePyNNCell): - """basePyNNIaFCell -- Base type of any PyNN standard integrate and fire model""" + """basePyNNIaFCell -- Base type of any PyNN standard integrate and fire model + + * tau_m -- (dimension: none) + * tau_refrac -- (dimension: none) + * v_reset -- (dimension: none) + * v_rest -- (dimension: none) + * v_thresh -- (dimension: none) + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -49502,7 +49761,12 @@ def __str__(self): class ConnectionWD(BaseConnectionOldFormat): - """ConnectionWD -- Event connection between named components, which gets processed via a new instance of a synapse component which is created on the target component, includes setting of **weight** and **delay** for the synaptic connection""" + """ConnectionWD -- Event connection between named components, which gets processed via a new instance of a synapse component which is created on the target component, includes setting of **weight** and **delay** for the synaptic connection + + * weight -- (dimension: none) + * delay -- (dimension: time) + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -50300,7 +50564,20 @@ def _buildChildren( class AdExIaFCell(BaseCellMembPotCap): - """AdExIaFCell -- Model based on Brette R and Gerstner W ( 2005 ) Adaptive Exponential Integrate-and-Fire Model as an Effective Description of Neuronal Activity. J Neurophysiol 94:3637-3642""" + """AdExIaFCell -- Model based on Brette R and Gerstner W ( 2005 ) Adaptive Exponential Integrate-and-Fire Model as an Effective Description of Neuronal Activity. 
J Neurophysiol 94:3637-3642 + + * gL -- (dimension: conductance) + * EL -- (dimension: voltage) + * reset -- (dimension: voltage) + * VT -- (dimension: voltage) + * thresh -- (dimension: voltage) + * delT -- (dimension: voltage) + * tauw -- (dimension: time) + * refract -- (dimension: time) + * a -- (dimension: conductance) + * b -- (dimension: current) + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -50825,7 +51102,19 @@ def _buildChildren( class Izhikevich2007Cell(BaseCellMembPotCap): - """Izhikevich2007Cell -- Cell based on the modified Izhikevich model in Izhikevich 2007, Dynamical systems in neuroscience, MIT Press""" + """Izhikevich2007Cell -- Cell based on the modified Izhikevich model in Izhikevich 2007, Dynamical systems in neuroscience, MIT Press + + * v0 -- (dimension: voltage) + * k -- (dimension: conductance_per_voltage) + * vr -- (dimension: voltage) + * vt -- (dimension: voltage) + * vpeak -- (dimension: voltage) + * a -- (dimension: per_time) + * b -- (dimension: conductance) + * c -- (dimension: voltage) + * d -- (dimension: current) + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -51361,7 +51650,11 @@ def _buildChildren( class IafRefCell(IafCell): - """IafRefCell -- Integrate and fire cell with capacitance **C,** **leakConductance,** **leakReversal** and refractory period **refract**""" + """IafRefCell -- Integrate and fire cell with capacitance **C,** **leakConductance,** **leakReversal** and refractory period **refract** + + * refract -- (dimension: time) + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -51581,7 +51874,11 @@ def _buildChildren( class IafTauRefCell(IafTauCell): - """IafTauRefCell -- Integrate and fire cell which returns to its leak reversal potential of **leakReversal** with a time course **tau.** It has a refractory period of **refract** after spiking""" + """IafTauRefCell -- Integrate and fire cell which returns to its leak reversal potential of **leakReversal** with a time course **tau.** It has a refractory period of **refract** after spiking + + * refract -- (dimension: time) + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -52080,7 +52377,12 @@ def _buildChildren( class AlphaCurrentSynapse(BaseCurrentBasedSynapse): - """AlphaCurrentSynapse -- Alpha current synapse: rise time and decay time are both **tau.**""" + """AlphaCurrentSynapse -- Alpha current synapse: rise time and decay time are both **tau.** + + * tau -- (dimension: time): Time course for rise and decay + * ibase -- (dimension: current): Baseline current increase after receiving a spike + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -52354,7 +52656,13 @@ def _buildChildren( class BaseConductanceBasedSynapseTwo(BaseVoltageDepSynapse): - """BaseConductanceBasedSynapseTwo -- Synapse model suited for a sum of two expTwoSynapses which exposes a conductance **g** in addition to producing a current. Not necessarily ohmic!! cno_0000027""" + """BaseConductanceBasedSynapseTwo -- Synapse model suited for a sum of two expTwoSynapses which exposes a conductance **g** in addition to producing a current. Not necessarily ohmic!! 
cno_0000027 + + * gbase1 -- (dimension: conductance): Baseline conductance 1 + * gbase2 -- (dimension: conductance): Baseline conductance 2 + * erev -- (dimension: voltage): Reversal potential of the synapse + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -52697,7 +53005,12 @@ def _buildChildren( class BaseConductanceBasedSynapse(BaseVoltageDepSynapse): - """BaseConductanceBasedSynapse -- Synapse model which exposes a conductance **g** in addition to producing a current. Not necessarily ohmic!! cno_0000027""" + """BaseConductanceBasedSynapse -- Synapse model which exposes a conductance **g** in addition to producing a current. Not necessarily ohmic!! cno_0000027 + + * gbase -- (dimension: conductance): Baseline conductance, generally the maximum conductance following a single spike + * erev -- (dimension: voltage): Reversal potential of the synapse + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -53009,7 +53322,11 @@ def _buildChildren( class IonChannelVShift(IonChannel): - """IonChannelVShift -- Same as **ionChannel** , but with a **vShift** parameter to change voltage activation of gates. The exact usage of **vShift** in expressions for rates is determined by the individual gates.""" + """IonChannelVShift -- Same as **ionChannel** , but with a **vShift** parameter to change voltage activation of gates. The exact usage of **vShift** in expressions for rates is determined by the individual gates. + + * vShift -- (dimension: voltage) + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -53795,7 +54112,12 @@ def _buildChildren( class basePyNNIaFCondCell(basePyNNIaFCell): - """basePyNNIaFCondCell -- Base type of conductance based PyNN IaF cell models""" + """basePyNNIaFCondCell -- Base type of conductance based PyNN IaF cell models + + * e_rev_E -- (dimension: none): This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell + * e_rev_I -- (dimension: none): This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -54476,7 +54798,13 @@ def __str__(self): class ExpThreeSynapse(BaseConductanceBasedSynapseTwo): - """ExpThreeSynapse -- Ohmic synapse similar to expTwoSynapse but consisting of two components that can differ in decay times and max conductances but share the same rise time.""" + """ExpThreeSynapse -- Ohmic synapse similar to expTwoSynapse but consisting of two components that can differ in decay times and max conductances but share the same rise time. 
+ + * tauDecay1 -- (dimension: time) + * tauDecay2 -- (dimension: time) + * tauRise -- (dimension: time) + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -54759,7 +55087,12 @@ def _buildChildren( class ExpTwoSynapse(BaseConductanceBasedSynapse): - """ExpTwoSynapse -- Ohmic synapse model whose conductance waveform on receiving an event has a rise time of **tauRise** and a decay time of **tauDecay.** Max conductance reached during this time ( assuming zero conductance before ) is **gbase** * **weight.**""" + """ExpTwoSynapse -- Ohmic synapse model whose conductance waveform on receiving an event has a rise time of **tauRise** and a decay time of **tauDecay.** Max conductance reached during this time ( assuming zero conductance before ) is **gbase** * **weight.** + + * tauDecay -- (dimension: time) + * tauRise -- (dimension: time) + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -55030,7 +55363,11 @@ def _buildChildren( class ExpOneSynapse(BaseConductanceBasedSynapse): - """ExpOneSynapse -- Ohmic synapse model whose conductance rises instantaneously by ( **gbase** * **weight** ) on receiving an event, and which decays exponentially to zero with time course **tauDecay**""" + """ExpOneSynapse -- Ohmic synapse model whose conductance rises instantaneously by ( **gbase** * **weight** ) on receiving an event, and which decays exponentially to zero with time course **tauDecay** + + * tauDecay -- (dimension: time): Time course of decay + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -55253,7 +55590,11 @@ def _buildChildren( class AlphaSynapse(BaseConductanceBasedSynapse): - """AlphaSynapse -- Ohmic synapse model where rise time and decay time are both **tau.** Max conductance reached during this time ( assuming zero conductance before ) is **gbase** * **weight.**""" + """AlphaSynapse -- Ohmic synapse model where rise time and decay time are both **tau.** Max conductance reached during this time ( assuming zero conductance before ) is **gbase** * **weight.** + + * tau -- (dimension: time): Time course of rise/decay + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -55468,7 +55809,15 @@ def _buildChildren( class EIF_cond_exp_isfa_ista(basePyNNIaFCondCell): - """EIF_cond_exp_isfa_ista -- Adaptive exponential integrate and fire neuron according to Brette R and Gerstner W ( 2005 ) with exponentially-decaying post-synaptic conductance""" + """EIF_cond_exp_isfa_ista -- Adaptive exponential integrate and fire neuron according to Brette R and Gerstner W ( 2005 ) with exponentially-decaying post-synaptic conductance + + * a -- (dimension: none) + * b -- (dimension: none) + * delta_T -- (dimension: none) + * tau_w -- (dimension: none) + * v_spike -- (dimension: none) + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -56117,7 +56466,11 @@ def _buildChildren( class ContinuousConnectionInstanceW(ContinuousConnectionInstance): - """ContinuousConnectionInstanceW -- An instance of a connection in a **continuousProjection** between **presynapticPopulation** to another **postsynapticPopulation** through a **preComponent** at the start and **postComponent** at the end. Populations need to be of type **populationList** and contain **instance** and **location** elements. Can be used for analog synapses. 
Includes setting of **weight** for the connection""" + """ContinuousConnectionInstanceW -- An instance of a connection in a **continuousProjection** between **presynapticPopulation** to another **postsynapticPopulation** through a **preComponent** at the start and **postComponent** at the end. Populations need to be of type **populationList** and contain **instance** and **location** elements. Can be used for analog synapses. Includes setting of **weight** for the connection + + * weight -- (dimension: none) + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -56338,7 +56691,11 @@ def __str__(self): class ElectricalConnectionInstanceW(ElectricalConnectionInstance): - """ElectricalConnectionInstanceW -- To enable connections between populations through gap junctions. Populations need to be of type **populationList** and contain **instance** and **location** elements. Includes setting of **weight** for the connection""" + """ElectricalConnectionInstanceW -- To enable connections between populations through gap junctions. Populations need to be of type **populationList** and contain **instance** and **location** elements. Includes setting of **weight** for the connection + + * weight -- (dimension: none) + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ From bb53446b400b5d88d85d1d59cff2cea05eb6f44f Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Wed, 17 Nov 2021 17:25:10 +0000 Subject: [PATCH 118/136] docs: show base classes --- doc/helpers/nml-core-docs.py | 3 +- doc/userdocs/coreclasses_list.txt | 201 ++++++++++++++++++++++++++++++ 2 files changed, 203 insertions(+), 1 deletion(-) diff --git a/doc/helpers/nml-core-docs.py b/doc/helpers/nml-core-docs.py index a59aa3c4..b15124d2 100644 --- a/doc/helpers/nml-core-docs.py +++ b/doc/helpers/nml-core-docs.py @@ -22,7 +22,7 @@ if line.startswith("class"): # get the class signature aclass = line[len("class "):] - aclass = aclass.split('(')[0].split(':')[0] + aclass = aclass.split('(')[0] if aclass not in excluded_classes: classes.append(aclass) @@ -40,6 +40,7 @@ .. autoclass:: neuroml.nml.nml.{} :members: :undoc-members: + :show-inheritance: """.format( aclass, "#" * len(aclass), aclass, diff --git a/doc/userdocs/coreclasses_list.txt b/doc/userdocs/coreclasses_list.txt index f7f17054..79e38673 100644 --- a/doc/userdocs/coreclasses_list.txt +++ b/doc/userdocs/coreclasses_list.txt @@ -6,6 +6,7 @@ AdExIaFCell .. autoclass:: neuroml.nml.nml.AdExIaFCell :members: :undoc-members: + :show-inheritance: @@ -15,6 +16,7 @@ AlphaCondSynapse .. autoclass:: neuroml.nml.nml.AlphaCondSynapse :members: :undoc-members: + :show-inheritance: @@ -24,6 +26,7 @@ AlphaCurrSynapse .. autoclass:: neuroml.nml.nml.AlphaCurrSynapse :members: :undoc-members: + :show-inheritance: @@ -33,6 +36,7 @@ AlphaCurrentSynapse .. autoclass:: neuroml.nml.nml.AlphaCurrentSynapse :members: :undoc-members: + :show-inheritance: @@ -42,6 +46,7 @@ AlphaSynapse .. autoclass:: neuroml.nml.nml.AlphaSynapse :members: :undoc-members: + :show-inheritance: @@ -51,6 +56,7 @@ Annotation .. autoclass:: neuroml.nml.nml.Annotation :members: :undoc-members: + :show-inheritance: @@ -60,6 +66,7 @@ Base .. autoclass:: neuroml.nml.nml.Base :members: :undoc-members: + :show-inheritance: @@ -69,6 +76,7 @@ BaseCell .. autoclass:: neuroml.nml.nml.BaseCell :members: :undoc-members: + :show-inheritance: @@ -78,6 +86,7 @@ BaseCellMembPotCap .. 
autoclass:: neuroml.nml.nml.BaseCellMembPotCap :members: :undoc-members: + :show-inheritance: @@ -87,6 +96,7 @@ BaseConductanceBasedSynapse .. autoclass:: neuroml.nml.nml.BaseConductanceBasedSynapse :members: :undoc-members: + :show-inheritance: @@ -96,6 +106,7 @@ BaseConductanceBasedSynapseTwo .. autoclass:: neuroml.nml.nml.BaseConductanceBasedSynapseTwo :members: :undoc-members: + :show-inheritance: @@ -105,6 +116,7 @@ BaseConnection .. autoclass:: neuroml.nml.nml.BaseConnection :members: :undoc-members: + :show-inheritance: @@ -114,6 +126,7 @@ BaseConnectionNewFormat .. autoclass:: neuroml.nml.nml.BaseConnectionNewFormat :members: :undoc-members: + :show-inheritance: @@ -123,6 +136,7 @@ BaseConnectionOldFormat .. autoclass:: neuroml.nml.nml.BaseConnectionOldFormat :members: :undoc-members: + :show-inheritance: @@ -132,6 +146,7 @@ BaseCurrentBasedSynapse .. autoclass:: neuroml.nml.nml.BaseCurrentBasedSynapse :members: :undoc-members: + :show-inheritance: @@ -141,6 +156,7 @@ BaseNonNegativeIntegerId .. autoclass:: neuroml.nml.nml.BaseNonNegativeIntegerId :members: :undoc-members: + :show-inheritance: @@ -150,6 +166,7 @@ BaseProjection .. autoclass:: neuroml.nml.nml.BaseProjection :members: :undoc-members: + :show-inheritance: @@ -159,6 +176,7 @@ BasePynnSynapse .. autoclass:: neuroml.nml.nml.BasePynnSynapse :members: :undoc-members: + :show-inheritance: @@ -168,6 +186,7 @@ BaseSynapse .. autoclass:: neuroml.nml.nml.BaseSynapse :members: :undoc-members: + :show-inheritance: @@ -177,6 +196,7 @@ BaseVoltageDepSynapse .. autoclass:: neuroml.nml.nml.BaseVoltageDepSynapse :members: :undoc-members: + :show-inheritance: @@ -186,6 +206,7 @@ BaseWithoutId .. autoclass:: neuroml.nml.nml.BaseWithoutId :members: :undoc-members: + :show-inheritance: @@ -195,6 +216,7 @@ BiophysicalProperties .. autoclass:: neuroml.nml.nml.BiophysicalProperties :members: :undoc-members: + :show-inheritance: @@ -204,6 +226,7 @@ BiophysicalProperties2CaPools .. autoclass:: neuroml.nml.nml.BiophysicalProperties2CaPools :members: :undoc-members: + :show-inheritance: @@ -213,6 +236,7 @@ BlockMechanism .. autoclass:: neuroml.nml.nml.BlockMechanism :members: :undoc-members: + :show-inheritance: @@ -222,6 +246,7 @@ BlockingPlasticSynapse .. autoclass:: neuroml.nml.nml.BlockingPlasticSynapse :members: :undoc-members: + :show-inheritance: @@ -231,6 +256,7 @@ Case .. autoclass:: neuroml.nml.nml.Case :members: :undoc-members: + :show-inheritance: @@ -240,6 +266,7 @@ Cell .. autoclass:: neuroml.nml.nml.Cell :members: :undoc-members: + :show-inheritance: @@ -249,6 +276,7 @@ Cell2CaPools .. autoclass:: neuroml.nml.nml.Cell2CaPools :members: :undoc-members: + :show-inheritance: @@ -258,6 +286,7 @@ CellSet .. autoclass:: neuroml.nml.nml.CellSet :members: :undoc-members: + :show-inheritance: @@ -267,6 +296,7 @@ ChannelDensity .. autoclass:: neuroml.nml.nml.ChannelDensity :members: :undoc-members: + :show-inheritance: @@ -276,6 +306,7 @@ ChannelDensityGHK .. autoclass:: neuroml.nml.nml.ChannelDensityGHK :members: :undoc-members: + :show-inheritance: @@ -285,6 +316,7 @@ ChannelDensityGHK2 .. autoclass:: neuroml.nml.nml.ChannelDensityGHK2 :members: :undoc-members: + :show-inheritance: @@ -294,6 +326,7 @@ ChannelDensityNernst .. autoclass:: neuroml.nml.nml.ChannelDensityNernst :members: :undoc-members: + :show-inheritance: @@ -303,6 +336,7 @@ ChannelDensityNernstCa2 .. autoclass:: neuroml.nml.nml.ChannelDensityNernstCa2 :members: :undoc-members: + :show-inheritance: @@ -312,6 +346,7 @@ ChannelDensityNonUniform .. 
autoclass:: neuroml.nml.nml.ChannelDensityNonUniform :members: :undoc-members: + :show-inheritance: @@ -321,6 +356,7 @@ ChannelDensityNonUniformGHK .. autoclass:: neuroml.nml.nml.ChannelDensityNonUniformGHK :members: :undoc-members: + :show-inheritance: @@ -330,6 +366,7 @@ ChannelDensityNonUniformNernst .. autoclass:: neuroml.nml.nml.ChannelDensityNonUniformNernst :members: :undoc-members: + :show-inheritance: @@ -339,6 +376,7 @@ ChannelDensityVShift .. autoclass:: neuroml.nml.nml.ChannelDensityVShift :members: :undoc-members: + :show-inheritance: @@ -348,6 +386,7 @@ ChannelPopulation .. autoclass:: neuroml.nml.nml.ChannelPopulation :members: :undoc-members: + :show-inheritance: @@ -357,6 +396,7 @@ ClosedState .. autoclass:: neuroml.nml.nml.ClosedState :members: :undoc-members: + :show-inheritance: @@ -366,6 +406,7 @@ ComponentType .. autoclass:: neuroml.nml.nml.ComponentType :members: :undoc-members: + :show-inheritance: @@ -375,6 +416,7 @@ CompoundInput .. autoclass:: neuroml.nml.nml.CompoundInput :members: :undoc-members: + :show-inheritance: @@ -384,6 +426,7 @@ CompoundInputDL .. autoclass:: neuroml.nml.nml.CompoundInputDL :members: :undoc-members: + :show-inheritance: @@ -393,6 +436,7 @@ ConcentrationModel_D .. autoclass:: neuroml.nml.nml.ConcentrationModel_D :members: :undoc-members: + :show-inheritance: @@ -402,6 +446,7 @@ ConditionalDerivedVariable .. autoclass:: neuroml.nml.nml.ConditionalDerivedVariable :members: :undoc-members: + :show-inheritance: @@ -411,6 +456,7 @@ Connection .. autoclass:: neuroml.nml.nml.Connection :members: :undoc-members: + :show-inheritance: @@ -420,6 +466,7 @@ ConnectionWD .. autoclass:: neuroml.nml.nml.ConnectionWD :members: :undoc-members: + :show-inheritance: @@ -429,6 +476,7 @@ Constant .. autoclass:: neuroml.nml.nml.Constant :members: :undoc-members: + :show-inheritance: @@ -438,6 +486,7 @@ ContinuousConnection .. autoclass:: neuroml.nml.nml.ContinuousConnection :members: :undoc-members: + :show-inheritance: @@ -447,6 +496,7 @@ ContinuousConnectionInstance .. autoclass:: neuroml.nml.nml.ContinuousConnectionInstance :members: :undoc-members: + :show-inheritance: @@ -456,6 +506,7 @@ ContinuousConnectionInstanceW .. autoclass:: neuroml.nml.nml.ContinuousConnectionInstanceW :members: :undoc-members: + :show-inheritance: @@ -465,6 +516,7 @@ ContinuousProjection .. autoclass:: neuroml.nml.nml.ContinuousProjection :members: :undoc-members: + :show-inheritance: @@ -474,6 +526,7 @@ DecayingPoolConcentrationModel .. autoclass:: neuroml.nml.nml.DecayingPoolConcentrationModel :members: :undoc-members: + :show-inheritance: @@ -483,6 +536,7 @@ DerivedVariable .. autoclass:: neuroml.nml.nml.DerivedVariable :members: :undoc-members: + :show-inheritance: @@ -492,6 +546,7 @@ DistalDetails .. autoclass:: neuroml.nml.nml.DistalDetails :members: :undoc-members: + :show-inheritance: @@ -501,6 +556,7 @@ DoubleSynapse .. autoclass:: neuroml.nml.nml.DoubleSynapse :members: :undoc-members: + :show-inheritance: @@ -510,6 +566,7 @@ Dynamics .. autoclass:: neuroml.nml.nml.Dynamics :members: :undoc-members: + :show-inheritance: @@ -519,6 +576,7 @@ EIF_cond_alpha_isfa_ista .. autoclass:: neuroml.nml.nml.EIF_cond_alpha_isfa_ista :members: :undoc-members: + :show-inheritance: @@ -528,6 +586,7 @@ EIF_cond_exp_isfa_ista .. autoclass:: neuroml.nml.nml.EIF_cond_exp_isfa_ista :members: :undoc-members: + :show-inheritance: @@ -537,6 +596,7 @@ ElectricalConnection .. 
autoclass:: neuroml.nml.nml.ElectricalConnection :members: :undoc-members: + :show-inheritance: @@ -546,6 +606,7 @@ ElectricalConnectionInstance .. autoclass:: neuroml.nml.nml.ElectricalConnectionInstance :members: :undoc-members: + :show-inheritance: @@ -555,6 +616,7 @@ ElectricalConnectionInstanceW .. autoclass:: neuroml.nml.nml.ElectricalConnectionInstanceW :members: :undoc-members: + :show-inheritance: @@ -564,6 +626,7 @@ ElectricalProjection .. autoclass:: neuroml.nml.nml.ElectricalProjection :members: :undoc-members: + :show-inheritance: @@ -573,6 +636,7 @@ ExpCondSynapse .. autoclass:: neuroml.nml.nml.ExpCondSynapse :members: :undoc-members: + :show-inheritance: @@ -582,6 +646,7 @@ ExpCurrSynapse .. autoclass:: neuroml.nml.nml.ExpCurrSynapse :members: :undoc-members: + :show-inheritance: @@ -591,6 +656,7 @@ ExpOneSynapse .. autoclass:: neuroml.nml.nml.ExpOneSynapse :members: :undoc-members: + :show-inheritance: @@ -600,6 +666,7 @@ ExpThreeSynapse .. autoclass:: neuroml.nml.nml.ExpThreeSynapse :members: :undoc-members: + :show-inheritance: @@ -609,6 +676,7 @@ ExpTwoSynapse .. autoclass:: neuroml.nml.nml.ExpTwoSynapse :members: :undoc-members: + :show-inheritance: @@ -618,6 +686,7 @@ ExplicitInput .. autoclass:: neuroml.nml.nml.ExplicitInput :members: :undoc-members: + :show-inheritance: @@ -627,6 +696,7 @@ Exposure .. autoclass:: neuroml.nml.nml.Exposure :members: :undoc-members: + :show-inheritance: @@ -636,6 +706,7 @@ ExtracellularProperties .. autoclass:: neuroml.nml.nml.ExtracellularProperties :members: :undoc-members: + :show-inheritance: @@ -645,6 +716,7 @@ ExtracellularPropertiesLocal .. autoclass:: neuroml.nml.nml.ExtracellularPropertiesLocal :members: :undoc-members: + :show-inheritance: @@ -654,6 +726,7 @@ FitzHughNagumo1969Cell .. autoclass:: neuroml.nml.nml.FitzHughNagumo1969Cell :members: :undoc-members: + :show-inheritance: @@ -663,6 +736,7 @@ FitzHughNagumoCell .. autoclass:: neuroml.nml.nml.FitzHughNagumoCell :members: :undoc-members: + :show-inheritance: @@ -672,6 +746,7 @@ FixedFactorConcentrationModel .. autoclass:: neuroml.nml.nml.FixedFactorConcentrationModel :members: :undoc-members: + :show-inheritance: @@ -681,6 +756,7 @@ ForwardTransition .. autoclass:: neuroml.nml.nml.ForwardTransition :members: :undoc-members: + :show-inheritance: @@ -690,6 +766,7 @@ GapJunction .. autoclass:: neuroml.nml.nml.GapJunction :members: :undoc-members: + :show-inheritance: @@ -699,6 +776,7 @@ GateFractional .. autoclass:: neuroml.nml.nml.GateFractional :members: :undoc-members: + :show-inheritance: @@ -708,6 +786,7 @@ GateFractionalSubgate .. autoclass:: neuroml.nml.nml.GateFractionalSubgate :members: :undoc-members: + :show-inheritance: @@ -717,6 +796,7 @@ GateHHInstantaneous .. autoclass:: neuroml.nml.nml.GateHHInstantaneous :members: :undoc-members: + :show-inheritance: @@ -726,6 +806,7 @@ GateHHRates .. autoclass:: neuroml.nml.nml.GateHHRates :members: :undoc-members: + :show-inheritance: @@ -735,6 +816,7 @@ GateHHRatesInf .. autoclass:: neuroml.nml.nml.GateHHRatesInf :members: :undoc-members: + :show-inheritance: @@ -744,6 +826,7 @@ GateHHRatesTau .. autoclass:: neuroml.nml.nml.GateHHRatesTau :members: :undoc-members: + :show-inheritance: @@ -753,6 +836,7 @@ GateHHRatesTauInf .. autoclass:: neuroml.nml.nml.GateHHRatesTauInf :members: :undoc-members: + :show-inheritance: @@ -762,6 +846,7 @@ GateHHTauInf .. autoclass:: neuroml.nml.nml.GateHHTauInf :members: :undoc-members: + :show-inheritance: @@ -771,6 +856,7 @@ GateHHUndetermined .. 
autoclass:: neuroml.nml.nml.GateHHUndetermined :members: :undoc-members: + :show-inheritance: @@ -780,6 +866,7 @@ GateKS .. autoclass:: neuroml.nml.nml.GateKS :members: :undoc-members: + :show-inheritance: @@ -789,6 +876,7 @@ GradedSynapse .. autoclass:: neuroml.nml.nml.GradedSynapse :members: :undoc-members: + :show-inheritance: @@ -798,6 +886,7 @@ GridLayout .. autoclass:: neuroml.nml.nml.GridLayout :members: :undoc-members: + :show-inheritance: @@ -807,6 +896,7 @@ HHRate .. autoclass:: neuroml.nml.nml.HHRate :members: :undoc-members: + :show-inheritance: @@ -816,6 +906,7 @@ HHTime .. autoclass:: neuroml.nml.nml.HHTime :members: :undoc-members: + :show-inheritance: @@ -825,6 +916,7 @@ HHVariable .. autoclass:: neuroml.nml.nml.HHVariable :members: :undoc-members: + :show-inheritance: @@ -834,6 +926,7 @@ HH_cond_exp .. autoclass:: neuroml.nml.nml.HH_cond_exp :members: :undoc-members: + :show-inheritance: @@ -843,6 +936,7 @@ IF_cond_alpha .. autoclass:: neuroml.nml.nml.IF_cond_alpha :members: :undoc-members: + :show-inheritance: @@ -852,6 +946,7 @@ IF_cond_exp .. autoclass:: neuroml.nml.nml.IF_cond_exp :members: :undoc-members: + :show-inheritance: @@ -861,6 +956,7 @@ IF_curr_alpha .. autoclass:: neuroml.nml.nml.IF_curr_alpha :members: :undoc-members: + :show-inheritance: @@ -870,6 +966,7 @@ IF_curr_exp .. autoclass:: neuroml.nml.nml.IF_curr_exp :members: :undoc-members: + :show-inheritance: @@ -879,6 +976,7 @@ IafCell .. autoclass:: neuroml.nml.nml.IafCell :members: :undoc-members: + :show-inheritance: @@ -888,6 +986,7 @@ IafRefCell .. autoclass:: neuroml.nml.nml.IafRefCell :members: :undoc-members: + :show-inheritance: @@ -897,6 +996,7 @@ IafTauCell .. autoclass:: neuroml.nml.nml.IafTauCell :members: :undoc-members: + :show-inheritance: @@ -906,6 +1006,7 @@ IafTauRefCell .. autoclass:: neuroml.nml.nml.IafTauRefCell :members: :undoc-members: + :show-inheritance: @@ -915,6 +1016,7 @@ Include .. autoclass:: neuroml.nml.nml.Include :members: :undoc-members: + :show-inheritance: @@ -924,6 +1026,7 @@ IncludeType .. autoclass:: neuroml.nml.nml.IncludeType :members: :undoc-members: + :show-inheritance: @@ -933,6 +1036,7 @@ InhomogeneousParameter .. autoclass:: neuroml.nml.nml.InhomogeneousParameter :members: :undoc-members: + :show-inheritance: @@ -942,6 +1046,7 @@ InhomogeneousValue .. autoclass:: neuroml.nml.nml.InhomogeneousValue :members: :undoc-members: + :show-inheritance: @@ -951,6 +1056,7 @@ InitMembPotential .. autoclass:: neuroml.nml.nml.InitMembPotential :members: :undoc-members: + :show-inheritance: @@ -960,6 +1066,7 @@ Input .. autoclass:: neuroml.nml.nml.Input :members: :undoc-members: + :show-inheritance: @@ -969,6 +1076,7 @@ InputList .. autoclass:: neuroml.nml.nml.InputList :members: :undoc-members: + :show-inheritance: @@ -978,6 +1086,7 @@ InputW .. autoclass:: neuroml.nml.nml.InputW :members: :undoc-members: + :show-inheritance: @@ -987,6 +1096,7 @@ Instance .. autoclass:: neuroml.nml.nml.Instance :members: :undoc-members: + :show-inheritance: @@ -996,6 +1106,7 @@ InstanceRequirement .. autoclass:: neuroml.nml.nml.InstanceRequirement :members: :undoc-members: + :show-inheritance: @@ -1005,6 +1116,7 @@ IntracellularProperties .. autoclass:: neuroml.nml.nml.IntracellularProperties :members: :undoc-members: + :show-inheritance: @@ -1014,6 +1126,7 @@ IntracellularProperties2CaPools .. autoclass:: neuroml.nml.nml.IntracellularProperties2CaPools :members: :undoc-members: + :show-inheritance: @@ -1023,6 +1136,7 @@ IonChannel .. 
autoclass:: neuroml.nml.nml.IonChannel :members: :undoc-members: + :show-inheritance: @@ -1032,6 +1146,7 @@ IonChannelHH .. autoclass:: neuroml.nml.nml.IonChannelHH :members: :undoc-members: + :show-inheritance: @@ -1041,6 +1156,7 @@ IonChannelKS .. autoclass:: neuroml.nml.nml.IonChannelKS :members: :undoc-members: + :show-inheritance: @@ -1050,6 +1166,7 @@ IonChannelScalable .. autoclass:: neuroml.nml.nml.IonChannelScalable :members: :undoc-members: + :show-inheritance: @@ -1059,6 +1176,7 @@ IonChannelVShift .. autoclass:: neuroml.nml.nml.IonChannelVShift :members: :undoc-members: + :show-inheritance: @@ -1068,6 +1186,7 @@ Izhikevich2007Cell .. autoclass:: neuroml.nml.nml.Izhikevich2007Cell :members: :undoc-members: + :show-inheritance: @@ -1077,6 +1196,7 @@ IzhikevichCell .. autoclass:: neuroml.nml.nml.IzhikevichCell :members: :undoc-members: + :show-inheritance: @@ -1086,6 +1206,7 @@ LEMS_Property .. autoclass:: neuroml.nml.nml.LEMS_Property :members: :undoc-members: + :show-inheritance: @@ -1095,6 +1216,7 @@ Layout .. autoclass:: neuroml.nml.nml.Layout :members: :undoc-members: + :show-inheritance: @@ -1104,6 +1226,7 @@ LinearGradedSynapse .. autoclass:: neuroml.nml.nml.LinearGradedSynapse :members: :undoc-members: + :show-inheritance: @@ -1113,6 +1236,7 @@ Location .. autoclass:: neuroml.nml.nml.Location :members: :undoc-members: + :show-inheritance: @@ -1122,6 +1246,7 @@ Member .. autoclass:: neuroml.nml.nml.Member :members: :undoc-members: + :show-inheritance: @@ -1131,6 +1256,7 @@ MembraneProperties .. autoclass:: neuroml.nml.nml.MembraneProperties :members: :undoc-members: + :show-inheritance: @@ -1140,6 +1266,19 @@ MembraneProperties2CaPools .. autoclass:: neuroml.nml.nml.MembraneProperties2CaPools :members: :undoc-members: + :show-inheritance: + + + +MixedContainer: + +################ + +.. autoclass:: neuroml.nml.nml.MixedContainer: + + :members: + :undoc-members: + :show-inheritance: @@ -1149,6 +1288,7 @@ Morphology .. autoclass:: neuroml.nml.nml.Morphology :members: :undoc-members: + :show-inheritance: @@ -1158,6 +1298,7 @@ NamedDimensionalType .. autoclass:: neuroml.nml.nml.NamedDimensionalType :members: :undoc-members: + :show-inheritance: @@ -1167,6 +1308,7 @@ NamedDimensionalVariable .. autoclass:: neuroml.nml.nml.NamedDimensionalVariable :members: :undoc-members: + :show-inheritance: @@ -1176,6 +1318,7 @@ Network .. autoclass:: neuroml.nml.nml.Network :members: :undoc-members: + :show-inheritance: @@ -1185,6 +1328,7 @@ NeuroMLDocument .. autoclass:: neuroml.nml.nml.NeuroMLDocument :members: :undoc-members: + :show-inheritance: @@ -1194,6 +1338,7 @@ OpenState .. autoclass:: neuroml.nml.nml.OpenState :members: :undoc-members: + :show-inheritance: @@ -1203,6 +1348,7 @@ Parameter .. autoclass:: neuroml.nml.nml.Parameter :members: :undoc-members: + :show-inheritance: @@ -1212,6 +1358,7 @@ Path .. autoclass:: neuroml.nml.nml.Path :members: :undoc-members: + :show-inheritance: @@ -1221,6 +1368,7 @@ PinskyRinzelCA3Cell .. autoclass:: neuroml.nml.nml.PinskyRinzelCA3Cell :members: :undoc-members: + :show-inheritance: @@ -1230,6 +1378,7 @@ PlasticityMechanism .. autoclass:: neuroml.nml.nml.PlasticityMechanism :members: :undoc-members: + :show-inheritance: @@ -1239,6 +1388,7 @@ Point3DWithDiam .. autoclass:: neuroml.nml.nml.Point3DWithDiam :members: :undoc-members: + :show-inheritance: @@ -1248,6 +1398,7 @@ PoissonFiringSynapse .. autoclass:: neuroml.nml.nml.PoissonFiringSynapse :members: :undoc-members: + :show-inheritance: @@ -1257,6 +1408,7 @@ Population .. 
autoclass:: neuroml.nml.nml.Population :members: :undoc-members: + :show-inheritance: @@ -1266,6 +1418,7 @@ Projection .. autoclass:: neuroml.nml.nml.Projection :members: :undoc-members: + :show-inheritance: @@ -1275,6 +1428,7 @@ Property .. autoclass:: neuroml.nml.nml.Property :members: :undoc-members: + :show-inheritance: @@ -1284,6 +1438,7 @@ ProximalDetails .. autoclass:: neuroml.nml.nml.ProximalDetails :members: :undoc-members: + :show-inheritance: @@ -1293,6 +1448,7 @@ PulseGenerator .. autoclass:: neuroml.nml.nml.PulseGenerator :members: :undoc-members: + :show-inheritance: @@ -1302,6 +1458,7 @@ PulseGeneratorDL .. autoclass:: neuroml.nml.nml.PulseGeneratorDL :members: :undoc-members: + :show-inheritance: @@ -1311,6 +1468,7 @@ Q10ConductanceScaling .. autoclass:: neuroml.nml.nml.Q10ConductanceScaling :members: :undoc-members: + :show-inheritance: @@ -1320,6 +1478,7 @@ Q10Settings .. autoclass:: neuroml.nml.nml.Q10Settings :members: :undoc-members: + :show-inheritance: @@ -1329,6 +1488,7 @@ RampGenerator .. autoclass:: neuroml.nml.nml.RampGenerator :members: :undoc-members: + :show-inheritance: @@ -1338,6 +1498,7 @@ RampGeneratorDL .. autoclass:: neuroml.nml.nml.RampGeneratorDL :members: :undoc-members: + :show-inheritance: @@ -1347,6 +1508,7 @@ RandomLayout .. autoclass:: neuroml.nml.nml.RandomLayout :members: :undoc-members: + :show-inheritance: @@ -1356,6 +1518,7 @@ ReactionScheme .. autoclass:: neuroml.nml.nml.ReactionScheme :members: :undoc-members: + :show-inheritance: @@ -1365,6 +1528,7 @@ Region .. autoclass:: neuroml.nml.nml.Region :members: :undoc-members: + :show-inheritance: @@ -1374,6 +1538,7 @@ Requirement .. autoclass:: neuroml.nml.nml.Requirement :members: :undoc-members: + :show-inheritance: @@ -1383,6 +1548,7 @@ Resistivity .. autoclass:: neuroml.nml.nml.Resistivity :members: :undoc-members: + :show-inheritance: @@ -1392,6 +1558,7 @@ ReverseTransition .. autoclass:: neuroml.nml.nml.ReverseTransition :members: :undoc-members: + :show-inheritance: @@ -1401,6 +1568,7 @@ Segment .. autoclass:: neuroml.nml.nml.Segment :members: :undoc-members: + :show-inheritance: @@ -1410,6 +1578,7 @@ SegmentEndPoint .. autoclass:: neuroml.nml.nml.SegmentEndPoint :members: :undoc-members: + :show-inheritance: @@ -1419,6 +1588,7 @@ SegmentGroup .. autoclass:: neuroml.nml.nml.SegmentGroup :members: :undoc-members: + :show-inheritance: @@ -1428,6 +1598,7 @@ SegmentParent .. autoclass:: neuroml.nml.nml.SegmentParent :members: :undoc-members: + :show-inheritance: @@ -1437,6 +1608,7 @@ SilentSynapse .. autoclass:: neuroml.nml.nml.SilentSynapse :members: :undoc-members: + :show-inheritance: @@ -1446,6 +1618,7 @@ SineGenerator .. autoclass:: neuroml.nml.nml.SineGenerator :members: :undoc-members: + :show-inheritance: @@ -1455,6 +1628,7 @@ SineGeneratorDL .. autoclass:: neuroml.nml.nml.SineGeneratorDL :members: :undoc-members: + :show-inheritance: @@ -1464,6 +1638,7 @@ Space .. autoclass:: neuroml.nml.nml.Space :members: :undoc-members: + :show-inheritance: @@ -1473,6 +1648,7 @@ SpaceStructure .. autoclass:: neuroml.nml.nml.SpaceStructure :members: :undoc-members: + :show-inheritance: @@ -1482,6 +1658,7 @@ Species .. autoclass:: neuroml.nml.nml.Species :members: :undoc-members: + :show-inheritance: @@ -1491,6 +1668,7 @@ SpecificCapacitance .. autoclass:: neuroml.nml.nml.SpecificCapacitance :members: :undoc-members: + :show-inheritance: @@ -1500,6 +1678,7 @@ Spike .. autoclass:: neuroml.nml.nml.Spike :members: :undoc-members: + :show-inheritance: @@ -1509,6 +1688,7 @@ SpikeArray .. 
autoclass:: neuroml.nml.nml.SpikeArray :members: :undoc-members: + :show-inheritance: @@ -1518,6 +1698,7 @@ SpikeGenerator .. autoclass:: neuroml.nml.nml.SpikeGenerator :members: :undoc-members: + :show-inheritance: @@ -1527,6 +1708,7 @@ SpikeGeneratorPoisson .. autoclass:: neuroml.nml.nml.SpikeGeneratorPoisson :members: :undoc-members: + :show-inheritance: @@ -1536,6 +1718,7 @@ SpikeGeneratorRandom .. autoclass:: neuroml.nml.nml.SpikeGeneratorRandom :members: :undoc-members: + :show-inheritance: @@ -1545,6 +1728,7 @@ SpikeGeneratorRefPoisson .. autoclass:: neuroml.nml.nml.SpikeGeneratorRefPoisson :members: :undoc-members: + :show-inheritance: @@ -1554,6 +1738,7 @@ SpikeSourcePoisson .. autoclass:: neuroml.nml.nml.SpikeSourcePoisson :members: :undoc-members: + :show-inheritance: @@ -1563,6 +1748,7 @@ SpikeThresh .. autoclass:: neuroml.nml.nml.SpikeThresh :members: :undoc-members: + :show-inheritance: @@ -1572,6 +1758,7 @@ Standalone .. autoclass:: neuroml.nml.nml.Standalone :members: :undoc-members: + :show-inheritance: @@ -1581,6 +1768,7 @@ StateVariable .. autoclass:: neuroml.nml.nml.StateVariable :members: :undoc-members: + :show-inheritance: @@ -1590,6 +1778,7 @@ SubTree .. autoclass:: neuroml.nml.nml.SubTree :members: :undoc-members: + :show-inheritance: @@ -1599,6 +1788,7 @@ SynapticConnection .. autoclass:: neuroml.nml.nml.SynapticConnection :members: :undoc-members: + :show-inheritance: @@ -1608,6 +1798,7 @@ TauInfTransition .. autoclass:: neuroml.nml.nml.TauInfTransition :members: :undoc-members: + :show-inheritance: @@ -1617,6 +1808,7 @@ TimeDerivative .. autoclass:: neuroml.nml.nml.TimeDerivative :members: :undoc-members: + :show-inheritance: @@ -1626,6 +1818,7 @@ TimedSynapticInput .. autoclass:: neuroml.nml.nml.TimedSynapticInput :members: :undoc-members: + :show-inheritance: @@ -1635,6 +1828,7 @@ TransientPoissonFiringSynapse .. autoclass:: neuroml.nml.nml.TransientPoissonFiringSynapse :members: :undoc-members: + :show-inheritance: @@ -1644,6 +1838,7 @@ UnstructuredLayout .. autoclass:: neuroml.nml.nml.UnstructuredLayout :members: :undoc-members: + :show-inheritance: @@ -1653,6 +1848,7 @@ VariableParameter .. autoclass:: neuroml.nml.nml.VariableParameter :members: :undoc-members: + :show-inheritance: @@ -1662,6 +1858,7 @@ VoltageClamp .. autoclass:: neuroml.nml.nml.VoltageClamp :members: :undoc-members: + :show-inheritance: @@ -1671,6 +1868,7 @@ VoltageClampTriple .. autoclass:: neuroml.nml.nml.VoltageClampTriple :members: :undoc-members: + :show-inheritance: @@ -1680,6 +1878,7 @@ basePyNNCell .. autoclass:: neuroml.nml.nml.basePyNNCell :members: :undoc-members: + :show-inheritance: @@ -1689,6 +1888,7 @@ basePyNNIaFCell .. autoclass:: neuroml.nml.nml.basePyNNIaFCell :members: :undoc-members: + :show-inheritance: @@ -1698,5 +1898,6 @@ basePyNNIaFCondCell .. autoclass:: neuroml.nml.nml.basePyNNIaFCondCell :members: :undoc-members: + :show-inheritance: From 891cbeb2131701dd7c57ffb6245fc1478b76bf04 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Wed, 17 Nov 2021 17:25:57 +0000 Subject: [PATCH 119/136] docs: change class doc format --- doc/conf.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/conf.py b/doc/conf.py index c981929c..ca9034f4 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -67,8 +67,8 @@ # The full version, including alpha/beta/rc tags. 
release = version -# autodoc_class_signature = "separated" -autoclass_content = "both" +autodoc_class_signature = "mixed" +autoclass_content = "class" # gds specific members to exclude from nml.py doc autodoc_default_options = { From 353d1c9465c4920814599f9c31029805cf8d09a4 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Thu, 18 Nov 2021 10:45:52 +0000 Subject: [PATCH 120/136] chore: sync XSD --- neuroml/nml/NeuroML_v2.2.xsd | 2906 +++++++++++++--------------------- 1 file changed, 1091 insertions(+), 1815 deletions(-) diff --git a/neuroml/nml/NeuroML_v2.2.xsd b/neuroml/nml/NeuroML_v2.2.xsd index e719f3f1..8654a3cc 100644 --- a/neuroml/nml/NeuroML_v2.2.xsd +++ b/neuroml/nml/NeuroML_v2.2.xsd @@ -63,7 +63,7 @@ - + @@ -201,12 +201,6 @@ A property ( a **tag** and **value** pair ), which can be on any **baseStandalone** either as a direct child, or within an **Annotation** . Generally something which helps the visual display or facilitates simulation of a Component, but is not a core physiological property. Common examples include: **numberInternalDivisions,** equivalent of nseg in NEURON; **radius,** for a radius to use in graphical displays for abstract cells ( i. e. without defined morphologies ); **color,** the color to use for a **Population** or **populationList** of cells; **recommended_dt_ms,** the recommended timestep to use for simulating a **Network** , **recommended_duration_ms** the recommended duration to use when running a **Network** - - - - - - @@ -215,12 +209,6 @@ A structured annotation containing metadata, specifically RDF or **property** elements - - - - - - @@ -518,14 +506,12 @@ A kinetic scheme based ion channel with multiple **gateKS** s, each of which consists of multiple **KSState** s and **KSTransition** s giving the rates of transition between them +\n +:param conductance: +:type conductance: conductance + - - - - - - @@ -539,14 +525,12 @@ Note **ionChannel** and **ionChannelHH** are currently functionally identical. This is needed since many existing examples use ionChannel, some use ionChannelHH. NeuroML v2beta4 should remove one of these, probably ionChannelHH. +\n +:param conductance: +:type conductance: conductance + - - - - - - @@ -568,14 +552,12 @@ Note **ionChannel** and **ionChannelHH** are currently functionally identical. This is needed since many existing examples use ionChannel, some use ionChannelHH. NeuroML v2beta4 should remove one of these, probably ionChannelHH. +\n +:param conductance: +:type conductance: conductance + - - - - - - @@ -583,21 +565,17 @@ Same as **ionChannel** , but with a **vShift** parameter to change voltage activation of gates. The exact usage of **vShift** in expressions for rates is determined by the individual gates. 
+\n +:param vShift: +:type vShift: voltage +:param conductance: +:type conductance: conductance + - - - - - - - - - (dimension: voltage) - - + @@ -611,23 +589,16 @@ A value for the conductance scaling which varies as a standard function of the difference between the current temperature, **temperature,** and the temperature at which the conductance was originally determined, **experimentalTemp** +\n +:param q10Factor: +:type q10Factor: none +:param experimentalTemp: +:type experimentalTemp: temperature + - - - - - - - (dimension: none) - - - - - (dimension: temperature) - - - + + @@ -644,44 +615,38 @@ A **KSState** with **relativeConductance** of 0 +\n +:param relativeConductance: +:type relativeConductance: none + - - - - - A **KSState** with **relativeConductance** of 1 +\n +:param relativeConductance: +:type relativeConductance: none + - - - - - A forward only **KSTransition** for a **gateKS** which specifies a **rate** ( type **baseHHRate** ) which follows one of the standard Hodgkin Huxley forms ( e. g. **HHExpRate** , **HHSigmoidRate** , **HHExpLinearRate** - - - - @@ -692,17 +657,12 @@ - A reverse only **KSTransition** for a **gateKS** which specifies a **rate** ( type **baseHHRate** ) which follows one of the standard Hodgkin Huxley forms ( e. g. **HHExpRate** , **HHSigmoidRate** , **HHExpLinearRate** - - - - @@ -713,7 +673,6 @@ - @@ -726,10 +685,6 @@ KS Transition specified in terms of time constant **tau** and steady state **inf** - - - - @@ -740,17 +695,16 @@ - A gate which consists of multiple **KSState** s and **KSTransition** s giving the rates of transition between them +\n +:param instances: +:type instances: none + - - - - @@ -766,7 +720,6 @@ - @@ -792,12 +745,12 @@ Gate which follows the general Hodgkin Huxley formalism +\n +:param instances: +:type instances: none + - - - - @@ -810,17 +763,16 @@ - Gate which follows the general Hodgkin Huxley formalism +\n +:param instances: +:type instances: none + - - - - @@ -833,17 +785,16 @@ - Gate which follows the general Hodgkin Huxley formalism +\n +:param instances: +:type instances: none + - - - - @@ -858,17 +809,16 @@ - Gate which follows the general Hodgkin Huxley formalism +\n +:param instances: +:type instances: none + - - - - @@ -882,17 +832,16 @@ - Gate which follows the general Hodgkin Huxley formalism +\n +:param instances: +:type instances: none + - - - - @@ -906,17 +855,16 @@ - Gate which follows the general Hodgkin Huxley formalism but is instantaneous, so tau = 0 and gate follows exactly inf value +\n +:param instances: +:type instances: none + - - - - @@ -927,17 +875,16 @@ - Gate composed of subgates contributing with fractional conductance +\n +:param instances: +:type instances: none + - - - - @@ -948,7 +895,6 @@ - @@ -999,72 +945,52 @@ Model of an intracellular buffering mechanism for **ion** ( currently hard Coded to be calcium, due to requirement for **iCa** ) which has a baseline level **restingConc** and tends to this value with time course **decayConstant.** The ion is assumed to occupy a shell inside the membrane of thickness **shellThickness.** +\n +:param restingConc: +:type restingConc: concentration +:param decayConstant: +:type decayConstant: time +:param shellThickness: +:type shellThickness: length + - - - - - Should not be required, as it's present on the species element! 
- - - - - (dimension: concentration) - - - - - (dimension: time) - - - - - (dimension: length) - + + + + - Model of buffering of concentration of an ion ( currently hard coded to be calcium, due to requirement for **iCa** ) which has a baseline level **restingConc** and tends to this value with time course **decayConstant.** A fixed factor **rho** is used to scale the incoming current *independently of the size of the compartment* to produce a concentration change. +\n +:param restingConc: +:type restingConc: concentration +:param decayConstant: +:type decayConstant: time +:param rho: +:type rho: rho_factor + - - - - - Should not be required, as it's present on the species element! - - - - - (dimension: concentration) - - - - - (dimension: time) - - - - - (dimension: rho_factor) - + + + + - @@ -1074,341 +1000,251 @@ Base type for all synapses, i. e. ComponentTypes which produce a current ( dimension current ) and change Dynamics in response to an incoming event. cno_0000009 - - - - - Base type for synapses with a dependence on membrane potential - - - - - Synapse model which produces a synaptic current. - - - - - Synapse model which exposes a conductance **g** in addition to producing a current. Not necessarily ohmic!! cno_0000027 +\n +:param gbase: Baseline conductance, generally the maximum conductance following a single spike +:type gbase: conductance +:param erev: Reversal potential of the synapse +:type erev: voltage + - - - - - - - (dimension: conductance): Baseline conductance, generally the maximum conductance following a single spike - - - - - (dimension: voltage): Reversal potential of the synapse - - + + - Synapse model suited for a sum of two expTwoSynapses which exposes a conductance **g** in addition to producing a current. Not necessarily ohmic!! cno_0000027 +\n +:param gbase1: Baseline conductance 1 +:type gbase1: conductance +:param gbase2: Baseline conductance 2 +:type gbase2: conductance +:param erev: Reversal potential of the synapse +:type erev: voltage + - - - - - - - (dimension: conductance): Baseline conductance 1 - - - - - (dimension: conductance): Baseline conductance 2 - - - - - (dimension: voltage): Reversal potential of the synapse - - + + + - Gap junction/single electrical connection +\n +:param conductance: +:type conductance: conductance + - - - - - - Gap junction/single electrical connection - - - - (dimension: conductance) - - + - Dummy synapse which emits no current. Used as presynaptic endpoint for analog synaptic connection. - - - - - - Dummy synapse which emits no current. Used as presynaptic endpoint for analog synaptic connection (continuousConnection). - - Behaves just like a one way gap junction. +\n +:param conductance: +:type conductance: conductance + - - - - - - Behaves just like a one way gap junction. - - - - (dimension: conductance) - - + - - Graded/analog synapse. Based on synapse in Methods of http://www. nature. com/neuro/journal/v7/n12/abs/nn1352. html + Graded/analog synapse. Based on synapse in Methods of http://www. nature.com/neuro/journal/v7/n12/abs/nn1352.html +\n +:param conductance: +:type conductance: conductance +:param delta: Slope of the activation curve +:type delta: voltage +:param k: Rate constant for transmitter-receptor dissociation rate +:type k: per_time +:param Vth: The half-activation voltage of the synapse +:type Vth: voltage +:param erev: The reversal potential of the synapse +:type erev: voltage + - - - - - - Based on synapse in Methods of http://www.nature.com/neuro/journal/v7/n12/abs/nn1352.html. 
- - - - (dimension: conductance) - - - - - (dimension: voltage): Slope of the activation curve - - - - - (dimension: voltage): The half-activation voltage of the synapse - - - - - (dimension: per_time): Rate constant for transmitter-receptor dissociation rate - - - - - (dimension: voltage): The reversal potential of the synapse - - + + + + + - Alpha current synapse: rise time and decay time are both **tau.** +\n +:param tau: Time course for rise and decay +:type tau: time +:param ibase: Baseline current increase after receiving a spike +:type ibase: current + - - - - - - - (dimension: time): Time course for rise and decay - - - - - (dimension: current): Baseline current increase after receiving a spike - - + + - Ohmic synapse model where rise time and decay time are both **tau.** Max conductance reached during this time ( assuming zero conductance before ) is **gbase** * **weight.** +\n +:param tau: Time course of rise/decay +:type tau: time +:param gbase: Baseline conductance, generally the maximum conductance following a single spike +:type gbase: conductance +:param erev: Reversal potential of the synapse +:type erev: voltage + - - - - - - - (dimension: time): Time course of rise/decay - - + - Ohmic synapse model whose conductance rises instantaneously by ( **gbase** * **weight** ) on receiving an event, and which decays exponentially to zero with time course **tauDecay** +\n +:param tauDecay: Time course of decay +:type tauDecay: time +:param gbase: Baseline conductance, generally the maximum conductance following a single spike +:type gbase: conductance +:param erev: Reversal potential of the synapse +:type erev: voltage + - - - - - - - (dimension: time): Time course of decay - - + - Ohmic synapse model whose conductance waveform on receiving an event has a rise time of **tauRise** and a decay time of **tauDecay.** Max conductance reached during this time ( assuming zero conductance before ) is **gbase** * **weight.** +\n +:param tauRise: +:type tauRise: time +:param tauDecay: +:type tauDecay: time +:param gbase: Baseline conductance, generally the maximum conductance following a single spike +:type gbase: conductance +:param erev: Reversal potential of the synapse +:type erev: voltage + - - - - - - - (dimension: time) - - - - - (dimension: time) - - + + - Ohmic synapse similar to expTwoSynapse but consisting of two components that can differ in decay times and max conductances but share the same rise time. +\n +:param tauRise: +:type tauRise: time +:param tauDecay1: +:type tauDecay1: time +:param tauDecay2: +:type tauDecay2: time +:param gbase1: Baseline conductance 1 +:type gbase1: conductance +:param gbase2: Baseline conductance 2 +:type gbase2: conductance +:param erev: Reversal potential of the synapse +:type erev: voltage + - - - - - - - (dimension: time) - - - - - (dimension: time) - - - - - (dimension: time) - - + + + - Synapse consisting of two independent synaptic mechanisms ( e. g. AMPA-R and NMDA-R ), which can be easily colocated in connections - - - - @@ -1417,17 +1253,22 @@ - Biexponential synapse that allows for optional block and plasticity mechanisms, which can be expressed as child elements. +\n +:param tauRise: +:type tauRise: time +:param tauDecay: +:type tauDecay: time +:param gbase: Baseline conductance, generally the maximum conductance following a single spike +:type gbase: conductance +:param erev: Reversal potential of the synapse +:type erev: voltage + - - - - @@ -1436,7 +1277,6 @@ - @@ -1470,500 +1310,371 @@ Base type of any cell ( e. g. 
point neuron like **izhikevich2007Cell** , or a morphologically detailed **Cell** with **segment** s ) which can be used in a **population** - - - - - Integrate and fire cell which returns to its leak reversal potential of **leakReversal** with a time constant **tau** +\n +:param leakReversal: +:type leakReversal: voltage +:param tau: +:type tau: time +:param thresh: The membrane potential at which to emit a spiking event and reset voltage +:type thresh: voltage +:param reset: The value the membrane potential is reset to on spiking +:type reset: voltage + - - - - - - - (dimension: voltage) - - + - - - (dimension: time) - - + - Integrate and fire cell which returns to its leak reversal potential of **leakReversal** with a time course **tau.** It has a refractory period of **refract** after spiking +\n +:param refract: +:type refract: time +:param leakReversal: +:type leakReversal: voltage +:param tau: +:type tau: time +:param thresh: The membrane potential at which to emit a spiking event and reset voltage +:type thresh: voltage +:param reset: The value the membrane potential is reset to on spiking +:type reset: voltage + - - - - - - - (dimension: time) - - + - Integrate and fire cell with capacitance **C,** **leakConductance** and **leakReversal** +\n +:param leakConductance: +:type leakConductance: conductance +:param leakReversal: +:type leakReversal: voltage +:param thresh: +:type thresh: voltage +:param reset: +:type reset: voltage +:param C: Total capacitance of the cell membrane +:type C: capacitance + - - - - - - - (dimension: voltage) - - + - - - (dimension: conductance) - - + - Integrate and fire cell with capacitance **C,** **leakConductance,** **leakReversal** and refractory period **refract** +\n +:param refract: +:type refract: time +:param leakConductance: +:type leakConductance: conductance +:param leakReversal: +:type leakReversal: voltage +:param thresh: +:type thresh: voltage +:param reset: +:type reset: voltage +:param C: Total capacitance of the cell membrane +:type C: capacitance + - - - - - - - (dimension: time) - - + - - Cell based on the 2003 model of Izhikevich, see http://izhikevich. org/publications/spikes. 
htm + Cell based on the 2003 model of Izhikevich, see http://izhikevich.org/publications/spikes.htm +\n +:param v0: Initial membrane potential +:type v0: voltage +:param a: Time scale of the recovery variable U +:type a: none +:param b: Sensitivity of U to the subthreshold fluctuations of the membrane potential v +:type b: none +:param c: After-spike reset value of v +:type c: none +:param d: After-spike reset of u +:type d: none +:param thresh: Spike threshold +:type thresh: voltage + + + + + + + + + + + + + - - - + Any cell with a membrane potential **v** with voltage units and a membrane capacitance **C.** Also defines exposed value **iSyn** for current due to external synapses and **iMemb** for total transmembrane current ( usually channel currents plus **iSyn** ) +\n +:param C: Total capacitance of the cell membrane +:type C: capacitance + + + - - - (dimension: voltage): Initial membrane potential - - - + - (dimension: voltage): Spike threshold - - - - - (dimension: none): Time scale of the recovery variable U - - - - - (dimension: none): Sensitivity of U to the subthreshold fluctuations of the membrane potential v - - - - - (dimension: none): After-spike reset value of v - - - - - (dimension: none): After-spike reset of u + + + - - - - - Any cell with a membrane potential **v** with voltage units and a membrane capacitance **C.** Also defines exposed value **iSyn** for current due to external synapses and **iMemb** for total transmembrane current ( usually channel currents plus **iSyn** ) - - - - - - - - - - - (dimension: capacitance): Total capacitance of the cell membrane - - - - - - - - - - Cell based on the modified Izhikevich model in Izhikevich 2007, Dynamical systems in neuroscience, MIT Press +\n +:param v0: +:type v0: voltage +:param k: +:type k: conductance_per_voltage +:param vr: +:type vr: voltage +:param vt: +:type vt: voltage +:param vpeak: +:type vpeak: voltage +:param a: +:type a: per_time +:param b: +:type b: conductance +:param c: +:type c: voltage +:param d: +:type d: current +:param C: Total capacitance of the cell membrane +:type C: capacitance + - - - - - - - (dimension: voltage) - - - - - (dimension: conductance_per_voltage) - - - - - (dimension: voltage) - - - - - (dimension: voltage) - - - - - (dimension: voltage) - - - - - (dimension: per_time) - - - - - (dimension: conductance) - - - - - (dimension: voltage) - - - - - (dimension: current) - - + + + + + + + + + - Model based on Brette R and Gerstner W ( 2005 ) Adaptive Exponential Integrate-and-Fire Model as an Effective Description of Neuronal Activity. J Neurophysiol 94:3637-3642 +\n +:param gL: +:type gL: conductance +:param EL: +:type EL: voltage +:param VT: +:type VT: voltage +:param thresh: +:type thresh: voltage +:param reset: +:type reset: voltage +:param delT: +:type delT: voltage +:param tauw: +:type tauw: time +:param refract: +:type refract: time +:param a: +:type a: conductance +:param b: +:type b: current +:param C: Total capacitance of the cell membrane +:type C: capacitance + - - - - - - - (dimension: conductance) - - - - - (dimension: voltage) - - - - - (dimension: voltage) - - - - - (dimension: voltage) - - - - - (dimension: voltage) - - - - - (dimension: voltage) - - - - - (dimension: time) - - - - - (dimension: time) - - - - - (dimension: conductance) - - - - - (dimension: current) - - + + + + + + + + + + - - Simple dimensionless model of spiking cell from FitzHugh and Nagumo. Superseded by **fitzHughNagumo1969Cell** ( See https://github. 
com/NeuroML/NeuroML2/issues/42 ) + Simple dimensionless model of spiking cell from FitzHugh and Nagumo. Superseded by **fitzHughNagumo1969Cell** ( See https://github.com/NeuroML/NeuroML2/issues/42 ) +\n +:param I: +:type I: none + - - - - - - - (dimension: none) - - + - The Fitzhugh Nagumo model is a two-dimensional simplification of the Hodgkin-Huxley model of spike generation in squid giant axons. This system was suggested by FitzHugh ( FitzHugh R. [1961]: Impulses and physiological states in theoretical models of nerve membrane. Biophysical J. 1:445-466 ), who called it " Bonhoeffer-van der Pol model ", and the equivalent circuit by Nagumo et al. ( Nagumo J. , Arimoto S. , and Yoshizawa S. [1962] An active pulse transmission line simulating nerve axon. Proc IRE. 50:2061-2070. 1962 ). This version corresponds to the one described in FitzHugh R. [1969]: Mathematical models of excitation and propagation in nerve. Chapter 1 ( pp. 1-85 in H. P. Schwan, ed. Biological Engineering, McGraw-Hill Book Co. , N. Y. ) +\n +:param a: +:type a: none +:param b: +:type b: none +:param I: plays the role of an external injected current +:type I: none +:param phi: +:type phi: none +:param V0: +:type V0: none +:param W0: +:type W0: none + - - - - - - - (dimension: none) - - - - - (dimension: none) - - - - - (dimension: none): plays the role of an external injected current - - - - - (dimension: none) - - - - - (dimension: none) - - - - - (dimension: none) - - + + + + + + - - Reduced CA3 cell model from Pinsky and Rinzel 1994. See https://github. com/OpenSourceBrain/PinskyRinzelModel + Reduced CA3 cell model from Pinsky and Rinzel 1994. See https://github.com/OpenSourceBrain/PinskyRinzelModel +\n +:param iSoma: +:type iSoma: currentDensity +:param iDend: +:type iDend: currentDensity +:param gLs: +:type gLs: conductanceDensity +:param gLd: +:type gLd: conductanceDensity +:param gNa: +:type gNa: conductanceDensity +:param gKdr: +:type gKdr: conductanceDensity +:param gCa: +:type gCa: conductanceDensity +:param gKahp: +:type gKahp: conductanceDensity +:param gKC: +:type gKC: conductanceDensity +:param gc: +:type gc: conductanceDensity +:param eNa: +:type eNa: voltage +:param eCa: +:type eCa: voltage +:param eK: +:type eK: voltage +:param eL: +:type eL: voltage +:param pp: +:type pp: none +:param cm: +:type cm: specificCapacitance +:param alphac: +:type alphac: none +:param betac: +:type betac: none +:param gNmda: +:type gNmda: conductanceDensity +:param gAmpa: +:type gAmpa: conductanceDensity +:param qd0: +:type qd0: none + - - - - - - - (dimension: currentDensity) - - - - - (dimension: currentDensity) - - - - - (dimension: conductanceDensity) - - - - - (dimension: conductanceDensity) - - - - - (dimension: conductanceDensity) - - - - - (dimension: conductanceDensity) - - - - - (dimension: conductanceDensity) - - - - - (dimension: conductanceDensity) - - - - - (dimension: conductanceDensity) - - - - - (dimension: conductanceDensity) - - - - - (dimension: conductanceDensity) - - - - - (dimension: conductanceDensity) - - - - - (dimension: voltage) - - - - - (dimension: voltage) - - - - - (dimension: voltage) - - - - - (dimension: voltage) - - - - - (dimension: none) - - - - - (dimension: none) - - - - - (dimension: none) - - - - - (dimension: none) - - - - - (dimension: specificCapacitance) - - + + + + + + + + + + + + + + + + + + + + + - Cell with **segment** s specified in a **morphology** element along with details on its **biophysicalProperties** . 
NOTE: this can only be correctly simulated using jLEMS when there is a single segment in the cell, and **v** of this cell represents the membrane potential in that isopotential segment. - - - - @@ -1972,31 +1683,20 @@ - Should only be used if morphology element is outside the cell. - This points to the id of the morphology - - + - Should only be used if biophysicalProperties element is outside the cell. - This points to the id of the biophysicalProperties - - + - Variant of cell with two independent Ca2+ pools. Cell with **segment** s specified in a **morphology** element along with details on its **biophysicalProperties** . NOTE: this can only be correctly simulated using jLEMS when there is a single segment in the cell, and **v** of this cell represents the membrane potential in that isopotential segment. - - - - @@ -2004,19 +1704,12 @@ - The collection of **segment** s which specify the 3D structure of the cell, along with a number of **segmentGroup** s - - - - - - @@ -2031,10 +1724,6 @@ A segment defines the smallest unit within a possibly branching structure ( **morphology** ), such as a dendrite or axon. Its **id** should be a nonnegative integer ( usually soma/root = 0 ). Its end points are given by the **proximal** and **distal** points. The **proximal** point can be omitted, usually because it is the same as a point on the **parent** segment, see **proximal** for details. **parent** specifies the parent segment. The first segment of a **cell** ( with no **parent** ) usually represents the soma. The shape is normally a cylinder ( radii of the **proximal** and **distal** equal, but positions different ) or a conical frustum ( radii and positions different ). If the x, y, x positions of the **proximal** and **distal** are equal, the segment can be interpreted as a sphere, and in this case the radii of these points must be equal. NOTE: LEMS does not yet support multicompartmental modelling, so the Dynamics here is only appropriate for single compartment modelling. - - - - @@ -2046,7 +1735,6 @@ - @@ -2055,44 +1743,28 @@ Base type for ComponentTypes which specify an ( **x,** **y,** **z** ) coordinate along with a **diameter.** Note: no dimension used in the attributes for these coordinates! These are assumed to have dimension micrometer ( 10^-6 m ). This is due to micrometers being the default option for the majority of neuronal morphology formats, and dimensions are omitted here to facilitate reading and writing of morphologies in NeuroML. +\n +:param x: x coordinate of the point. Note: no dimension used, see description of **point3DWithDiam** for details. +:type x: none +:param y: y coordinate of the ppoint. Note: no dimension used, see description of **point3DWithDiam** for details. +:type y: none +:param z: z coordinate of the ppoint. Note: no dimension used, see description of **point3DWithDiam** for details. +:type z: none +:param diameter: Diameter of the ppoint. Note: no dimension used, see description of **point3DWithDiam** for details. +:type diameter: none + - - - - - - - - - (dimension: none): x coordinate of the point. Note: no dimension used, see description of **point3DWithDiam** for details. - - - - - (dimension: none): y coordinate of the ppoint. Note: no dimension used, see description of **point3DWithDiam** for details. - - - - - (dimension: none): z coordinate of the ppoint. Note: no dimension used, see description of **point3DWithDiam** for details. - - - - - (dimension: none): Diameter of the ppoint. 
Note: no dimension used, see description of **point3DWithDiam** for details. - - + + + + A method to describe a group of **segment** s in a **morphology** , e. g. soma_group, dendrite_group, axon_group. While a name is useful to describe the group, the **neuroLexId** attribute can be used to explicitly specify the meaning of the group, e. g. sao1044911821 for 'Neuronal Cell Body', sao1211023249 for 'Dendrite'. The **segment** s in this group can be specified as: a list of individual **member** segments; a **path** , all of the segments along which should be included; a **subTree** of the **cell** to include; other segmentGroups to **include** ( so all segments from those get included here ). An **inhomogeneousParameter** can be defined on the region of the cell specified by this group ( see **variableParameter** for usage ). - - - - @@ -2107,17 +1779,12 @@ - An inhomogeneous parameter specified across the **segmentGroup** ( see **variableParameter** for usage ). - - - - @@ -2128,7 +1795,6 @@ - @@ -2149,54 +1815,34 @@ A single identified **segment** which is part of the **segmentGroup** - - - - - Include all members of another **segmentGroup** in this group - - - - - Include all the **segment** s between those specified by **from** and **to** , inclusive - - - - - Include all the **segment** s distal to that specified by **from** in the **segmentGroup** - - - - - @@ -2209,12 +1855,6 @@ The biophysical properties of the **cell** , including the **membraneProperties** and the **intracellularProperties** - - - - - - @@ -2230,12 +1870,6 @@ The biophysical properties of the **cell** , including the **membraneProperties2CaPools** and the **intracellularProperties2CaPools** for a cell with two Ca pools - - - - - - @@ -2251,10 +1885,6 @@ Properties specific to the membrane, such as the **populations** of channels, **channelDensities,** **specificCapacitance,** etc. - - - - @@ -2271,17 +1901,12 @@ - Variant of membraneProperties with 2 independent Ca pools - - - - @@ -2290,167 +1915,112 @@ - Membrane potential at which to emit a spiking event. Note, usually the spiking event will not be emitted again until the membrane potential has fallen below this value and rises again to cross it in a positive direction +\n +:param value: +:type value: voltage + - - - - - - - - - (dimension: voltage) - - + Capacitance per unit area +\n +:param value: +:type value: specificCapacitance + - - - - - - - - - (dimension: specificCapacitance) - - + Explicitly set initial membrane potential for the cell +\n +:param value: +:type value: voltage + - - - - - - - - - (dimension: voltage) - - + The resistivity, or specific axial resistance, of the cytoplasm +\n +:param value: +:type value: resistivity + - - - - - - - - - (dimension: resistivity) - - + Population of a **number** of ohmic ion channels. These each produce a conductance **channelg** across a reversal potential **erev,** giving a total current **i.** Note that active membrane currents are more frequently specified as a density over an area of the **cell** using **channelDensity** +\n +:param number: The number of channels present. This will be multiplied by the time varying conductance of the individual ion channel ( which extends **baseIonChannel** ) to produce the total conductance +:type number: none +:param erev: The reversal potential of the current produced +:type erev: voltage + - - - - - - - (dimension: none): The number of channels present. 
This will be multiplied by the time varying conductance of the individual ion channel ( which extends **baseIonChannel** ) to produce the total conductance - - - - - (dimension: voltage): The reversal potential of the current produced - - + + - Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here - TEMPORARILY since selecting all ca or na conducting channel populations/densities in a cell would be difficult otherwise. - Also, it will make it easier to set the correct native simulator value for erev (e.g. ek for ion = k in NEURON). - Currently a required attribute. - It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel - element. TODO: remove. - - + - Specifies a time varying ohmic conductance density, which is distributed on a region of the **cell.** The conductance density of the channel is not uniform, but is set using the **variableParameter** . Note, there is no dynamical description of this in LEMS yet, as this type only makes sense for multicompartmental cells. A ComponentType for this needs to be present to enable export of NeuroML 2 multicompartmental cells via LEMS/jNeuroML to NEURON +\n +:param erev: The reversal potential of the current produced +:type erev: voltage + - - - - - - - (dimension: voltage): The reversal potential of the current produced - - + - Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here - TEMPORARILY since selecting all ca or na conducting channel populations/densities in a cell would be difficult otherwise. - Also, it will make it easier to set the correct native simulator value for erev (e.g. ek for ion = k in NEURON). - Currently a required attribute. - It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel - element. TODO: remove. - - + - Specifies a time varying conductance density, which is distributed on a region of the **cell,** and whose reversal potential is calculated from the Nernst equation. Hard coded for Ca only!. The conductance density of the channel is not uniform, but is set using the **variableParameter** . Note, there is no dynamical description of this in LEMS yet, as this type only makes sense for multicompartmental cells. A ComponentType for this needs to be present to enable export of NeuroML 2 multicompartmental cells via LEMS/jNeuroML to NEURON - - - - @@ -2459,28 +2029,16 @@ - Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here - TEMPORARILY since selecting all ca or na conducting channel populations/densities in a cell would be difficult otherwise. - Also, it will make it easier to set the correct native simulator value for erev (e.g. ek for ion = k in NEURON). - Currently a required attribute. - It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel - element. TODO: remove. - - + - Specifies a time varying conductance density, which is distributed on a region of the **cell,** and whose current is calculated from the Goldman-Hodgkin-Katz equation. Hard coded for Ca only!. The conductance density of the channel is not uniform, but is set using the **variableParameter** . Note, there is no dynamical description of this in LEMS yet, as this type only makes sense for multicompartmental cells. 
A ComponentType for this needs to be present to enable export of NeuroML 2 multicompartmental cells via LEMS/jNeuroML to NEURON - - - - @@ -2489,28 +2047,22 @@ - Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here - TEMPORARILY since selecting all ca or na conducting channel populations/densities in a cell would be difficult otherwise. - Also, it will make it easier to set the correct native simulator value for erev (e.g. ek for ion = k in NEURON). - Currently a required attribute. - It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel - element. TODO: remove. - - + - Specifies a time varying ohmic conductance density, **gDensity,** which is distributed on an area of the **cell** ( specified in **membraneProperties** ) with fixed reversal potential **erev** producing a current density **iDensity** +\n +:param erev: The reversal potential of the current produced +:type erev: voltage +:param condDensity: +:type condDensity: conductanceDensity + - - - - @@ -2518,58 +2070,45 @@ - - - (dimension: voltage): The reversal potential of the current produced - - + - Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here - TEMPORARILY since selecting all ca or na conducting channel populations/densities in a cell would be difficult otherwise. - Also, it will make it easier to set the correct native simulator value for erev (e.g. ek for ion = k in NEURON). - Currently a required attribute. - It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel - element. TODO: remove. - - + - Same as **channelDensity** , but with a **vShift** parameter to change voltage activation of gates. The exact usage of **vShift** in expressions for rates is determined by the individual gates. +\n +:param vShift: +:type vShift: voltage +:param erev: The reversal potential of the current produced +:type erev: voltage +:param condDensity: +:type condDensity: conductanceDensity + - - - - - - - (dimension: voltage) - - + - - Specifies a time varying conductance density, **gDensity,** which is distributed on an area of the **cell,** producing a current density **iDensity** and whose reversal potential is calculated from the Nernst equation. Hard coded for Ca only! See https://github. com/OpenSourceBrain/ghk-nernst. + Specifies a time varying conductance density, **gDensity,** which is distributed on an area of the **cell,** producing a current density **iDensity** and whose reversal potential is calculated from the Nernst equation. Hard coded for Ca only! See https://github.com/OpenSourceBrain/ghk-nernst. +\n +:param condDensity: +:type condDensity: conductanceDensity + - - - - @@ -2582,79 +2121,58 @@ - Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here - TEMPORARILY since selecting all ca or na conducting channel populations/densities in a cell would be difficult otherwise. - Also, it will make it easier to set the correct native simulator value for erev (e.g. ek for ion = k in NEURON). - Currently a required attribute. - It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel - element. TODO: remove. - - + - - This component is similar to the original component type **channelDensityNernst** but it is changed in order to have a reversal potential that depends on a second independent Ca++ pool ( ca2 ). 
See https://github. com/OpenSourceBrain/ghk-nernst. + This component is similar to the original component type **channelDensityNernst** but it is changed in order to have a reversal potential that depends on a second independent Ca++ pool ( ca2 ). See https://github.com/OpenSourceBrain/ghk-nernst. +\n +:param condDensity: +:type condDensity: conductanceDensity + - - - - - - Specifies a time varying conductance density, **gDensity,** which is distributed on an area of the cell, producing a current density **iDensity** and whose reversal potential is calculated from the Goldman Hodgkin Katz equation. Hard coded for Ca only! See https://github. com/OpenSourceBrain/ghk-nernst. + Specifies a time varying conductance density, **gDensity,** which is distributed on an area of the cell, producing a current density **iDensity** and whose reversal potential is calculated from the Goldman Hodgkin Katz equation. Hard coded for Ca only! See https://github.com/OpenSourceBrain/ghk-nernst. +\n +:param permeability: +:type permeability: permeability + - - - - - - - (dimension: permeability) - - + - Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here - TEMPORARILY since selecting all ca or na conducting channel populations/densities in a cell would be difficult otherwise. - Also, it will make it easier to set the correct native simulator value for erev (e.g. ek for ion = k in NEURON). - Currently a required attribute. - It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel - element. TODO: remove. - - + - - Time varying conductance density, **gDensity,** which is distributed on an area of the cell, producing a current density **iDensity.** Modified version of Jaffe et al. 1994 ( used also in Lawrence et al. 2006 ). See https://github. com/OpenSourceBrain/ghk-nernst. + Time varying conductance density, **gDensity,** which is distributed on an area of the cell, producing a current density **iDensity.** Modified version of Jaffe et al. 1994 ( used also in Lawrence et al. 2006 ). See https://github.com/OpenSourceBrain/ghk-nernst. +\n +:param condDensity: +:type condDensity: conductanceDensity + - - - - @@ -2667,47 +2185,29 @@ - Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here - TEMPORARILY since selecting all ca or na conducting channel populations/densities in a cell would be difficult otherwise. - Also, it will make it easier to set the correct native simulator value for erev (e.g. ek for ion = k in NEURON). - Currently a required attribute. - It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel - element. TODO: remove. - - + - Specifies a **parameter** ( e. g. condDensity ) which can vary its value across a **segmentGroup.** The value is calculated from **value** attribute of the **inhomogeneousValue** subelement. This element is normally a child of **channelDensityNonUniform** , **channelDensityNonUniformNernst** or **channelDensityNonUniformGHK** and is used to calculate the value of the conductance, etc. which will vary on different parts of the cell. The **segmentGroup** specified here needs to define an **inhomogeneousParameter** ( referenced from **inhomogeneousParameter** in the **inhomogeneousValue** ), which calculates a **variable** ( e. g. p ) varying across the cell ( e. g. 
based on the path length from soma ), which is then used in the **value** attribute of the **inhomogeneousValue** ( so for example condDensity = f( p ) ) - - - - - Specifies the **value** of an **inhomogeneousParameter.** For usage see **variableParameter** - - - - - @@ -2763,31 +2253,21 @@ Biophysical properties related to the intracellular space within the **cell** , such as the **resistivity** and the list of ionic **species** present. **caConc** and **caConcExt** are explicitly exposed here to facilitate accessing these values from other Components, even though **caConcExt** is clearly not an intracellular property - - - - - Variant of intracellularProperties with 2 independent Ca pools - - - - - @@ -2826,234 +2306,158 @@ Generates a constant current pulse of a certain **amplitude** for a specified **duration** after a **delay.** Scaled by **weight,** if set +\n +:param delay: Delay before change in current. Current is zero prior to this. +:type delay: time +:param duration: Duration for holding current at amplitude. Current is zero after delay + duration. +:type duration: time +:param amplitude: Amplitude of current pulse +:type amplitude: current + - - - - - - - - - (dimension: time): Delay before change in current. Current is zero prior to this. - - - - - (dimension: time): Duration for holding current at amplitude. Current is zero after delay + duration. - - - - - (dimension: current): Amplitude of current pulse - - + + + Dimensionless equivalent of **pulseGenerator** . Generates a constant current pulse of a certain **amplitude** for a specified **duration** after a **delay.** Scaled by **weight,** if set +\n +:param delay: Delay before change in current. Current is zero prior to this. +:type delay: time +:param duration: Duration for holding current at amplitude. Current is zero after delay + duration. +:type duration: time +:param amplitude: Amplitude of current pulse +:type amplitude: none + - - - - - - - - - (dimension: time): Delay before change in current. Current is zero prior to this. - - - - - (dimension: time): Duration for holding current at amplitude. Current is zero after delay + duration. - - - - - (dimension: none): Amplitude of current pulse - - + + + Generates a sinusoidally varying current after a time **delay,** for a fixed **duration.** The **period** and maximum **amplitude** of the current can be set as well as the **phase** at which to start. Scaled by **weight,** if set +\n +:param phase: Phase ( between 0 and 2*pi ) at which to start the varying current ( i. e. at time given by delay ) +:type phase: none +:param delay: Delay before change in current. Current is zero prior to this. +:type delay: time +:param duration: Duration for holding current at amplitude. Current is zero after delay + duration. +:type duration: time +:param amplitude: Maximum amplitude of current +:type amplitude: current +:param period: Time period of oscillation +:type period: time + - - - - - - - (dimension: time): Delay before change in current. Current is zero prior to this. - - - - - (dimension: none): Phase ( between 0 and 2*pi ) at which to start the varying current ( i. e. at time given by delay ) - - - - - (dimension: time): Duration for holding current at amplitude. Current is zero after delay + duration. - - - - - (dimension: current): Maximum amplitude of current - - - - - (dimension: time): Time period of oscillation - - + + + + + - Dimensionless equivalent of **sineGenerator** . 
Generates a sinusoidally varying current after a time **delay,** for a fixed **duration.** The **period** and maximum **amplitude** of the current can be set as well as the **phase** at which to start. Scaled by **weight,** if set +\n +:param phase: Phase ( between 0 and 2*pi ) at which to start the varying current ( i. e. at time given by delay ) +:type phase: none +:param delay: Delay before change in current. Current is zero prior to this. +:type delay: time +:param duration: Duration for holding current at amplitude. Current is zero after delay + duration. +:type duration: time +:param amplitude: Maximum amplitude of current +:type amplitude: none +:param period: Time period of oscillation +:type period: time + - - - - - - - (dimension: time): Delay before change in current. Current is zero prior to this. - - - - - (dimension: none): Phase ( between 0 and 2*pi ) at which to start the varying current ( i. e. at time given by delay ) - - - - - (dimension: time): Duration for holding current at amplitude. Current is zero after delay + duration. - - - - - (dimension: none): Maximum amplitude of current - - - - - (dimension: time): Time period of oscillation - - + + + + + - Generates a ramping current after a time **delay,** for a fixed **duration.** During this time the current steadily changes from **startAmplitude** to **finishAmplitude.** Scaled by **weight,** if set +\n +:param delay: Delay before change in current. Current is baselineAmplitude prior to this. +:type delay: time +:param duration: Duration for holding current at amplitude. Current is baselineAmplitude after delay + duration. +:type duration: time +:param startAmplitude: Amplitude of linearly varying current at time delay +:type startAmplitude: current +:param finishAmplitude: Amplitude of linearly varying current at time delay + duration +:type finishAmplitude: current +:param baselineAmplitude: Amplitude of current before time delay, and after time delay + duration +:type baselineAmplitude: current + - - - - - - - (dimension: time): Delay before change in current. Current is baselineAmplitude prior to this. - - - - - (dimension: time): Duration for holding current at amplitude. Current is baselineAmplitude after delay + duration. - - - - - (dimension: current): Amplitude of linearly varying current at time delay - - - - - (dimension: current): Amplitude of linearly varying current at time delay + duration - - - - - (dimension: current): Amplitude of current before time delay, and after time delay + duration - - + + + + + - Dimensionless equivalent of **rampGenerator** . Generates a ramping current after a time **delay,** for a fixed **duration.** During this time the dimensionless current steadily changes from **startAmplitude** to **finishAmplitude.** Scaled by **weight,** if set +\n +:param delay: Delay before change in current. Current is baselineAmplitude prior to this. +:type delay: time +:param duration: Duration for holding current at amplitude. Current is baselineAmplitude after delay + duration. +:type duration: time +:param startAmplitude: Amplitude of linearly varying current at time delay +:type startAmplitude: none +:param finishAmplitude: Amplitude of linearly varying current at time delay + duration +:type finishAmplitude: none +:param baselineAmplitude: Amplitude of current before time delay, and after time delay + duration +:type baselineAmplitude: none + - - - - - - - (dimension: time): Delay before change in current. Current is baselineAmplitude prior to this. 
- - - - - (dimension: time): Duration for holding current at amplitude. Current is baselineAmplitude after delay + duration. - - - - - (dimension: none): Amplitude of linearly varying current at time delay - - - - - (dimension: none): Amplitude of linearly varying current at time delay + duration - - - - - (dimension: none): Amplitude of current before time delay, and after time delay + duration - - + + + + + - Generates a current which is the sum of all its child **basePointCurrent** element, e. g. can be a combination of **pulseGenerator** , **sineGenerator** elements producing a single **i.** Scaled by **weight,** if set - - - - @@ -3063,17 +2467,12 @@ - Generates a current which is the sum of all its child **basePointCurrentDL** elements, e. g. can be a combination of **pulseGeneratorDL** , **sineGeneratorDL** elements producing a single **i.** Scaled by **weight,** if set - - - - @@ -3083,122 +2482,84 @@ - Voltage clamp. Applies a variable current **i** to try to keep parent at **targetVoltage.** Not yet fully tested!!! Consider using voltageClampTriple!! +\n +:param delay: Delay before change in current. Current is zero prior to this. +:type delay: time +:param duration: Duration for attempting to keep parent at targetVoltage. Current is zero after delay + duration. +:type duration: time +:param targetVoltage: Current will be applied to try to get parent to this target voltage +:type targetVoltage: voltage +:param simpleSeriesResistance: Current will be calculated by the difference in voltage between the target and parent, divided by this value +:type simpleSeriesResistance: resistance + - - - - - - - (dimension: time): Delay before change in current. Current is zero prior to this. - - - - - (dimension: time): Duration for attempting to keep parent at targetVoltage. Current is zero after delay + duration. - - - - - (dimension: voltage): Current will be applied to try to get parent to this target voltage - - - - - (dimension: resistance): Current will be calculated by the difference in voltage between the target and parent, divided by this value - - + + + + - Voltage clamp with 3 clamp levels. Applies a variable current **i** ( through **simpleSeriesResistance** ) to try to keep parent cell at **conditioningVoltage** until time **delay,** **testingVoltage** until **delay** + **duration,** and **returnVoltage** afterwards. Only enabled if **active** = 1. +\n +:param active: Whether the voltage clamp is active ( 1 ) or inactive ( 0 ). +:type active: none +:param delay: Delay before switching from conditioningVoltage to testingVoltage. +:type delay: time +:param duration: Duration to hold at testingVoltage. +:type duration: time +:param conditioningVoltage: Target voltage before time delay +:type conditioningVoltage: voltage +:param testingVoltage: Target voltage between times delay and delay + duration +:type testingVoltage: voltage +:param returnVoltage: Target voltage after time duration +:type returnVoltage: voltage +:param simpleSeriesResistance: Current will be calculated by the difference in voltage between the target and parent, divided by this value +:type simpleSeriesResistance: resistance + - - - - - - - (dimension: none): Whether the voltage clamp is active ( 1 ) or inactive ( 0 ). - - - - - (dimension: time): Delay before switching from conditioningVoltage to testingVoltage. - - - - - (dimension: time): Duration to hold at testingVoltage. 
- - - - - (dimension: voltage): Target voltage before time delay - - - - - (dimension: voltage): Target voltage between times delay and delay + duration - - - - - (dimension: voltage): Target voltage after time duration - - - - - (dimension: resistance): Current will be calculated by the difference in voltage between the target and parent, divided by this value - - + + + + + + + - Emits a single spike at the specified **time** +\n +:param time: Time at which to emit one spike event +:type time: time + - - - - - - - (dimension: time): Time at which to emit one spike event - - + - Set of spike ComponentTypes, each emitting one spike at a certain time. Can be used to feed a predetermined spike train into a cell - - - - @@ -3206,17 +2567,12 @@ - Spike array connected to a single **synapse,** producing a current triggered by each **spike** in the array. - - - - @@ -3226,146 +2582,111 @@ - Simple generator of spikes at a regular interval set by **period** +\n +:param period: Time between spikes. The first spike will be emitted after this time. +:type period: time + - - - - - - - (dimension: time): Time between spikes. The first spike will be emitted after this time. - - + - Generator of spikes with a random interspike interval of at least **minISI** and at most **maxISI** +\n +:param maxISI: Maximum interspike interval +:type maxISI: time +:param minISI: Minimum interspike interval +:type minISI: time + - - - - - - - (dimension: time): Maximum interspike interval - - - - - (dimension: time): Minimum interspike interval - - + + - Generator of spikes whose ISI is distributed according to an exponential PDF with scale: 1 / **averageRate** +\n +:param averageRate: The average rate at which spikes are emitted +:type averageRate: per_time + - - - - - - - (dimension: per_time): The average rate at which spikes are emitted - - + - Generator of spikes whose ISI distribution is the maximum entropy distribution over [ **minimumISI,** +infinity ) with mean: 1 / **averageRate** +\n +:param minimumISI: The minimum interspike interval +:type minimumISI: time +:param averageRate: The average rate at which spikes are emitted +:type averageRate: per_time + - - - - - - - (dimension: time): The minimum interspike interval - - + - Poisson spike generator firing at **averageRate,** which is connected to single **synapse** that is triggered every time a spike is generated, producing an input current. See also **transientPoissonFiringSynapse** . +\n +:param averageRate: The average rate at which spikes are emitted +:type averageRate: per_time + - - - - - - - (dimension: per_time): The average rate at which spikes are emitted - - + - Poisson spike generator firing at **averageRate** after a **delay** and for a **duration,** connected to single **synapse** that is triggered every time a spike is generated, providing an input current. Similar to ComponentType **poissonFiringSynapse** . +\n +:param averageRate: +:type averageRate: per_time +:param delay: +:type delay: time +:param duration: +:type duration: time + - - - - - - - (dimension: per_time) - - - - - (dimension: time) - - - - - (dimension: time) - - + + + - @@ -3375,10 +2696,6 @@ Network containing: **population** s ( potentially of type **populationList** , and so specifying a list of cell **location** s ); **projection** s ( with lists of **connection** s ) and/or **explicitConnection** s; and **inputList** s ( with lists of **input** s ) and/or **explicitInput** s. 
Note: often in NeuroML this will be of type **networkWithTemperature** if there are temperature dependent elements ( e. g. ion channels ). - - - - @@ -3403,7 +2720,6 @@ - @@ -3449,10 +2765,6 @@ Initial attempt to specify 3D region for placing cells. Work in progress. . . - - - - @@ -3462,17 +2774,16 @@ - A population of components, with just one parameter for the **size,** i. e. number of components to create. Note: quite often this is used with type= **populationList** which means the size is determined by the number of **instance** s ( with **location** s ) in the list. The **size** attribute is still set, and there will be a validation error if this does not match the number in the list. +\n +:param size: Number of instances of this Component to create when the population is instantiated +:type size: none + - - - - @@ -3480,16 +2791,11 @@ - - - (dimension: none): Number of instances of this Component to create when the population is instantiated - - + - @@ -3524,10 +2830,6 @@ Specifies a single instance of a component in a **population** ( placed at **location** ). - - - - @@ -3538,33 +2840,23 @@ - Specifies the ( x, y, z ) location of a single **instance** of a component in a **population** +\n +:param x: +:type x: none +:param y: +:type y: none +:param z: +:type z: none + - - - - - - - (dimension: none) - - - - - (dimension: none) - - - - - (dimension: none) - - - + + + @@ -3582,12 +2874,6 @@ Explicit event connection between named components, which gets processed via a new instance of a **synapse** component which is created on the target component - - - - - - @@ -3609,12 +2895,6 @@ Projection from one population, **presynapticPopulation** to another, **postsynapticPopulation,** through **synapse.** Contains lists of **connection** or **connectionWD** elements. - - - - - - @@ -3672,12 +2952,6 @@ Event connection directly between named components, which gets processed via a new instance of a **synapse** component which is created on the target component. Normally contained inside a **projection** element. - - - - - - @@ -3686,26 +2960,18 @@ Event connection between named components, which gets processed via a new instance of a synapse component which is created on the target component, includes setting of **weight** and **delay** for the synaptic connection +\n +:param weight: +:type weight: none +:param delay: +:type delay: time + - - - - - - - - - (dimension: none) - - - - - (dimension: time) - - + + @@ -3714,12 +2980,6 @@ A projection between **presynapticPopulation** to another **postsynapticPopulation** through gap junctions. - - - - - - @@ -3735,12 +2995,6 @@ To enable connections between populations through gap junctions. - - - - - - @@ -3752,12 +3006,6 @@ To enable connections between populations through gap junctions. Populations need to be of type **populationList** and contain **instance** and **location** elements. - - - - - - @@ -3765,21 +3013,15 @@ To enable connections between populations through gap junctions. Populations need to be of type **populationList** and contain **instance** and **location** elements. Includes setting of **weight** for the connection +\n +:param weight: +:type weight: none + - - - - - - - - - (dimension: none) - - + @@ -3788,12 +3030,6 @@ A projection between **presynapticPopulation** and **postsynapticPopulation** through components **preComponent** at the start and **postComponent** at the end of a **continuousConnection** or **continuousConnectionInstance** . Can be used for analog synapses. 
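For orientation, the network, population, projection and connection elements documented above correspond to generated Python classes in nml.py. A minimal sketch of assembling them with libNeuroML follows; the cell and synapse ids ("iafCell0", "expOneSynapse0") are placeholders, and the snake_case keyword spellings are assumed to match the generated member names rather than quoted from this patch.

    import neuroml

    doc = neuroml.NeuroMLDocument(id="ExampleNetworkDoc")
    net = neuroml.Network(id="net0")
    doc.networks.append(net)

    # Two single-cell populations of a hypothetical cell component "iafCell0"
    pre = neuroml.Population(id="pre", component="iafCell0", size=1)
    post = neuroml.Population(id="post", component="iafCell0", size=1)
    net.populations.append(pre)
    net.populations.append(post)

    # Chemical projection with one weighted, delayed connection
    proj = neuroml.Projection(id="proj0",
                              presynaptic_population="pre",
                              postsynaptic_population="post",
                              synapse="expOneSynapse0")
    proj.connection_wds.append(
        neuroml.ConnectionWD(id=0,
                             pre_cell_id="../pre[0]",
                             post_cell_id="../post[0]",
                             weight=1.0,
                             delay="5 ms"))
    net.projections.append(proj)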
- - - - - - @@ -3809,12 +3045,6 @@ An instance of a connection in a **continuousProjection** between **presynapticPopulation** to another **postsynapticPopulation** through a **preComponent** at the start and **postComponent** at the end. Can be used for analog synapses. - - - - - - @@ -3827,12 +3057,6 @@ An instance of a connection in a **continuousProjection** between **presynapticPopulation** to another **postsynapticPopulation** through a **preComponent** at the start and **postComponent** at the end. Populations need to be of type **populationList** and contain **instance** and **location** elements. Can be used for analog synapses. - - - - - - @@ -3840,21 +3064,15 @@ An instance of a connection in a **continuousProjection** between **presynapticPopulation** to another **postsynapticPopulation** through a **preComponent** at the start and **postComponent** at the end. Populations need to be of type **populationList** and contain **instance** and **location** elements. Can be used for analog synapses. Includes setting of **weight** for the connection +\n +:param weight: +:type weight: none + - - - - - - - - - (dimension: none) - - + @@ -3863,12 +3081,6 @@ An explicit input ( anything which extends **basePointCurrent** ) to a target cell in a population - - - - - - @@ -3878,12 +3090,6 @@ An explicit list of **input** s to a **population.** - - - - - - @@ -3900,12 +3106,6 @@ Specifies a single input to a **target,** optionally giving the **segmentId** ( default 0 ) and **fractionAlong** the segment ( default 0. 5 ). - - - - - - @@ -3915,21 +3115,15 @@ Specifies input lists. Can set **weight** to scale individual inputs. +\n +:param weight: +:type weight: none + - - - - - - - - - (dimension: none) - - + @@ -3938,405 +3132,487 @@ - Base type of any PyNN standard cell model. Note: membrane potential **v** has dimensions voltage, but all other parameters are dimensionless. This is to facilitate translation to and from PyNN scripts in Python, where these parameters have implicit units, see http://neuralensemble. org/trac/PyNN/wiki/StandardModels + Base type of any PyNN standard cell model. Note: membrane potential **v** has dimensions voltage, but all other parameters are dimensionless. This is to facilitate translation to and from PyNN scripts in Python, where these parameters have implicit units, see http://neuralensemble.org/trac/PyNN/wiki/StandardModels +\n +:param cm: +:type cm: none +:param i_offset: +:type i_offset: none +:param tau_syn_E: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell +:type tau_syn_E: none +:param tau_syn_I: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell +:type tau_syn_I: none +:param v_init: +:type v_init: none + - - - - - - - (dimension: none) - - - - - (dimension: none) - - - - - (dimension: none): This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell - - - - - (dimension: none): This parameter is never used in the NeuroML2 description of this cell! 
Any synapse producing a current can be placed on this cell - - - - - (dimension: none) - - + + + + + - Base type of any PyNN standard integrate and fire model +\n +:param tau_refrac: +:type tau_refrac: none +:param v_thresh: +:type v_thresh: none +:param tau_m: +:type tau_m: none +:param v_rest: +:type v_rest: none +:param v_reset: +:type v_reset: none +:param cm: +:type cm: none +:param i_offset: +:type i_offset: none +:param tau_syn_E: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell +:type tau_syn_E: none +:param tau_syn_I: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell +:type tau_syn_I: none +:param v_init: +:type v_init: none + - - - - - - - (dimension: none) - - - - - (dimension: none) - - - - - (dimension: none) - - - - - (dimension: none) - - - - - (dimension: none) - - + + + + + - Base type of conductance based PyNN IaF cell models +\n +:param e_rev_E: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell +:type e_rev_E: none +:param e_rev_I: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell +:type e_rev_I: none +:param tau_refrac: +:type tau_refrac: none +:param v_thresh: +:type v_thresh: none +:param tau_m: +:type tau_m: none +:param v_rest: +:type v_rest: none +:param v_reset: +:type v_reset: none +:param cm: +:type cm: none +:param i_offset: +:type i_offset: none +:param tau_syn_E: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell +:type tau_syn_E: none +:param tau_syn_I: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell +:type tau_syn_I: none +:param v_init: +:type v_init: none + - - - - - - - (dimension: none): This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell - - - - - (dimension: none): This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell - - + + - Leaky integrate and fire model with fixed threshold and alpha-function-shaped post-synaptic current +\n +:param tau_refrac: +:type tau_refrac: none +:param v_thresh: +:type v_thresh: none +:param tau_m: +:type tau_m: none +:param v_rest: +:type v_rest: none +:param v_reset: +:type v_reset: none +:param cm: +:type cm: none +:param i_offset: +:type i_offset: none +:param tau_syn_E: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell +:type tau_syn_E: none +:param tau_syn_I: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell +:type tau_syn_I: none +:param v_init: +:type v_init: none + - - - - - Leaky integrate and fire model with fixed threshold and decaying-exponential post-synaptic current +\n +:param tau_refrac: +:type tau_refrac: none +:param v_thresh: +:type v_thresh: none +:param tau_m: +:type tau_m: none +:param v_rest: +:type v_rest: none +:param v_reset: +:type v_reset: none +:param cm: +:type cm: none +:param i_offset: +:type i_offset: none +:param tau_syn_E: This parameter is never used in the NeuroML2 description of this cell! 
Any synapse producing a current can be placed on this cell +:type tau_syn_E: none +:param tau_syn_I: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell +:type tau_syn_I: none +:param v_init: +:type v_init: none + - - - - - Leaky integrate and fire model with fixed threshold and alpha-function-shaped post-synaptic conductance +\n +:param e_rev_E: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell +:type e_rev_E: none +:param e_rev_I: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell +:type e_rev_I: none +:param tau_refrac: +:type tau_refrac: none +:param v_thresh: +:type v_thresh: none +:param tau_m: +:type tau_m: none +:param v_rest: +:type v_rest: none +:param v_reset: +:type v_reset: none +:param cm: +:type cm: none +:param i_offset: +:type i_offset: none +:param tau_syn_E: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell +:type tau_syn_E: none +:param tau_syn_I: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell +:type tau_syn_I: none +:param v_init: +:type v_init: none + - - - - - Leaky integrate and fire model with fixed threshold and exponentially-decaying post-synaptic conductance +\n +:param e_rev_E: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell +:type e_rev_E: none +:param e_rev_I: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell +:type e_rev_I: none +:param tau_refrac: +:type tau_refrac: none +:param v_thresh: +:type v_thresh: none +:param tau_m: +:type tau_m: none +:param v_rest: +:type v_rest: none +:param v_reset: +:type v_reset: none +:param cm: +:type cm: none +:param i_offset: +:type i_offset: none +:param tau_syn_E: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell +:type tau_syn_E: none +:param tau_syn_I: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell +:type tau_syn_I: none +:param v_init: +:type v_init: none + - - - - - Adaptive exponential integrate and fire neuron according to Brette R and Gerstner W ( 2005 ) with exponentially-decaying post-synaptic conductance +\n +:param v_spike: +:type v_spike: none +:param delta_T: +:type delta_T: none +:param tau_w: +:type tau_w: none +:param a: +:type a: none +:param b: +:type b: none +:param e_rev_E: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell +:type e_rev_E: none +:param e_rev_I: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell +:type e_rev_I: none +:param tau_refrac: +:type tau_refrac: none +:param v_thresh: +:type v_thresh: none +:param tau_m: +:type tau_m: none +:param v_rest: +:type v_rest: none +:param v_reset: +:type v_reset: none +:param cm: +:type cm: none +:param i_offset: +:type i_offset: none +:param tau_syn_E: This parameter is never used in the NeuroML2 description of this cell! 
Any synapse producing a current can be placed on this cell +:type tau_syn_E: none +:param tau_syn_I: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell +:type tau_syn_I: none +:param v_init: +:type v_init: none + - - - - - - - (dimension: none) - - - - - (dimension: none) - - - - - (dimension: none) - - - - - (dimension: none) - - - - - (dimension: none) - - + + + + + - Adaptive exponential integrate and fire neuron according to Brette R and Gerstner W ( 2005 ) with alpha-function-shaped post-synaptic conductance +\n +:param v_spike: +:type v_spike: none +:param delta_T: +:type delta_T: none +:param tau_w: +:type tau_w: none +:param a: +:type a: none +:param b: +:type b: none +:param e_rev_E: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell +:type e_rev_E: none +:param e_rev_I: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell +:type e_rev_I: none +:param tau_refrac: +:type tau_refrac: none +:param v_thresh: +:type v_thresh: none +:param tau_m: +:type tau_m: none +:param v_rest: +:type v_rest: none +:param v_reset: +:type v_reset: none +:param cm: +:type cm: none +:param i_offset: +:type i_offset: none +:param tau_syn_E: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell +:type tau_syn_E: none +:param tau_syn_I: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell +:type tau_syn_I: none +:param v_init: +:type v_init: none + - - - - - Single-compartment Hodgkin-Huxley-type neuron with transient sodium and delayed-rectifier potassium currents using the ion channel models from Traub. +\n +:param gbar_K: +:type gbar_K: none +:param gbar_Na: +:type gbar_Na: none +:param g_leak: +:type g_leak: none +:param e_rev_K: +:type e_rev_K: none +:param e_rev_Na: +:type e_rev_Na: none +:param e_rev_leak: +:type e_rev_leak: none +:param v_offset: +:type v_offset: none +:param e_rev_E: +:type e_rev_E: none +:param e_rev_I: +:type e_rev_I: none +:param cm: +:type cm: none +:param i_offset: +:type i_offset: none +:param tau_syn_E: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell +:type tau_syn_E: none +:param tau_syn_I: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell +:type tau_syn_I: none +:param v_init: +:type v_init: none + - - - - - - - (dimension: none) - - - - - (dimension: none) - - - - - (dimension: none) - - - - - (dimension: none) - - - - - (dimension: none) - - - - - (dimension: none) - - - - - (dimension: none) - - - - - (dimension: none) - - - - - (dimension: none) - - + + + + + + + + + - Base type for all PyNN synapses. Note, the current **I** produced is dimensionless, but it requires a membrane potential **v** with dimension voltage +\n +:param tau_syn: +:type tau_syn: none + - - - - - - - (dimension: none) - - + - Conductance based synapse with instantaneous rise and single exponential decay ( with time constant tau_syn ) +\n +:param e_rev: +:type e_rev: none +:param tau_syn: +:type tau_syn: none + - - - - - - - (dimension: none) - - + - Alpha synapse: rise time and decay time are both tau_syn. Conductance based synapse. 
+\n +:param e_rev: +:type e_rev: none +:param tau_syn: +:type tau_syn: none + - - - - - - - (dimension: none) - - + - Current based synapse with instantaneous rise and single exponential decay ( with time constant tau_syn ) +\n +:param tau_syn: +:type tau_syn: none + - - - - - Alpha synapse: rise time and decay time are both tau_syn. Current based synapse. +\n +:param tau_syn: +:type tau_syn: none + - - - - - Spike source, generating spikes according to a Poisson process. +\n +:param start: +:type start: time +:param duration: +:type duration: time +:param rate: +:type rate: per_time + - - - - - - - (dimension: time) - - - - - (dimension: time) - - - - - (dimension: per_time) - - + + + - From 3b7306a66b0ff2f6fa1bcd26879680e36f1afe65 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Thu, 18 Nov 2021 10:45:59 +0000 Subject: [PATCH 121/136] chore: regenerate nml.py --- neuroml/nml/nml.py | 1223 ++++++++++++++++++++++++++++++-------------- 1 file changed, 853 insertions(+), 370 deletions(-) diff --git a/neuroml/nml/nml.py b/neuroml/nml/nml.py index 4921c3e6..80e988c3 100644 --- a/neuroml/nml/nml.py +++ b/neuroml/nml/nml.py @@ -2,7 +2,7 @@ # -*- coding: utf-8 -*- # -# Generated Wed Nov 17 17:20:08 2021 by generateDS.py version 2.40.3. +# Generated Thu Nov 18 10:42:12 2021 by generateDS.py version 2.40.3. # Python 3.10.0 (default, Oct 4 2021, 00:00:00) [GCC 11.2.1 20210728 (Red Hat 11.2.1-1)] # # Command line options: @@ -4771,9 +4771,11 @@ def _buildChildren( class Q10ConductanceScaling(GeneratedsSuper): """Q10ConductanceScaling -- A value for the conductance scaling which varies as a standard function of the difference between the current temperature, **temperature,** and the temperature at which the conductance was originally determined, **experimentalTemp** - - * q10Factor -- (dimension: none) - * experimentalTemp -- (dimension: temperature) + \n + :param q10Factor: + :type q10Factor: none + :param experimentalTemp: + :type experimentalTemp: temperature """ @@ -7221,11 +7223,15 @@ def _buildChildren( class Point3DWithDiam(GeneratedsSuper): """Point3DWithDiam -- Base type for ComponentTypes which specify an ( **x,** **y,** **z** ) coordinate along with a **diameter.** Note: no dimension used in the attributes for these coordinates! These are assumed to have dimension micrometer ( 10^-6 m ). This is due to micrometers being the default option for the majority of neuronal morphology formats, and dimensions are omitted here to facilitate reading and writing of morphologies in NeuroML. - - * x -- (dimension: none): x coordinate of the point. Note: no dimension used, see description of **point3DWithDiam** for details. - * y -- (dimension: none): y coordinate of the ppoint. Note: no dimension used, see description of **point3DWithDiam** for details. - * z -- (dimension: none): z coordinate of the ppoint. Note: no dimension used, see description of **point3DWithDiam** for details. - * diameter -- (dimension: none): Diameter of the ppoint. Note: no dimension used, see description of **point3DWithDiam** for details. + \n + :param x: x coordinate of the point. Note: no dimension used, see description of **point3DWithDiam** for details. + :type x: none + :param y: y coordinate of the ppoint. Note: no dimension used, see description of **point3DWithDiam** for details. + :type y: none + :param z: z coordinate of the ppoint. Note: no dimension used, see description of **point3DWithDiam** for details. + :type z: none + :param diameter: Diameter of the ppoint. 
Note: no dimension used, see description of **point3DWithDiam** for details. + :type diameter: none """ @@ -9504,8 +9510,9 @@ def _buildChildren( class SpikeThresh(GeneratedsSuper): """SpikeThresh -- Membrane potential at which to emit a spiking event. Note, usually the spiking event will not be emitted again until the membrane potential has fallen below this value and rises again to cross it in a positive direction - - * value -- (dimension: voltage) + \n + :param value: + :type value: voltage """ @@ -9747,8 +9754,9 @@ def _buildChildren( class SpecificCapacitance(GeneratedsSuper): """SpecificCapacitance -- Capacitance per unit area - - * value -- (dimension: specificCapacitance) + \n + :param value: + :type value: specificCapacitance """ @@ -10000,8 +10008,9 @@ def _buildChildren( class InitMembPotential(GeneratedsSuper): """InitMembPotential -- Explicitly set initial membrane potential for the cell - - * value -- (dimension: voltage) + \n + :param value: + :type value: voltage """ @@ -10247,8 +10256,9 @@ def _buildChildren( class Resistivity(GeneratedsSuper): """Resistivity -- The resistivity, or specific axial resistance, of the cytoplasm - - * value -- (dimension: resistivity) + \n + :param value: + :type value: resistivity """ @@ -10895,13 +10905,13 @@ def _buildChildren( class Species(GeneratedsSuper): - """Species -- Description of a chemical species identified by **ion,** which has internal, **concentration,** and external, **extConcentration** values for its concentration - - * ion -- Specifying the ion here again is redundant, the ion name should be the same as id. Kept for now - until LEMS implementation can select by id. TODO: remove. - - * initialConcentration -- (dimension: concentration) - * initialExtConcentration -- (dimension: concentration) + """Species -- Description of a chemical sp + ecies identified by **ion,** which has internal, **concentration,** and external, **extConcentration** values for its concentration + \n + :param initialConcentration: + :type initialConcentration: concentration + :param initialExtConcentration: + :type initialExtConcentration: concentration """ @@ -13065,10 +13075,13 @@ def __repr__(self): class Location(GeneratedsSuper): """Location -- Specifies the ( x, y, z ) location of a single **instance** of a component in a **population** - - * x -- (dimension: none) - * y -- (dimension: none) - * z -- (dimension: none) + \n + :param x: + :type x: none + :param y: + :type y: none + :param z: + :type z: none """ @@ -14158,8 +14171,9 @@ def __str__(self): class InputW(Input): """InputW -- Specifies input lists. Can set **weight** to scale individual inputs. - - * weight -- (dimension: none) + \n + :param weight: + :type weight: none """ @@ -15484,10 +15498,13 @@ def _buildChildren( class SpikeSourcePoisson(Standalone): """SpikeSourcePoisson -- Spike source, generating spikes according to a Poisson process. - - * start -- (dimension: time) - * duration -- (dimension: time) - * rate -- (dimension: per_time) + \n + :param start: + :type start: time + :param duration: + :type duration: time + :param rate: + :type rate: per_time """ @@ -16759,8 +16776,9 @@ def _buildChildren( class Population(Standalone): """Population -- A population of components, with just one parameter for the **size,** i. e. number of components to create. Note: quite often this is used with type= **populationList** which means the size is determined by the number of **instance** s ( with **location** s ) in the list. 
The **size** attribute is still set, and there will be a validation error if this does not match the number in the list. - - * size -- (dimension: none): Number of instances of this Component to create when the population is instantiated + \n + :param size: Number of instances of this Component to create when the population is instantiated + :type size: none """ @@ -18410,10 +18428,13 @@ def exportHdf5(self, h5file, h5Group): class TransientPoissonFiringSynapse(Standalone): """TransientPoissonFiringSynapse -- Poisson spike generator firing at **averageRate** after a **delay** and for a **duration,** connected to single **synapse** that is triggered every time a spike is generated, providing an input current. Similar to ComponentType **poissonFiringSynapse** . - - * averageRate -- (dimension: per_time) - * delay -- (dimension: time) - * duration -- (dimension: time) + \n + :param averageRate: + :type averageRate: per_time + :param delay: + :type delay: time + :param duration: + :type duration: time """ @@ -18780,8 +18801,9 @@ def _buildChildren( class PoissonFiringSynapse(Standalone): """PoissonFiringSynapse -- Poisson spike generator firing at **averageRate,** which is connected to single **synapse** that is triggered every time a spike is generated, producing an input current. See also **transientPoissonFiringSynapse** . - - * averageRate -- (dimension: per_time): The average rate at which spikes are emitted + \n + :param averageRate: The average rate at which spikes are emitted + :type averageRate: per_time """ @@ -19057,8 +19079,9 @@ def _buildChildren( class SpikeGeneratorPoisson(Standalone): """SpikeGeneratorPoisson -- Generator of spikes whose ISI is distributed according to an exponential PDF with scale: 1 / **averageRate** - - * averageRate -- (dimension: per_time): The average rate at which spikes are emitted + \n + :param averageRate: The average rate at which spikes are emitted + :type averageRate: per_time """ @@ -19311,9 +19334,11 @@ def _buildChildren( class SpikeGeneratorRandom(Standalone): """SpikeGeneratorRandom -- Generator of spikes with a random interspike interval of at least **minISI** and at most **maxISI** - - * maxISI -- (dimension: time): Maximum interspike interval - * minISI -- (dimension: time): Minimum interspike interval + \n + :param maxISI: Maximum interspike interval + :type maxISI: time + :param minISI: Minimum interspike interval + :type minISI: time """ @@ -19562,8 +19587,9 @@ def _buildChildren( class SpikeGenerator(Standalone): """SpikeGenerator -- Simple generator of spikes at a regular interval set by **period** - - * period -- (dimension: time): Time between spikes. The first spike will be emitted after this time. + \n + :param period: Time between spikes. The first spike will be emitted after this time. + :type period: time """ @@ -20243,8 +20269,9 @@ def _buildChildren( class Spike(BaseNonNegativeIntegerId): """Spike -- Emits a single spike at the specified **time** - - * time -- (dimension: time): Time at which to emit one spike event + \n + :param time: Time at which to emit one spike event + :type time: time """ @@ -20440,14 +20467,21 @@ def _buildChildren( class VoltageClampTriple(Standalone): """VoltageClampTriple -- Voltage clamp with 3 clamp levels. Applies a variable current **i** ( through **simpleSeriesResistance** ) to try to keep parent cell at **conditioningVoltage** until time **delay,** **testingVoltage** until **delay** + **duration,** and **returnVoltage** afterwards. Only enabled if **active** = 1. 
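The spike-source and firing-synapse classes documented above are built the same way; a brief sketch, with an assumed synapse id "syn0" and keyword names assumed to follow the generated snake_case members.

    import neuroml

    # Regular spiking at a fixed period
    sg = neuroml.SpikeGenerator(id="spiker0", period="20 ms")

    # Poisson spiking at an average rate
    sgp = neuroml.SpikeGeneratorPoisson(id="poisson0", average_rate="50 Hz")

    # Poisson-driven synaptic input onto a target cell's synapse
    pfs = neuroml.PoissonFiringSynapse(id="pfs0",
                                       average_rate="150 Hz",
                                       synapse="syn0",
                                       spike_target="./syn0")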
- - * active -- (dimension: none): Whether the voltage clamp is active ( 1 ) or inactive ( 0 ). - * delay -- (dimension: time): Delay before switching from conditioningVoltage to testingVoltage. - * duration -- (dimension: time): Duration to hold at testingVoltage. - * conditioningVoltage -- (dimension: voltage): Target voltage before time delay - * testingVoltage -- (dimension: voltage): Target voltage between times delay and delay + duration - * returnVoltage -- (dimension: voltage): Target voltage after time duration - * simpleSeriesResistance -- (dimension: resistance): Current will be calculated by the difference in voltage between the target and parent, divided by this value + \n + :param active: Whether the voltage clamp is active ( 1 ) or inactive ( 0 ). + :type active: none + :param delay: Delay before switching from conditioningVoltage to testingVoltage. + :type delay: time + :param duration: Duration to hold at testingVoltage. + :type duration: time + :param conditioningVoltage: Target voltage before time delay + :type conditioningVoltage: voltage + :param testingVoltage: Target voltage between times delay and delay + duration + :type testingVoltage: voltage + :param returnVoltage: Target voltage after time duration + :type returnVoltage: voltage + :param simpleSeriesResistance: Current will be calculated by the difference in voltage between the target and parent, divided by this value + :type simpleSeriesResistance: resistance """ @@ -20937,11 +20971,15 @@ def _buildChildren( class VoltageClamp(Standalone): """VoltageClamp -- Voltage clamp. Applies a variable current **i** to try to keep parent at **targetVoltage.** Not yet fully tested!!! Consider using voltageClampTriple!! - - * delay -- (dimension: time): Delay before change in current. Current is zero prior to this. - * duration -- (dimension: time): Duration for attempting to keep parent at targetVoltage. Current is zero after delay + duration. - * targetVoltage -- (dimension: voltage): Current will be applied to try to get parent to this target voltage - * simpleSeriesResistance -- (dimension: resistance): Current will be calculated by the difference in voltage between the target and parent, divided by this value + \n + :param delay: Delay before change in current. Current is zero prior to this. + :type delay: time + :param duration: Duration for attempting to keep parent at targetVoltage. Current is zero after delay + duration. + :type duration: time + :param targetVoltage: Current will be applied to try to get parent to this target voltage + :type targetVoltage: voltage + :param simpleSeriesResistance: Current will be calculated by the difference in voltage between the target and parent, divided by this value + :type simpleSeriesResistance: resistance """ @@ -21860,12 +21898,17 @@ def _buildChildren( class RampGeneratorDL(Standalone): """RampGeneratorDL -- Dimensionless equivalent of **rampGenerator** . Generates a ramping current after a time **delay,** for a fixed **duration.** During this time the dimensionless current steadily changes from **startAmplitude** to **finishAmplitude.** Scaled by **weight,** if set - - * delay -- (dimension: time): Delay before change in current. Current is baselineAmplitude prior to this. - * duration -- (dimension: time): Duration for holding current at amplitude. Current is baselineAmplitude after delay + duration. 
- * startAmplitude -- (dimension: none): Amplitude of linearly varying current at time delay - * finishAmplitude -- (dimension: none): Amplitude of linearly varying current at time delay + duration - * baselineAmplitude -- (dimension: none): Amplitude of current before time delay, and after time delay + duration + \n + :param delay: Delay before change in current. Current is baselineAmplitude prior to this. + :type delay: time + :param duration: Duration for holding current at amplitude. Current is baselineAmplitude after delay + duration. + :type duration: time + :param startAmplitude: Amplitude of linearly varying current at time delay + :type startAmplitude: none + :param finishAmplitude: Amplitude of linearly varying current at time delay + duration + :type finishAmplitude: none + :param baselineAmplitude: Amplitude of current before time delay, and after time delay + duration + :type baselineAmplitude: none """ @@ -22237,12 +22280,17 @@ def _buildChildren( class RampGenerator(Standalone): """RampGenerator -- Generates a ramping current after a time **delay,** for a fixed **duration.** During this time the current steadily changes from **startAmplitude** to **finishAmplitude.** Scaled by **weight,** if set - - * delay -- (dimension: time): Delay before change in current. Current is baselineAmplitude prior to this. - * duration -- (dimension: time): Duration for holding current at amplitude. Current is baselineAmplitude after delay + duration. - * startAmplitude -- (dimension: current): Amplitude of linearly varying current at time delay - * finishAmplitude -- (dimension: current): Amplitude of linearly varying current at time delay + duration - * baselineAmplitude -- (dimension: current): Amplitude of current before time delay, and after time delay + duration + \n + :param delay: Delay before change in current. Current is baselineAmplitude prior to this. + :type delay: time + :param duration: Duration for holding current at amplitude. Current is baselineAmplitude after delay + duration. + :type duration: time + :param startAmplitude: Amplitude of linearly varying current at time delay + :type startAmplitude: current + :param finishAmplitude: Amplitude of linearly varying current at time delay + duration + :type finishAmplitude: current + :param baselineAmplitude: Amplitude of current before time delay, and after time delay + duration + :type baselineAmplitude: current """ @@ -22614,12 +22662,17 @@ def _buildChildren( class SineGeneratorDL(Standalone): """SineGeneratorDL -- Dimensionless equivalent of **sineGenerator** . Generates a sinusoidally varying current after a time **delay,** for a fixed **duration.** The **period** and maximum **amplitude** of the current can be set as well as the **phase** at which to start. Scaled by **weight,** if set - - * delay -- (dimension: time): Delay before change in current. Current is zero prior to this. - * phase -- (dimension: none): Phase ( between 0 and 2*pi ) at which to start the varying current ( i. e. at time given by delay ) - * duration -- (dimension: time): Duration for holding current at amplitude. Current is zero after delay + duration. - * amplitude -- (dimension: none): Maximum amplitude of current - * period -- (dimension: time): Time period of oscillation + \n + :param phase: Phase ( between 0 and 2*pi ) at which to start the varying current ( i. e. at time given by delay ) + :type phase: none + :param delay: Delay before change in current. Current is zero prior to this. 
+ :type delay: time + :param duration: Duration for holding current at amplitude. Current is zero after delay + duration. + :type duration: time + :param amplitude: Maximum amplitude of current + :type amplitude: none + :param period: Time period of oscillation + :type period: time """ @@ -22971,12 +23024,17 @@ def _buildChildren( class SineGenerator(Standalone): """SineGenerator -- Generates a sinusoidally varying current after a time **delay,** for a fixed **duration.** The **period** and maximum **amplitude** of the current can be set as well as the **phase** at which to start. Scaled by **weight,** if set - - * delay -- (dimension: time): Delay before change in current. Current is zero prior to this. - * phase -- (dimension: none): Phase ( between 0 and 2*pi ) at which to start the varying current ( i. e. at time given by delay ) - * duration -- (dimension: time): Duration for holding current at amplitude. Current is zero after delay + duration. - * amplitude -- (dimension: current): Maximum amplitude of current - * period -- (dimension: time): Time period of oscillation + \n + :param phase: Phase ( between 0 and 2*pi ) at which to start the varying current ( i. e. at time given by delay ) + :type phase: none + :param delay: Delay before change in current. Current is zero prior to this. + :type delay: time + :param duration: Duration for holding current at amplitude. Current is zero after delay + duration. + :type duration: time + :param amplitude: Maximum amplitude of current + :type amplitude: current + :param period: Time period of oscillation + :type period: time """ @@ -23360,10 +23418,13 @@ def _buildChildren( class PulseGeneratorDL(Standalone): """PulseGeneratorDL -- Dimensionless equivalent of **pulseGenerator** . Generates a constant current pulse of a certain **amplitude** for a specified **duration** after a **delay.** Scaled by **weight,** if set - - * delay -- (dimension: time): Delay before change in current. Current is zero prior to this. - * duration -- (dimension: time): Duration for holding current at amplitude. Current is zero after delay + duration. - * amplitude -- (dimension: none): Amplitude of current pulse + \n + :param delay: Delay before change in current. Current is zero prior to this. + :type delay: time + :param duration: Duration for holding current at amplitude. Current is zero after delay + duration. + :type duration: time + :param amplitude: Amplitude of current pulse + :type amplitude: none """ @@ -23673,10 +23734,13 @@ def _buildChildren( class PulseGenerator(Standalone): """PulseGenerator -- Generates a constant current pulse of a certain **amplitude** for a specified **duration** after a **delay.** Scaled by **weight,** if set - - * delay -- (dimension: time): Delay before change in current. Current is zero prior to this. - * duration -- (dimension: time): Duration for holding current at amplitude. Current is zero after delay + duration. - * amplitude -- (dimension: current): Amplitude of current pulse + \n + :param delay: Delay before change in current. Current is zero prior to this. + :type delay: time + :param duration: Duration for holding current at amplitude. Current is zero after delay + duration. 
+ :type duration: time + :param amplitude: Amplitude of current pulse + :type amplitude: current """ @@ -24382,14 +24446,10 @@ def _buildChildren( class ChannelDensityGHK2(Base): - """ChannelDensityGHK2 -- Time varying conductance density, **gDensity,** which is distributed on an area of the cell, producing a current density **iDensity.** Modified version of Jaffe et al. 1994 ( used also in Lawrence et al. 2006 ). See https://github. com/OpenSourceBrain/ghk-nernst. - - * ion -- Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here - TEMPORARILY since selecting all ca or na conducting channel populations/densities in a cell would be difficult otherwise. - Also, it will make it easier to set the correct native simulator value for erev (e.g. ek for ion = k in NEURON). - Currently a required attribute. - It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel - element. TODO: remove. + """ChannelDensityGHK2 -- Time varying conductance density, **gDensity,** which is distributed on an area of the cell, producing a current density **iDensity.** Modified version of Jaffe et al. 1994 ( used also in Lawrence et al. 2006 ). See https://github.com/OpenSourceBrain/ghk-nernst. + \n + :param condDensity: + :type condDensity: conductanceDensity """ @@ -24732,15 +24792,10 @@ def _buildChildren( class ChannelDensityGHK(Base): - """ChannelDensityGHK -- Specifies a time varying conductance density, **gDensity,** which is distributed on an area of the cell, producing a current density **iDensity** and whose reversal potential is calculated from the Goldman Hodgkin Katz equation. Hard coded for Ca only! See https://github. com/OpenSourceBrain/ghk-nernst. - - * permeability -- (dimension: permeability) - * ion -- Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here - TEMPORARILY since selecting all ca or na conducting channel populations/densities in a cell would be difficult otherwise. - Also, it will make it easier to set the correct native simulator value for erev (e.g. ek for ion = k in NEURON). - Currently a required attribute. - It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel - element. TODO: remove. + """ChannelDensityGHK -- Specifies a time varying conductance density, **gDensity,** which is distributed on an area of the cell, producing a current density **iDensity** and whose reversal potential is calculated from the Goldman Hodgkin Katz equation. Hard coded for Ca only! See https://github.com/OpenSourceBrain/ghk-nernst. + \n + :param permeability: + :type permeability: permeability """ @@ -24872,7 +24927,7 @@ def validate_Nml2Quantity_permeability(self, value): validate_Nml2Quantity_permeability_patterns_ = [ [ - "^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(m_per_s|um_per_ms|cm_per_s|cm_per_ms))$" + "^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(m_per_s| um_per_ms|cm_per_s|cm_per_ms))$" ] ] @@ -25081,14 +25136,10 @@ def _buildChildren( class ChannelDensityNernst(Base): - """ChannelDensityNernst -- Specifies a time varying conductance density, **gDensity,** which is distributed on an area of the **cell,** producing a current density **iDensity** and whose reversal potential is calculated from the Nernst equation. Hard coded for Ca only! See https://github. com/OpenSourceBrain/ghk-nernst. 
- - * ion -- Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here - TEMPORARILY since selecting all ca or na conducting channel populations/densities in a cell would be difficult otherwise. - Also, it will make it easier to set the correct native simulator value for erev (e.g. ek for ion = k in NEURON). - Currently a required attribute. - It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel - element. TODO: remove. + """ChannelDensityNernst -- Specifies a time varying conductance density, **gDensity,** which is distributed on an area of the **cell,** producing a current density **iDensity** and whose reversal potential is calculated from the Nernst equation. Hard coded for Ca only! See https://github.com/OpenSourceBrain/ghk-nernst. + \n + :param condDensity: + :type condDensity: conductanceDensity """ @@ -25493,14 +25544,11 @@ def _buildChildren( class ChannelDensity(Base): """ChannelDensity -- Specifies a time varying ohmic conductance density, **gDensity,** which is distributed on an area of the **cell** ( specified in **membraneProperties** ) with fixed reversal potential **erev** producing a current density **iDensity** - - * erev -- (dimension: voltage): The reversal potential of the current produced - * ion -- Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here - TEMPORARILY since selecting all ca or na conducting channel populations/densities in a cell would be difficult otherwise. - Also, it will make it easier to set the correct native simulator value for erev (e.g. ek for ion = k in NEURON). - Currently a required attribute. - It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel - element. TODO: remove. + \n + :param erev: The reversal potential of the current produced + :type erev: voltage + :param condDensity: + :type condDensity: conductanceDensity """ @@ -25972,16 +26020,7 @@ def _buildChildren( class ChannelDensityNonUniformGHK(Base): - """ChannelDensityNonUniformGHK -- Specifies a time varying conductance density, which is distributed on a region of the **cell,** and whose current is calculated from the Goldman-Hodgkin-Katz equation. Hard coded for Ca only!. The conductance density of the channel is not uniform, but is set using the **variableParameter** . Note, there is no dynamical description of this in LEMS yet, as this type only makes sense for multicompartmental cells. A ComponentType for this needs to be present to enable export of NeuroML 2 multicompartmental cells via LEMS/jNeuroML to NEURON - - * ion -- Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here - TEMPORARILY since selecting all ca or na conducting channel populations/densities in a cell would be difficult otherwise. - Also, it will make it easier to set the correct native simulator value for erev (e.g. ek for ion = k in NEURON). - Currently a required attribute. - It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel - element. TODO: remove. - - """ + """ChannelDensityNonUniformGHK -- Specifies a time varying conductance density, which is distributed on a region of the **cell,** and whose current is calculated from the Goldman-Hodgkin-Katz equation. Hard coded for Ca only!. The conductance density of the channel is not uniform, but is set using the **variableParameter** . 
Note, there is no dynamical description of this in LEMS yet, as this type only makes sense for multicompartmental cells. A ComponentType for this needs to be present to enable export of NeuroML 2 multicompartmental cells via LEMS/jNeuroML to NEURON""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -26262,16 +26301,7 @@ def _buildChildren( class ChannelDensityNonUniformNernst(Base): - """ChannelDensityNonUniformNernst -- Specifies a time varying conductance density, which is distributed on a region of the **cell,** and whose reversal potential is calculated from the Nernst equation. Hard coded for Ca only!. The conductance density of the channel is not uniform, but is set using the **variableParameter** . Note, there is no dynamical description of this in LEMS yet, as this type only makes sense for multicompartmental cells. A ComponentType for this needs to be present to enable export of NeuroML 2 multicompartmental cells via LEMS/jNeuroML to NEURON - - * ion -- Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here - TEMPORARILY since selecting all ca or na conducting channel populations/densities in a cell would be difficult otherwise. - Also, it will make it easier to set the correct native simulator value for erev (e.g. ek for ion = k in NEURON). - Currently a required attribute. - It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel - element. TODO: remove. - - """ + """ChannelDensityNonUniformNernst -- Specifies a time varying conductance density, which is distributed on a region of the **cell,** and whose reversal potential is calculated from the Nernst equation. Hard coded for Ca only!. The conductance density of the channel is not uniform, but is set using the **variableParameter** . Note, there is no dynamical description of this in LEMS yet, as this type only makes sense for multicompartmental cells. A ComponentType for this needs to be present to enable export of NeuroML 2 multicompartmental cells via LEMS/jNeuroML to NEURON""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -26555,14 +26585,9 @@ def _buildChildren( class ChannelDensityNonUniform(Base): """ChannelDensityNonUniform -- Specifies a time varying ohmic conductance density, which is distributed on a region of the **cell.** The conductance density of the channel is not uniform, but is set using the **variableParameter** . Note, there is no dynamical description of this in LEMS yet, as this type only makes sense for multicompartmental cells. A ComponentType for this needs to be present to enable export of NeuroML 2 multicompartmental cells via LEMS/jNeuroML to NEURON - - * erev -- (dimension: voltage): The reversal potential of the current produced - * ion -- Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here - TEMPORARILY since selecting all ca or na conducting channel populations/densities in a cell would be difficult otherwise. - Also, it will make it easier to set the correct native simulator value for erev (e.g. ek for ion = k in NEURON). - Currently a required attribute. - It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel - element. TODO: remove. 
+ \n + :param erev: The reversal potential of the current produced + :type erev: voltage """ @@ -26900,15 +26925,11 @@ def _buildChildren( class ChannelPopulation(Base): """ChannelPopulation -- Population of a **number** of ohmic ion channels. These each produce a conductance **channelg** across a reversal potential **erev,** giving a total current **i.** Note that active membrane currents are more frequently specified as a density over an area of the **cell** using **channelDensity** - - * number -- (dimension: none): The number of channels present. This will be multiplied by the time varying conductance of the individual ion channel ( which extends **baseIonChannel** ) to produce the total conductance - * erev -- (dimension: voltage): The reversal potential of the current produced - * ion -- Specifying the ion here again is redundant, this will be set in ionChannel definition. It is added here - TEMPORARILY since selecting all ca or na conducting channel populations/densities in a cell would be difficult otherwise. - Also, it will make it easier to set the correct native simulator value for erev (e.g. ek for ion = k in NEURON). - Currently a required attribute. - It should be removed in the longer term, due to possible inconsistencies in this value and that in the ionChannel - element. TODO: remove. + \n + :param number: The number of channels present. This will be multiplied by the time varying conductance of the individual ion channel ( which extends **baseIonChannel** ) to produce the total conductance + :type number: none + :param erev: The reversal potential of the current produced + :type erev: voltage """ @@ -29641,11 +29662,13 @@ def _buildChildren( class FixedFactorConcentrationModel(Standalone): """FixedFactorConcentrationModel -- Model of buffering of concentration of an ion ( currently hard coded to be calcium, due to requirement for **iCa** ) which has a baseline level **restingConc** and tends to this value with time course **decayConstant.** A fixed factor **rho** is used to scale the incoming current *independently of the size of the compartment* to produce a concentration change. - - * ion -- Should not be required, as it's present on the species element! - * restingConc -- (dimension: concentration) - * decayConstant -- (dimension: time) - * rho -- (dimension: rho_factor) + \n + :param restingConc: + :type restingConc: concentration + :param decayConstant: + :type decayConstant: time + :param rho: + :type rho: rho_factor """ @@ -30048,13 +30071,14 @@ def _buildChildren( class DecayingPoolConcentrationModel(Standalone): - """DecayingPoolConcentrationModel -- Model of an intracellular buffering mechanism for **ion** ( currently hard Coded to be calcium, due to requirement for **iCa** ) which has a baseline level **restingConc** and tends to this value with time course * - *decayConstant.** The ion is assumed to occupy a shell inside the membrane of thickness **shellThickness.** - - * ion -- Should not be required, as it's present on the species element! 
- * restingConc -- (dimension: concentration) - * decayConstant -- (dimension: time) - * shellThickness -- (dimension: length) + """DecayingPoolConcentrationModel -- Model of an intracellular buffering mechanism for **ion** ( currently hard Coded to be calcium, due to requirement for **iCa** ) which has a baseline level **restingConc** and tends to this value with time course **decayConstant.** The ion is assumed to occupy a shell inside the membrane of thickness **shellThickness.** + \n + :param restingConc: + :type restingConc: concentration + :param decayConstant: + :type decayConstant: time + :param shellThickness: + :type shellThickness: length """ @@ -30875,7 +30899,12 @@ def _buildChildren( class GateFractional(Base): - """GateFractional -- Gate composed of subgates contributing with fractional conductance""" + """GateFractional -- Gate composed of subgates contributing with fractional conductance + \n + :param instances: + :type instances: none + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -31204,7 +31233,12 @@ def _buildChildren( class GateHHInstantaneous(Base): - """GateHHInstantaneous -- Gate which follows the general Hodgkin Huxley formalism but is instantaneous, so tau = 0 and gate follows exactly inf value""" + """GateHHInstantaneous -- Gate which follows the general Hodgkin Huxley formalism but is instantaneous, so tau = 0 and gate follows exactly inf value + \n + :param instances: + :type instances: none + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -31506,7 +31540,12 @@ def _buildChildren( class GateHHRatesInf(Base): - """GateHHRatesInf -- Gate which follows the general Hodgkin Huxley formalism""" + """GateHHRatesInf -- Gate which follows the general Hodgkin Huxley formalism + \n + :param instances: + :type instances: none + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -31889,7 +31928,12 @@ def _buildChildren( class GateHHRatesTau(Base): - """GateHHRatesTau -- Gate which follows the general Hodgkin Huxley formalism""" + """GateHHRatesTau -- Gate which follows the general Hodgkin Huxley formalism + \n + :param instances: + :type instances: none + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -32272,7 +32316,12 @@ def _buildChildren( class GateHHRatesTauInf(Base): - """GateHHRatesTauInf -- Gate which follows the general Hodgkin Huxley formalism""" + """GateHHRatesTauInf -- Gate which follows the general Hodgkin Huxley formalism + \n + :param instances: + :type instances: none + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -32694,7 +32743,12 @@ def _buildChildren( class GateHHTauInf(Base): - """GateHHTauInf -- Gate which follows the general Hodgkin Huxley formalism""" + """GateHHTauInf -- Gate which follows the general Hodgkin Huxley formalism + \n + :param instances: + :type instances: none + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -33044,7 +33098,12 @@ def _buildChildren( class GateHHRates(Base): - """GateHHRates -- Gate which follows the general Hodgkin Huxley formalism""" + """GateHHRates -- Gate which follows the general Hodgkin Huxley formalism + \n + :param instances: + :type instances: none + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -33914,7 +33973,12 @@ def _buildChildren( class GateKS(Base): - """GateKS -- A gate which consists of multiple **KSState** s and **KSTransition** s giving the rates of transition between them""" + """GateKS -- A gate which consists of multiple **KSState** s and 
**KSTransition** s giving the rates of transition between them + \n + :param instances: + :type instances: none + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -35168,7 +35232,12 @@ def _buildChildren( class OpenState(Base): - """OpenState -- A **KSState** with **relativeConductance** of 1""" + """OpenState -- A **KSState** with **relativeConductance** of 1 + \n + :param relativeConductance: + :type relativeConductance: none + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [] @@ -35302,7 +35371,12 @@ def _buildChildren( class ClosedState(Base): - """ClosedState -- A **KSState** with **relativeConductance** of 0""" + """ClosedState -- A **KSState** with **relativeConductance** of 0 + \n + :param relativeConductance: + :type relativeConductance: none + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [] @@ -35443,6 +35517,9 @@ def _buildChildren( class IonChannelKS(Standalone): """A kinetic scheme based ion channel with multiple **gateKS** s, each of which consists of multiple **KSState** s and **KSTransition** s giving the rates of transition between them IonChannelKS -- A kinetic scheme based ion channel with multiple **gateKS** s, each of which consists of multiple **KSState** s and **KSTransition** s giving the rates of transition between them + \n + :param conductance: + :type conductance: conductance """ @@ -39049,8 +39126,9 @@ def append(self, element): class BasePynnSynapse(BaseSynapse): """BasePynnSynapse -- Base type for all PyNN synapses. Note, the current **I** produced is dimensionless, but it requires a membrane potential **v** with dimension voltage - - * tau_syn -- (dimension: none) + \n + :param tau_syn: + :type tau_syn: none """ @@ -39246,13 +39324,18 @@ def _buildChildren( class basePyNNCell(BaseCell): - """basePyNNCell -- Base type of any PyNN standard cell model. Note: membrane potential **v** has dimensions voltage, but all other parameters are dimensionless. This is to facilitate translation to and from PyNN scripts in Python, where these parameters have implicit units, see http://neuralensemble. org/trac/PyNN/wiki/StandardModels - - * cm -- (dimension: none) - * i_offset -- (dimension: none) - * tau_syn_E -- (dimension: none): This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell - * tau_syn_I -- (dimension: none): This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell - * v_init -- (dimension: none) + """basePyNNCell -- Base type of any PyNN standard cell model. Note: membrane potential **v** has dimensions voltage, but all other parameters are dimensionless. This is to facilitate translation to and from PyNN scripts in Python, where these parameters have implicit units, see http://neuralensemble.org/trac/PyNN/wiki/StandardModels + \n + :param cm: + :type cm: none + :param i_offset: + :type i_offset: none + :param tau_syn_E: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell + :type tau_syn_E: none + :param tau_syn_I: This parameter is never used in the NeuroML2 description of this cell! 
Any synapse producing a current can be placed on this cell + :type tau_syn_I: none + :param v_init: + :type v_init: none """ @@ -41409,8 +41492,11 @@ def __str__(self): class SpikeGeneratorRefPoisson(SpikeGeneratorPoisson): """SpikeGeneratorRefPoisson -- Generator of spikes whose ISI distribution is the maximum entropy distribution over [ **minimumISI,** +infinity ) with mean: 1 / **averageRate** - - * minimumISI -- (dimension: time): The minimum interspike interval + \n + :param minimumISI: The minimum interspike interval + :type minimumISI: time + :param averageRate: The average rate at which spikes are emitted + :type averageRate: per_time """ @@ -41843,7 +41929,12 @@ def _buildChildren( class ChannelDensityNernstCa2(ChannelDensityNernst): - """ChannelDensityNernstCa2 -- This component is similar to the original component type **channelDensityNernst** but it is changed in order to have a reversal potential that depends on a second independent Ca++ pool ( ca2 ). See https://github. com/OpenSourceBrain/ghk-nernst.""" + """ChannelDensityNernstCa2 -- This component is similar to the original component type **channelDensityNernst** but it is changed in order to have a reversal potential that depends on a second independent Ca++ pool ( ca2 ). See https://github.com/OpenSourceBrain/ghk-nernst. + \n + :param condDensity: + :type condDensity: conductanceDensity + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [] @@ -42019,8 +42110,13 @@ def _buildChildren( class ChannelDensityVShift(ChannelDensity): """ChannelDensityVShift -- Same as **channelDensity** , but with a **vShift** parameter to change voltage activation of gates. The exact usage of **vShift** in expressions for rates is determined by the individual gates. - - * vShift -- (dimension: voltage) + \n + :param vShift: + :type vShift: voltage + :param erev: The reversal potential of the current produced + :type erev: voltage + :param condDensity: + :type condDensity: conductanceDensity """ @@ -42259,15 +42355,7 @@ def _buildChildren( class Cell(BaseCell): - """Cell -- Cell with **segment** s specified in a **morphology** element along with details on its **biophysicalProperties** . NOTE: this can only be correctly simulated using jLEMS when there is a single segment in the cell, and **v** of this cell represents the membrane potential in that isopotential segment. - - * morphology -- Should only be used if morphology element is outside the cell. - This points to the id of the morphology - - * biophysicalProperties -- Should only be used if biophysicalProperties element is outside the cell. - This points to the id of the biophysicalProperties - - """ + """Cell -- Cell with **segment** s specified in a **morphology** element along with details on its **biophysicalProperties** . NOTE: this can only be correctly simulated using jLEMS when there is a single segment in the cell, and **v** of this cell represents the membrane potential in that isopotential segment.""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -42984,29 +43072,50 @@ def summary(self): class PinskyRinzelCA3Cell(BaseCell): - """PinskyRinzelCA3Cell -- Reduced CA3 cell model from Pinsky and Rinzel 1994. See https://github. 
com/OpenSourceBrain/PinskyRinzelModel - - * iSoma -- (dimension: currentDensity) - * iDend -- (dimension: currentDensity) - * gc -- (dimension: conductanceDensity) - * gLs -- (dimension: conductanceDensity) - * gLd -- (dimension: conductanceDensity) - * gNa -- (dimension: conductanceDensity) - * gKdr -- (dimension: conductanceDensity) - * gCa -- (dimension: conductanceDensity) - * gKahp -- (dimension: conductanceDensity) - * gKC -- (dimension: conductanceDensity) - * gNmda -- (dimension: conductanceDensity) - * gAmpa -- (dimension: conductanceDensity) - * eNa -- (dimension: voltage) - * eCa -- (dimension: voltage) - * eK -- (dimension: voltage) - * eL -- (dimension: voltage) - * qd0 -- (dimension: none) - * pp -- (dimension: none) - * alphac -- (dimension: none) - * betac -- (dimension: none) - * cm -- (dimension: specificCapacitance) + """PinskyRinzelCA3Cell -- Reduced CA3 cell model from Pinsky and Rinzel 1994. See https://github.com/OpenSourceBrain/PinskyRinzelModel + \n + :param iSoma: + :type iSoma: currentDensity + :param iDend: + :type iDend: currentDensity + :param gLs: + :type gLs: conductanceDensity + :param gLd: + :type gLd: conductanceDensity + :param gNa: + :type gNa: conductanceDensity + :param gKdr: + :type gKdr: conductanceDensity + :param gCa: + :type gCa: conductanceDensity + :param gKahp: + :type gKahp: conductanceDensity + :param gKC: + :type gKC: conductanceDensity + :param gc: + :type gc: conductanceDensity + :param eNa: + :type eNa: voltage + :param eCa: + :type eCa: voltage + :param eK: + :type eK: voltage + :param eL: + :type eL: voltage + :param pp: + :type pp: none + :param cm: + :type cm: specificCapacitance + :param alphac: + :type alphac: none + :param betac: + :type betac: none + :param gNmda: + :type gNmda: conductanceDensity + :param gAmpa: + :type gAmpa: conductanceDensity + :param qd0: + :type qd0: none """ @@ -43896,13 +44005,19 @@ def _buildChildren( class FitzHughNagumo1969Cell(BaseCell): """FitzHughNagumo1969Cell -- The Fitzhugh Nagumo model is a two-dimensional simplification of the Hodgkin-Huxley model of spike generation in squid giant axons. This system was suggested by FitzHugh ( FitzHugh R. [1961]: Impulses and physiological states in theoretical models of nerve membrane. Biophysical J. 1:445-466 ), who called it " Bonhoeffer-van der Pol model ", and the equivalent circuit by Nagumo et al. ( Nagumo J. , Arimoto S. , and Yoshizawa S. [1962] An active pulse transmission line simulating nerve axon. Proc IRE. 50:2061-2070. 1962 ). This version corresponds to the one described in FitzHugh R. [1969]: Mathematical models of excitation and propagation in nerve. Chapter 1 ( pp. 1-85 in H. P. Schwan, ed. Biological Engineering, McGraw-Hill Book Co. , N. Y. ) - - * a -- (dimension: none) - * b -- (dimension: none) - * I -- (dimension: none): plays the role of an external injected current - * phi -- (dimension: none) - * V0 -- (dimension: none) - * W0 -- (dimension: none) + \n + :param a: + :type a: none + :param b: + :type b: none + :param I: plays the role of an external injected current + :type I: none + :param phi: + :type phi: none + :param V0: + :type V0: none + :param W0: + :type W0: none """ @@ -44218,9 +44333,10 @@ def _buildChildren( class FitzHughNagumoCell(BaseCell): - """FitzHughNagumoCell -- Simple dimensionless model of spiking cell from FitzHugh and Nagumo. Superseded by **fitzHughNagumo1969Cell** ( See https://github. 
com/NeuroML/NeuroML2/issues/42 ) - - * I -- (dimension: none) + """FitzHughNagumoCell -- Simple dimensionless model of spiking cell from FitzHugh and Nagumo. Superseded by **fitzHughNagumo1969Cell** ( See https://github.com/NeuroML/NeuroML2/issues/42 ) + \n + :param I: + :type I: none """ @@ -44436,8 +44552,9 @@ def _buildChildren( class BaseCellMembPotCap(BaseCell): """BaseCellMembPotCap -- Any cell with a membrane potential **v** with voltage units and a membrane capacitance **C.** Also defines exposed value **iSyn** for current due to external synapses and **iMemb** for total transmembrane current ( usually channel currents plus **iSyn** ) - - * C -- (dimension: capacitance): Total capacitance of the cell membrane + \n + :param C: Total capacitance of the cell membrane + :type C: capacitance """ @@ -44681,14 +44798,20 @@ def _buildChildren( class IzhikevichCell(BaseCell): - """IzhikevichCell -- Cell based on the 2003 model of Izhikevich, see http://izhikevich. org/publications/spikes. htm - - * v0 -- (dimension: voltage): Initial membrane potential - * thresh -- (dimension: voltage): Spike threshold - * a -- (dimension: none): Time scale of the recovery variable U - * b -- (dimension: none): Sensitivity of U to the subthreshold fluctuations of the membrane potential v - * c -- (dimension: none): After-spike reset value of v - * d -- (dimension: none): After-spike reset of u + """IzhikevichCell -- Cell based on the 2003 model of Izhikevich, see http://izhikevich.org/publications/spikes.htm + \n + :param v0: Initial membrane potential + :type v0: voltage + :param a: Time scale of the recovery variable U + :type a: none + :param b: Sensitivity of U to the subthreshold fluctuations of the membrane potential v + :type b: none + :param c: After-spike reset value of v + :type c: none + :param d: After-spike reset of u + :type d: none + :param thresh: Spike threshold + :type thresh: voltage """ @@ -45035,9 +45158,17 @@ def _buildChildren( class IafCell(BaseCell): """IafCell -- Integrate and fire cell with capacitance **C,** **leakConductance** and **leakReversal** - - * leakReversal -- (dimension: voltage) - * leakConductance -- (dimension: conductance) + \n + :param leakConductance: + :type leakConductance: conductance + :param leakReversal: + :type leakReversal: voltage + :param thresh: + :type thresh: voltage + :param reset: + :type reset: voltage + :param C: Total capacitance of the cell membrane + :type C: capacitance """ @@ -45447,9 +45578,15 @@ def _buildChildren( class IafTauCell(BaseCell): """IafTauCell -- Integrate and fire cell which returns to its leak reversal potential of **leakReversal** with a time constant **tau** - - * leakReversal -- (dimension: voltage) - * tau -- (dimension: time) + \n + :param leakReversal: + :type leakReversal: voltage + :param tau: + :type tau: time + :param thresh: The membrane potential at which to emit a spiking event and reset voltage + :type thresh: voltage + :param reset: The value the membrane potential is reset to on spiking + :type reset: voltage """ @@ -45791,13 +45928,18 @@ def _buildChildren( class GradedSynapse(BaseSynapse): - """GradedSynapse -- Graded/analog synapse. Based on synapse in Methods of http://www. nature. com/neuro/journal/v7/n12/abs/nn1352. html - Based on synapse in Methods of http://www.nature.com/neuro/journal/v7/n12/abs/nn1352.html. 
- conductance -- (dimension: conductance) - delta -- (dimension: voltage): Slope of the activation curve - Vth -- (dimension: voltage): The half-activation voltage of the synapse - k -- (dimension: per_time): Rate constant for transmitter-receptor dissociation rate - erev -- (dimension: voltage): The reversal potential of the synapse + """GradedSynapse -- Graded/analog synapse. Based on synapse in Methods of http://www. nature.com/neuro/journal/v7/n12/abs/nn1352.html + \n + :param conductance: + :type conductance: conductance + :param delta: Slope of the activation curve + :type delta: voltage + :param k: Rate constant for transmitter-receptor dissociation rate + :type k: per_time + :param Vth: The half-activation voltage of the synapse + :type Vth: voltage + :param erev: The reversal potential of the synapse + :type erev: voltage """ @@ -46173,8 +46315,9 @@ def _buildChildren( class LinearGradedSynapse(BaseSynapse): """LinearGradedSynapse -- Behaves just like a one way gap junction. - Behaves just like a one way gap junction. - conductance -- (dimension: conductance) + \n + :param conductance: + :type conductance: conductance """ @@ -46401,10 +46544,7 @@ def _buildChildren( class SilentSynapse(BaseSynapse): - """SilentSynapse -- Dummy synapse which emits no current. Used as presynaptic endpoint for analog synaptic connection. - Dummy synapse which emits no current. Used as presynaptic endpoint for analog synaptic connection (continuousConnection). - - """ + """SilentSynapse -- Dummy synapse which emits no current. Used as presynaptic endpoint for analog synaptic connection.""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [] @@ -46556,8 +46696,9 @@ def _buildChildren( class GapJunction(BaseSynapse): """GapJunction -- Gap junction/single electrical connection - Gap junction/single electrical connection - conductance -- (dimension: conductance) + \n + :param conductance: + :type conductance: conductance """ @@ -47150,7 +47291,12 @@ def _buildChildren( class IonChannel(IonChannelScalable): - """IonChannel -- Note **ionChannel** and **ionChannelHH** are currently functionally identical. This is needed since many existing examples use ionChannel, some use ionChannelHH. NeuroML v2beta4 should remove one of these, probably ionChannelHH.""" + """IonChannel -- Note **ionChannel** and **ionChannelHH** are currently functionally identical. This is needed since many existing examples use ionChannel, some use ionChannelHH. NeuroML v2beta4 should remove one of these, probably ionChannelHH. + \n + :param conductance: + :type conductance: conductance + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -47801,7 +47947,12 @@ def _buildChildren( class AlphaCurrSynapse(BasePynnSynapse): - """AlphaCurrSynapse -- Alpha synapse: rise time and decay time are both tau_syn. Current based synapse.""" + """AlphaCurrSynapse -- Alpha synapse: rise time and decay time are both tau_syn. Current based synapse. 
+ \n + :param tau_syn: + :type tau_syn: none + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [] @@ -47961,7 +48112,12 @@ def _buildChildren( class ExpCurrSynapse(BasePynnSynapse): - """ExpCurrSynapse -- Current based synapse with instantaneous rise and single exponential decay ( with time constant tau_syn )""" + """ExpCurrSynapse -- Current based synapse with instantaneous rise and single exponential decay ( with time constant tau_syn ) + \n + :param tau_syn: + :type tau_syn: none + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [] @@ -48114,8 +48270,11 @@ def _buildChildren( class AlphaCondSynapse(BasePynnSynapse): """AlphaCondSynapse -- Alpha synapse: rise time and decay time are both tau_syn. Conductance based synapse. - - * e_rev -- (dimension: none) + \n + :param e_rev: + :type e_rev: none + :param tau_syn: + :type tau_syn: none """ @@ -48293,8 +48452,11 @@ def _buildChildren( class ExpCondSynapse(BasePynnSynapse): """ExpCondSynapse -- Conductance based synapse with instantaneous rise and single exponential decay ( with time constant tau_syn ) - - * e_rev -- (dimension: none) + \n + :param e_rev: + :type e_rev: none + :param tau_syn: + :type tau_syn: none """ @@ -48464,16 +48626,36 @@ def _buildChildren( class HH_cond_exp(basePyNNCell): """HH_cond_exp -- Single-compartment Hodgkin-Huxley-type neuron with transient sodium and delayed-rectifier potassium currents using the ion channel models from Traub. - - * v_offset -- (dimension: none) - * e_rev_E -- (dimension: none) - * e_rev_I -- (dimension: none) - * e_rev_K -- (dimension: none) - * e_rev_Na -- (dimension: none) - * e_rev_leak -- (dimension: none) - * g_leak -- (dimension: none) - * gbar_K -- (dimension: none) - * gbar_Na -- (dimension: none) + \n + :param gbar_K: + :type gbar_K: none + :param gbar_Na: + :type gbar_Na: none + :param g_leak: + :type g_leak: none + :param e_rev_K: + :type e_rev_K: none + :param e_rev_Na: + :type e_rev_Na: none + :param e_rev_leak: + :type e_rev_leak: none + :param v_offset: + :type v_offset: none + :param e_rev_E: + :type e_rev_E: none + :param e_rev_I: + :type e_rev_I: none + :param cm: + :type cm: none + :param i_offset: + :type i_offset: none + :param tau_syn_E: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell + :type + tau_syn_E: none + :param tau_syn_I: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell + :type tau_syn_I: none + :param v_init: + :type v_init: none """ @@ -48791,12 +48973,27 @@ def _buildChildren( class basePyNNIaFCell(basePyNNCell): """basePyNNIaFCell -- Base type of any PyNN standard integrate and fire model - - * tau_m -- (dimension: none) - * tau_refrac -- (dimension: none) - * v_reset -- (dimension: none) - * v_rest -- (dimension: none) - * v_thresh -- (dimension: none) + \n + :param tau_refrac: + :type tau_refrac: none + :param v_thresh: + :type v_thresh: none + :param tau_m: + :type tau_m: none + :param v_rest: + :type v_rest: none + :param v_reset: + :type v_reset: none + :param cm: + :type cm: none + :param i_offset: + :type i_offset: none + :param tau_syn_E: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell + :type tau_syn_E: none + :param tau_syn_I: This parameter is never used in the NeuroML2 description of this cell! 
Any synapse producing a current can be placed on this cell + :type tau_syn_I: none + :param v_init: + :type v_init: none """ @@ -49762,9 +49959,11 @@ def __str__(self): class ConnectionWD(BaseConnectionOldFormat): """ConnectionWD -- Event connection between named components, which gets processed via a new instance of a synapse component which is created on the target component, includes setting of **weight** and **delay** for the synaptic connection - - * weight -- (dimension: none) - * delay -- (dimension: time) + \n + :param weight: + :type weight: none + :param delay: + :type delay: time """ @@ -50565,17 +50764,29 @@ def _buildChildren( class AdExIaFCell(BaseCellMembPotCap): """AdExIaFCell -- Model based on Brette R and Gerstner W ( 2005 ) Adaptive Exponential Integrate-and-Fire Model as an Effective Description of Neuronal Activity. J Neurophysiol 94:3637-3642 - - * gL -- (dimension: conductance) - * EL -- (dimension: voltage) - * reset -- (dimension: voltage) - * VT -- (dimension: voltage) - * thresh -- (dimension: voltage) - * delT -- (dimension: voltage) - * tauw -- (dimension: time) - * refract -- (dimension: time) - * a -- (dimension: conductance) - * b -- (dimension: current) + \n + :param gL: + :type gL: conductance + :param EL: + :type EL: voltage + :param VT: + :type VT: voltage + :param thresh: + :type thresh: voltage + :param reset: + :type reset: voltage + :param delT: + :type delT: voltage + :param tauw: + :type tauw: time + :param refract: + :type refract: time + :param a: + :type a: conductance + :param b: + :type b: current + :param C: Total capacitance of the cell membrane + :type C: capacitance """ @@ -51103,16 +51314,27 @@ def _buildChildren( class Izhikevich2007Cell(BaseCellMembPotCap): """Izhikevich2007Cell -- Cell based on the modified Izhikevich model in Izhikevich 2007, Dynamical systems in neuroscience, MIT Press - - * v0 -- (dimension: voltage) - * k -- (dimension: conductance_per_voltage) - * vr -- (dimension: voltage) - * vt -- (dimension: voltage) - * vpeak -- (dimension: voltage) - * a -- (dimension: per_time) - * b -- (dimension: conductance) - * c -- (dimension: voltage) - * d -- (dimension: current) + \n + :param v0: + :type v0: voltage + :param k: + :type k: conductance_per_voltage + :param vr: + :type vr: voltage + :param vt: + :type vt: voltage + :param vpeak: + :type vpeak: voltage + :param a: + :type a: per_time + :param b: + :type b: conductance + :param c: + :type c: voltage + :param d: + :type d: current + :param C: Total capacitance of the cell membrane + :type C: capacitance """ @@ -51651,8 +51873,19 @@ def _buildChildren( class IafRefCell(IafCell): """IafRefCell -- Integrate and fire cell with capacitance **C,** **leakConductance,** **leakReversal** and refractory period **refract** - - * refract -- (dimension: time) + \n + :param refract: + :type refract: time + :param leakConductance: + :type leakConductance: conductance + :param leakReversal: + :type leakReversal: voltage + :param thresh: + :type thresh: voltage + :param reset: + :type reset: voltage + :param C: Total capacitance of the cell membrane + :type C: capacitance """ @@ -51875,8 +52108,17 @@ def _buildChildren( class IafTauRefCell(IafTauCell): """IafTauRefCell -- Integrate and fire cell which returns to its leak reversal potential of **leakReversal** with a time course **tau.** It has a refractory period of **refract** after spiking - - * refract -- (dimension: time) + \n + :param refract: + :type refract: time + :param leakReversal: + :type leakReversal: voltage + :param tau: + 
:type tau: time + :param thresh: The membrane potential at which to emit a spiking event and reset voltage + :type thresh: voltage + :param reset: The value the membrane potential is reset to on spiking + :type reset: voltage """ @@ -52378,9 +52620,11 @@ def _buildChildren( class AlphaCurrentSynapse(BaseCurrentBasedSynapse): """AlphaCurrentSynapse -- Alpha current synapse: rise time and decay time are both **tau.** - - * tau -- (dimension: time): Time course for rise and decay - * ibase -- (dimension: current): Baseline current increase after receiving a spike + \n + :param tau: Time course for rise and decay + :type tau: time + :param ibase: Baseline current increase after receiving a spike + :type ibase: current """ @@ -52657,10 +52901,13 @@ def _buildChildren( class BaseConductanceBasedSynapseTwo(BaseVoltageDepSynapse): """BaseConductanceBasedSynapseTwo -- Synapse model suited for a sum of two expTwoSynapses which exposes a conductance **g** in addition to producing a current. Not necessarily ohmic!! cno_0000027 - - * gbase1 -- (dimension: conductance): Baseline conductance 1 - * gbase2 -- (dimension: conductance): Baseline conductance 2 - * erev -- (dimension: voltage): Reversal potential of the synapse + \n + :param gbase1: Baseline conductance 1 + :type gbase1: conductance + :param gbase2: Baseline conductance 2 + :type gbase2: conductance + :param erev: Reversal potential of the synapse + :type erev: voltage """ @@ -53006,9 +53253,11 @@ def _buildChildren( class BaseConductanceBasedSynapse(BaseVoltageDepSynapse): """BaseConductanceBasedSynapse -- Synapse model which exposes a conductance **g** in addition to producing a current. Not necessarily ohmic!! cno_0000027 - - * gbase -- (dimension: conductance): Baseline conductance, generally the maximum conductance following a single spike - * erev -- (dimension: voltage): Reversal potential of the synapse + \n + :param gbase: Baseline conductance, generally the maximum conductance following a single spike + :type gbase: conductance + :param erev: Reversal potential of the synapse + :type erev: voltage """ @@ -53323,8 +53572,11 @@ def _buildChildren( class IonChannelVShift(IonChannel): """IonChannelVShift -- Same as **ionChannel** , but with a **vShift** parameter to change voltage activation of gates. The exact usage of **vShift** in expressions for rates is determined by the individual gates. - - * vShift -- (dimension: voltage) + \n + :param vShift: + :type vShift: voltage + :param conductance: + :type conductance: conductance """ @@ -53577,7 +53829,12 @@ def _buildChildren( class IonChannelHH(IonChannel): - """IonChannelHH -- Note **ionChannel** and **ionChannelHH** are currently functionally identical. This is needed since many existing examples use ionChannel, some use ionChannelHH. NeuroML v2beta4 should remove one of these, probably ionChannelHH.""" + """IonChannelHH -- Note **ionChannel** and **ionChannelHH** are currently functionally identical. This is needed since many existing examples use ionChannel, some use ionChannelHH. NeuroML v2beta4 should remove one of these, probably ionChannelHH. 
+ \n + :param conductance: + :type conductance: conductance + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [] @@ -53758,7 +54015,30 @@ def _buildChildren( class IF_curr_exp(basePyNNIaFCell): - """IF_curr_exp -- Leaky integrate and fire model with fixed threshold and decaying-exponential post-synaptic current""" + """IF_curr_exp -- Leaky integrate and fire model with fixed threshold and decaying-exponential post-synaptic current + \n + :param tau_refrac: + :type tau_refrac: none + :param v_thresh: + :type v_thresh: none + :param tau_m: + :type tau_m: none + :param v_rest: + :type v_rest: none + :param v_reset: + :type v_reset: none + :param cm: + :type cm: none + :param i_offset: + :type i_offset: none + :param tau_syn_E: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell + :type tau_syn_E: none + :param tau_syn_I: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell + :type tau_syn_I: none + :param v_init: + :type v_init: none + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [] @@ -53935,7 +54215,30 @@ def _buildChildren( class IF_curr_alpha(basePyNNIaFCell): - """IF_curr_alpha -- Leaky integrate and fire model with fixed threshold and alpha-function-shaped post-synaptic current""" + """IF_curr_alpha -- Leaky integrate and fire model with fixed threshold and alpha-function-shaped post-synaptic current + \n + :param tau_refrac: + :type tau_refrac: none + :param v_thresh: + :type v_thresh: none + :param tau_m: + :type tau_m: none + :param v_rest: + :type v_rest: none + :param v_reset: + :type v_reset: none + :param cm: + :type cm: none + :param i_offset: + :type i_offset: none + :param tau_syn_E: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell + :type tau_syn_E: none + :param tau_syn_I: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell + :type tau_syn_I: none + :param v_init: + :type v_init: none + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [] @@ -54113,9 +54416,31 @@ def _buildChildren( class basePyNNIaFCondCell(basePyNNIaFCell): """basePyNNIaFCondCell -- Base type of conductance based PyNN IaF cell models - - * e_rev_E -- (dimension: none): This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell - * e_rev_I -- (dimension: none): This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell + \n + :param e_rev_E: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell + :type e_rev_E: none + :param e_rev_I: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell + :type e_rev_I: none + :param tau_refrac: + :type tau_refrac: none + :param v_thresh: + :type v_thresh: none + :param tau_m: + :type tau_m: none + :param v_rest: + :type v_rest: none + :param v_reset: + :type v_reset: none + :param cm: + :type cm: none + :param i_offset: + :type i_offset: none + :param tau_syn_E: This parameter is never used in the NeuroML2 description of this cell! 
Any synapse producing a current can be placed on this cell + :type tau_syn_E: none + :param tau_syn_I: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell + :type tau_syn_I: none + :param v_init: + :type v_init: none """ @@ -54799,10 +55124,19 @@ def __str__(self): class ExpThreeSynapse(BaseConductanceBasedSynapseTwo): """ExpThreeSynapse -- Ohmic synapse similar to expTwoSynapse but consisting of two components that can differ in decay times and max conductances but share the same rise time. - - * tauDecay1 -- (dimension: time) - * tauDecay2 -- (dimension: time) - * tauRise -- (dimension: time) + \n + :param tauRise: + :type tauRise: time + :param tauDecay1: + :type tauDecay1: time + :param tauDecay2: + :type tauDecay2: time + :param gbase1: Baseline conductance 1 + :type gbase1: conductance + :param gbase2: Baseline conductance 2 + :type gbase2: conductance + :param erev: Reversal potential of the synapse + :type erev: voltage """ @@ -55088,9 +55422,15 @@ def _buildChildren( class ExpTwoSynapse(BaseConductanceBasedSynapse): """ExpTwoSynapse -- Ohmic synapse model whose conductance waveform on receiving an event has a rise time of **tauRise** and a decay time of **tauDecay.** Max conductance reached during this time ( assuming zero conductance before ) is **gbase** * **weight.** - - * tauDecay -- (dimension: time) - * tauRise -- (dimension: time) + \n + :param tauRise: + :type tauRise: time + :param tauDecay: + :type tauDecay: time + :param gbase: Baseline conductance, generally the maximum conductance following a single spike + :type gbase: conductance + :param erev: Reversal potential of the synapse + :type erev: voltage """ @@ -55364,8 +55704,13 @@ def _buildChildren( class ExpOneSynapse(BaseConductanceBasedSynapse): """ExpOneSynapse -- Ohmic synapse model whose conductance rises instantaneously by ( **gbase** * **weight** ) on receiving an event, and which decays exponentially to zero with time course **tauDecay** - - * tauDecay -- (dimension: time): Time course of decay + \n + :param tauDecay: Time course of decay + :type tauDecay: time + :param gbase: Baseline conductance, generally the maximum conductance following a single spike + :type gbase: conductance + :param erev: Reversal potential of the synapse + :type erev: voltage """ @@ -55591,8 +55936,13 @@ def _buildChildren( class AlphaSynapse(BaseConductanceBasedSynapse): """AlphaSynapse -- Ohmic synapse model where rise time and decay time are both **tau.** Max conductance reached during this time ( assuming zero conductance before ) is **gbase** * **weight.** - - * tau -- (dimension: time): Time course of rise/decay + \n + :param tau: Time course of rise/decay + :type tau: time + :param gbase: Baseline conductance, generally the maximum conductance following a single spike + :type gbase: conductance + :param erev: Reversal potential of the synapse + :type erev: voltage """ @@ -55810,12 +56160,41 @@ def _buildChildren( class EIF_cond_exp_isfa_ista(basePyNNIaFCondCell): """EIF_cond_exp_isfa_ista -- Adaptive exponential integrate and fire neuron according to Brette R and Gerstner W ( 2005 ) with exponentially-decaying post-synaptic conductance - - * a -- (dimension: none) - * b -- (dimension: none) - * delta_T -- (dimension: none) - * tau_w -- (dimension: none) - * v_spike -- (dimension: none) + \n + :param v_spike: + :type v_spike: none + :param delta_T: + :type delta_T: none + :param tau_w: + :type tau_w: none + :param a: + :type a: none + :param b: + 
:type b: none + :param e_rev_E: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell + :type e_rev_E: none + :param e_rev_I: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell + :type e_rev_I: none + :param tau_refrac: + :type tau_refrac: none + :param v_thresh: + :type v_thresh: none + :param tau_m: + :type tau_m: none + :param v_rest: + :type v_rest: none + :param v_reset: + :type v_reset: none + :param cm: + :type cm: none + :param i_offset: + :type i_offset: none + :param tau_syn_E: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell + :type tau_syn_E: none + :param tau_syn_I: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell + :type tau_syn_I: none + :param v_init: + :type v_init: none """ @@ -56104,7 +56483,34 @@ def _buildChildren( class IF_cond_exp(basePyNNIaFCondCell): - """IF_cond_exp -- Leaky integrate and fire model with fixed threshold and exponentially-decaying post-synaptic conductance""" + """IF_cond_exp -- Leaky integrate and fire model with fixed threshold and exponentially-decaying post-synaptic conductance + \n + :param e_rev_E: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell + :type e_rev_E: none + :param e_rev_I: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell + :type e_rev_I: none + :param tau_refrac: + :type tau_refrac: none + :param v_thresh: + :type v_thresh: none + :param tau_m: + :type tau_m: none + :param v_rest: + :type v_rest: none + :param v_reset: + :type v_reset: none + :param cm: + :type cm: none + :param i_offset: + :type i_offset: none + :param tau_syn_E: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell + :type tau_syn_E: none + :param tau_syn_I: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell + :type tau_syn_I: none + :param v_init: + :type v_init: none + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [] @@ -56285,7 +56691,34 @@ def _buildChildren( class IF_cond_alpha(basePyNNIaFCondCell): - """IF_cond_alpha -- Leaky integrate and fire model with fixed threshold and alpha-function-shaped post-synaptic conductance""" + """IF_cond_alpha -- Leaky integrate and fire model with fixed threshold and alpha-function-shaped post-synaptic conductance + \n + :param e_rev_E: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell + :type e_rev_E: none + :param e_rev_I: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell + :type e_rev_I: none + :param tau_refrac: + :type tau_refrac: none + :param v_thresh: + :type v_thresh: none + :param tau_m: + :type tau_m: none + :param v_rest: + :type v_rest: none + :param v_reset: + :type v_reset: none + :param cm: + :type cm: none + :param i_offset: + :type i_offset: none + :param tau_syn_E: This parameter is never used in the NeuroML2 description of this cell! 
Any synapse producing a current can be placed on this cell + :type tau_syn_E: none + :param tau_syn_I: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell + :type tau_syn_I: none + :param v_init: + :type v_init: none + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [] @@ -56467,8 +56900,9 @@ def _buildChildren( class ContinuousConnectionInstanceW(ContinuousConnectionInstance): """ContinuousConnectionInstanceW -- An instance of a connection in a **continuousProjection** between **presynapticPopulation** to another **postsynapticPopulation** through a **preComponent** at the start and **postComponent** at the end. Populations need to be of type **populationList** and contain **instance** and **location** elements. Can be used for analog synapses. Includes setting of **weight** for the connection - - * weight -- (dimension: none) + \n + :param weight: + :type weight: none """ @@ -56692,8 +57126,9 @@ def __str__(self): class ElectricalConnectionInstanceW(ElectricalConnectionInstance): """ElectricalConnectionInstanceW -- To enable connections between populations through gap junctions. Populations need to be of type **populationList** and contain **instance** and **location** elements. Includes setting of **weight** for the connection - - * weight -- (dimension: none) + \n + :param weight: + :type weight: none """ @@ -56916,7 +57351,18 @@ def __str__(self): class BlockingPlasticSynapse(ExpTwoSynapse): - """BlockingPlasticSynapse -- Biexponential synapse that allows for optional block and plasticity mechanisms, which can be expressed as child elements.""" + """BlockingPlasticSynapse -- Biexponential synapse that allows for optional block and plasticity mechanisms, which can be expressed as child elements. + \n + :param tauRise: + :type tauRise: time + :param tauDecay: + :type tauDecay: time + :param gbase: Baseline conductance, generally the maximum conductance following a single spike + :type gbase: conductance + :param erev: Reversal potential of the synapse + :type erev: voltage + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ @@ -57167,7 +57613,44 @@ def _buildChildren( class EIF_cond_alpha_isfa_ista(EIF_cond_exp_isfa_ista): - """EIF_cond_alpha_isfa_ista -- Adaptive exponential integrate and fire neuron according to Brette R and Gerstner W ( 2005 ) with alpha-function-shaped post-synaptic conductance""" + """EIF_cond_alpha_isfa_ista -- Adaptive exponential integrate and fire neuron according to Brette R and Gerstner W ( 2005 ) with alpha-function-shaped post-synaptic conductance + \n + :param v_spike: + :type v_spike: none + :param delta_T: + :type delta_T: none + :param tau_w: + :type tau_w: none + :param a: + :type a: none + :param b: + :type b: none + :param e_rev_E: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell + :type e_rev_E: none + :param e_rev_I: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell + :type e_rev_I: none + :param tau_refrac: + :type tau_refrac: none + :param v_thresh: + :type v_thresh: none + :param tau_m: + :type tau_m: none + :param v_rest: + :type v_rest: none + :param v_reset: + :type v_reset: none + :param cm: + :type cm: none + :param i_offset: + :type i_offset: none + :param tau_syn_E: This parameter is never used in the NeuroML2 description of this cell! 
Any synapse producing a current can be placed on this cell + :type tau_syn_E: none + :param tau_syn_I: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell + :type tau_syn_I: none + :param v_init: + :type v_init: none + + """ __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [] From cc673e16423e420fcba29024b90496e8eafb3fb2 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Thu, 18 Nov 2021 10:46:17 +0000 Subject: [PATCH 122/136] docs: add note about autogeneration of nml.py documentation --- doc/userdocs/coreclasses.rst | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/doc/userdocs/coreclasses.rst b/doc/userdocs/coreclasses.rst index c077feb0..275208fb 100644 --- a/doc/userdocs/coreclasses.rst +++ b/doc/userdocs/coreclasses.rst @@ -38,4 +38,8 @@ Please also note that this module is also included in the top level of the `neur List of Component classes ~~~~~~~~~~~~~~~~~~~~~~~~~~ +This documentation is auto-generated from the `NeuroML schema `__. +In case of issues, please refer to the schema documentation for clarifications. +If the schema documentation does not resolve the issue, please `contact us `__. + .. include:: coreclasses_list.txt From f3fc7f98b6c8d1086b7eaf553910b46a3df6013c Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Thu, 18 Nov 2021 18:21:35 +0000 Subject: [PATCH 123/136] chore: sync XSD --- neuroml/nml/NeuroML_v2.2.xsd | 34 +++++++++++++++++----------------- 1 file changed, 17 insertions(+), 17 deletions(-) diff --git a/neuroml/nml/NeuroML_v2.2.xsd b/neuroml/nml/NeuroML_v2.2.xsd index 8654a3cc..0b1789ed 100644 --- a/neuroml/nml/NeuroML_v2.2.xsd +++ b/neuroml/nml/NeuroML_v2.2.xsd @@ -63,7 +63,7 @@ - + @@ -959,7 +959,7 @@ - + @@ -984,7 +984,7 @@ - + @@ -1422,11 +1422,11 @@ :type v0: voltage :param a: Time scale of the recovery variable U :type a: none -:param b: Sensitivity of U to the subthreshold fluctuations of the membrane potential v +:param b: Sensitivity of U to the subthreshold fluctuations of the membrane potential V :type b: none -:param c: After-spike reset value of v +:param c: After-spike reset value of V :type c: none -:param d: After-spike reset of u +:param d: After-spike increase to U :type d: none :param thresh: Spike threshold :type thresh: voltage @@ -1683,11 +1683,11 @@ - + - + @@ -1988,7 +1988,7 @@ - + @@ -2011,7 +2011,7 @@ - + @@ -2029,7 +2029,7 @@ - + @@ -2047,7 +2047,7 @@ - + @@ -2076,7 +2076,7 @@ - + @@ -2121,7 +2121,7 @@ - + @@ -2159,7 +2159,7 @@ - + @@ -2185,7 +2185,7 @@ - + @@ -2234,7 +2234,7 @@ - + From c0478045991d21500972c30b16fdd0b53d38c5c1 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Thu, 18 Nov 2021 18:21:43 +0000 Subject: [PATCH 124/136] chore: regenerate nml.py --- neuroml/nml/nml.py | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/neuroml/nml/nml.py b/neuroml/nml/nml.py index 80e988c3..d4e06293 100644 --- a/neuroml/nml/nml.py +++ b/neuroml/nml/nml.py @@ -2,7 +2,7 @@ # -*- coding: utf-8 -*- # -# Generated Thu Nov 18 10:42:12 2021 by generateDS.py version 2.40.3. +# Generated Thu Nov 18 18:20:14 2021 by generateDS.py version 2.40.3. 
# Python 3.10.0 (default, Oct 4 2021, 00:00:00) [GCC 11.2.1 20210728 (Red Hat 11.2.1-1)] # # Command line options: @@ -10905,10 +10905,10 @@ def _buildChildren( class Species(GeneratedsSuper): - """Species -- Description of a chemical sp - ecies identified by **ion,** which has internal, **concentration,** and external, **extConcentration** values for its concentration + """Species -- Description of a chemical species identified by **ion,** which has internal, **concentration,** and external, **extConcentration** values for its concentration \n - :param initialConcentration: + :param initialConcentration + : :type initialConcentration: concentration :param initialExtConcentration: :type initialExtConcentration: concentration @@ -24927,7 +24927,7 @@ def validate_Nml2Quantity_permeability(self, value): validate_Nml2Quantity_permeability_patterns_ = [ [ - "^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(m_per_s| um_per_ms|cm_per_s|cm_per_ms))$" + "^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(m_per_s|um_per_ms|cm_per_s|cm_per_ms))$" ] ] @@ -44804,11 +44804,11 @@ class IzhikevichCell(BaseCell): :type v0: voltage :param a: Time scale of the recovery variable U :type a: none - :param b: Sensitivity of U to the subthreshold fluctuations of the membrane potential v + :param b: Sensitivity of U to the subthreshold fluctuations of the membrane potential V :type b: none - :param c: After-spike reset value of v + :param c: After-spike reset value of V :type c: none - :param d: After-spike reset of u + :param d: After-spike increase to U :type d: none :param thresh: Spike threshold :type thresh: voltage @@ -48650,10 +48650,10 @@ class HH_cond_exp(basePyNNCell): :param i_offset: :type i_offset: none :param tau_syn_E: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell - :type - tau_syn_E: none + :type tau_syn_E: none :param tau_syn_I: This parameter is never used in the NeuroML2 description of this cell! 
Any synapse producing a current can be placed on this cell - :type tau_syn_I: none + :type tau + _syn_I: none :param v_init: :type v_init: none From aaad7639aec7559749012ad9d3a250ba38db659f Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Thu, 18 Nov 2021 18:50:22 +0000 Subject: [PATCH 125/136] chore: do not lint build folder --- setup.cfg | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/setup.cfg b/setup.cfg index 307b0ebf..fa95b864 100644 --- a/setup.cfg +++ b/setup.cfg @@ -3,7 +3,13 @@ # spacing around operators, comment blocks, in argument lists # lines too long ignore = E501,E502,F403,F405,E231,E228,E225,E226,E265,E261 -exclude = neuroml/nml/nml.py,doc +exclude = + neuroml/nml/nml.py, + doc, + build [mypy] -exclude = neuroml/nml/nml.py,doc +exclude = + neuroml/nml/nml.py, + doc, + build From a97d80832546f07a2b9143daf3ff90fb0e3cf7a5 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Fri, 19 Nov 2021 10:32:41 +0000 Subject: [PATCH 126/136] chore: sync XSD --- neuroml/nml/NeuroML_v2.2.xsd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/neuroml/nml/NeuroML_v2.2.xsd b/neuroml/nml/NeuroML_v2.2.xsd index 0b1789ed..a5a5461e 100644 --- a/neuroml/nml/NeuroML_v2.2.xsd +++ b/neuroml/nml/NeuroML_v2.2.xsd @@ -1,4 +1,4 @@ - + From 3da2d830beec1a6cfb5bb97afc17d68d7ad9d3aa Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Fri, 19 Nov 2021 10:32:58 +0000 Subject: [PATCH 127/136] chore: regenerate nml.py --- neuroml/nml/nml.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/neuroml/nml/nml.py b/neuroml/nml/nml.py index d4e06293..e10f4b16 100644 --- a/neuroml/nml/nml.py +++ b/neuroml/nml/nml.py @@ -2,7 +2,7 @@ # -*- coding: utf-8 -*- # -# Generated Thu Nov 18 18:20:14 2021 by generateDS.py version 2.40.3. +# Generated Fri Nov 19 10:31:02 2021 by generateDS.py version 2.40.3. # Python 3.10.0 (default, Oct 4 2021, 00:00:00) [GCC 11.2.1 20210728 (Red Hat 11.2.1-1)] # # Command line options: From 39c14375e7d2bcc4ae610bf8fbb7d0e9be7d386a Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Fri, 19 Nov 2021 10:43:06 +0000 Subject: [PATCH 128/136] test: only use assertWarns check on Python3 It was added in Python 3.2 and is not present in Python 2.7. 
Reference: https://docs.python.org/3.10/library/unittest.html#unittest.TestCase.assertWarns --- neuroml/test/test_nml.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/neuroml/test/test_nml.py b/neuroml/test/test_nml.py index 5a22dea0..dd1ef40a 100644 --- a/neuroml/test/test_nml.py +++ b/neuroml/test/test_nml.py @@ -16,6 +16,8 @@ except ImportError: import unittest +import platform + class TestNML(unittest.TestCase): @@ -49,7 +51,10 @@ def test_generic_add_single(self): self.assertIsNone(doc.add(cell)) # Already added, so throw exception - with self.assertWarns(UserWarning): + if int(platform.python_version_tuple()[0]) > 2: + with self.assertWarns(UserWarning): + doc.add(cell) + else: doc.add(cell) # Success @@ -139,7 +144,10 @@ def test_add_to_container(self): pop3 = neuroml.Population(id="unique") network.add(pop3) # warning because this is already added - with self.assertWarns(UserWarning): + if int(platform.python_version_tuple()[0]) > 2: + with self.assertWarns(UserWarning): + network.add(pop3) + else: network.add(pop3) # Note that for Python, this is a new object From e5ada9889666339e0fa52b7b06540ebd22e5b66c Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Fri, 19 Nov 2021 10:53:18 +0000 Subject: [PATCH 129/136] test: only use assertRegex/assertNotRegex on Python3+ assertRegex was added in 3.1 assertNotRegex was added in 3.2 --- neuroml/test/test_nml.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/neuroml/test/test_nml.py b/neuroml/test/test_nml.py index dd1ef40a..30c5b3d7 100644 --- a/neuroml/test/test_nml.py +++ b/neuroml/test/test_nml.py @@ -159,6 +159,7 @@ def test_info(self): """Test getting member info.""" cell = neuroml.Cell(id="testcell") info = cell.info() - self.assertRegex(info, "morphology") - self.assertRegex(info, "biophysical_properties") - self.assertNotRegex(info, "network") + if int(platform.python_version_tuple()[0]) > 2: + self.assertRegex(info, "morphology") + self.assertRegex(info, "biophysical_properties") + self.assertNotRegex(info, "network") From bbe4cb60d9feed22e7647ee18dc99e15749faab9 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Fri, 19 Nov 2021 11:15:15 +0000 Subject: [PATCH 130/136] chore: use latest available generateds --- neuroml/nml/nml.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/neuroml/nml/nml.py b/neuroml/nml/nml.py index e10f4b16..47f98216 100644 --- a/neuroml/nml/nml.py +++ b/neuroml/nml/nml.py @@ -2,7 +2,7 @@ # -*- coding: utf-8 -*- # -# Generated Fri Nov 19 10:31:02 2021 by generateDS.py version 2.40.3. +# Generated Fri Nov 19 11:14:04 2021 by generateDS.py version 2.40.5. # Python 3.10.0 (default, Oct 4 2021, 00:00:00) [GCC 11.2.1 20210728 (Red Hat 11.2.1-1)] # # Command line options: From aa025c608a678697d14acaca1cf8d7ed0a0a7fd6 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Mon, 22 Nov 2021 13:18:36 +0000 Subject: [PATCH 131/136] fix: py2 build Use BytesIO for py2, but StringIO for py3. I'm not entirely sure why this is needed here. It isn't necessarily an issue in nml.py---it could be in the loaders where we're not being too careful about py2/py3. The versions of pytables in use are also different because pytables dropped support for py2 in 3.6.x. 
--- neuroml/nml/nml.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/neuroml/nml/nml.py b/neuroml/nml/nml.py index 47f98216..cb7cbbb9 100644 --- a/neuroml/nml/nml.py +++ b/neuroml/nml/nml.py @@ -211,8 +211,10 @@ def __str__(self): for n in settings: if hasattr(self, n): setattr(settings[n], self[n]) - from io import StringIO - + if sys.version_info.major == 2: + from io import BytesIO as StringIO + else: + from io import StringIO output = StringIO() self.export( output, From 914cb2cd808c2db854074fdfc53442bfb226db70 Mon Sep 17 00:00:00 2001 From: Padraig Gleeson Date: Wed, 24 Nov 2021 11:18:30 +0000 Subject: [PATCH 132/136] To v0.3.1 --- neuroml/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/neuroml/__init__.py b/neuroml/__init__.py index 791361a7..e9cfc434 100644 --- a/neuroml/__init__.py +++ b/neuroml/__init__.py @@ -1,6 +1,6 @@ from .nml.nml import * # allows importation of all neuroml classes -__version__ = "0.2.58" +__version__ = "0.3.1" __version_info__ = tuple(int(i) for i in __version__.split(".")) From fb74540ce087621189b9f013d480e019a0a551c3 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Fri, 26 Nov 2021 13:40:25 +0000 Subject: [PATCH 133/136] Revert "fix: py2 build" This reverts commit aa025c608a678697d14acaca1cf8d7ed0a0a7fd6. --- neuroml/nml/nml.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/neuroml/nml/nml.py b/neuroml/nml/nml.py index cb7cbbb9..47f98216 100644 --- a/neuroml/nml/nml.py +++ b/neuroml/nml/nml.py @@ -211,10 +211,8 @@ def __str__(self): for n in settings: if hasattr(self, n): setattr(settings[n], self[n]) - if sys.version_info.major == 2: - from io import BytesIO as StringIO - else: - from io import StringIO + from io import StringIO + output = StringIO() self.export( output, From fcdcd56af1ded448696f009d041a3aa2183b81a8 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Fri, 26 Nov 2021 14:29:03 +0000 Subject: [PATCH 134/136] fix: correctly fix py2/3 StringIO error In python 2, we must use `from StringIO import StringIO` for strings. This is not the same as `from io import StringIO` in Python2 (or in Python3). The former handles strings, the latter handles unicode. For Py2, since we're not prefixing all strings with `u""` to make them unicode, we need to use the first one. So, different `StringIO` classes need to be imported for Python2 and Python3. The same `from io import StringIO` cannot be shared. 
- https://docs.python.org/2.7/library/stringio.html?highlight=stringio#StringIO.StringIO - https://docs.python.org/2.7/library/io.html?highlight=io%20stringio#io.StringIO --- neuroml/nml/nml.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/neuroml/nml/nml.py b/neuroml/nml/nml.py index 47f98216..d6e585e8 100644 --- a/neuroml/nml/nml.py +++ b/neuroml/nml/nml.py @@ -211,7 +211,10 @@ def __str__(self): for n in settings: if hasattr(self, n): setattr(settings[n], self[n]) - from io import StringIO + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO output = StringIO() self.export( From 74239ec50d57269581c583c1291e3fc813002f0a Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Thu, 2 Dec 2021 18:17:13 +0000 Subject: [PATCH 135/136] chore(py27): pin numexpr --- requirements.txt | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 02430e11..12676046 100644 --- a/requirements.txt +++ b/requirements.txt @@ -2,7 +2,8 @@ six lxml numpy pymongo -numexpr +numexpr ; python_version >= '3' +numexpr<2.8 ; python_version < '3' simplejson; python_version < '3.5' git+git://github.com/PyTables/PyTables/@master#egg=tables ; python_version >= '3.10' tables>=3.3.0 ; python_version < '3.10' From ef4da12ca0e63b3ef45c039e887e47dbb569a438 Mon Sep 17 00:00:00 2001 From: Padraig Gleeson Date: Thu, 16 Dec 2021 12:18:24 +0000 Subject: [PATCH 136/136] Remove travis badge --- README.md | 1 - 1 file changed, 1 deletion(-) diff --git a/README.md b/README.md index 7f8e10ef..6f6fe309 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,5 @@ ## Introduction -[![Travis CI](https://travis-ci.org/NeuralEnsemble/libNeuroML.svg?branch=master)](https://travis-ci.org/NeuralEnsemble/libNeuroML) [![GH Build](https://github.com/NeuralEnsemble/libNeuroML/actions/workflows/ci.yml/badge.svg)](https://github.com/NeuralEnsemble/libNeuroML/actions/workflows/ci.yml) [![Documentation Status](https://readthedocs.org/projects/libneuroml/badge/?version=latest)](https://libneuroml.readthedocs.io/en/latest/?badge=latest) [![PyPI](https://img.shields.io/pypi/v/libNeuroML)](https://pypi.org/project/libNeuroML/)
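
A minimal standalone sketch of the Python 2 vs Python 3 StringIO behaviour that PATCH 131 and PATCH 134 above work around; it uses only standard-library imports and plain str literals, and is illustrative only -- nothing here is part of libNeuroML itself:

    import sys

    if sys.version_info.major == 2:
        # Python 2: StringIO.StringIO accepts plain byte strings ("..."),
        # whereas io.StringIO expects unicode (u"...") and raises TypeError
        # for a plain str -- the failure mode the fix above describes.
        from StringIO import StringIO
    else:
        # Python 3: io.StringIO is the text buffer for str.
        from io import StringIO

    output = StringIO()
    output.write("exported text, no u'' prefix needed")  # accepted on both 2 and 3
    print(output.getvalue())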