diff --git a/0.6.1/404.html b/0.6.1/404.html new file mode 100644 index 000000000..c25f3fb88 --- /dev/null +++ b/0.6.1/404.html @@ -0,0 +1,1252 @@ + + + + + + + + + + + + + + + + + + + + + EMMOntoPy + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+
+ +
+ + + + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ +

404 - Not found

+ +
+
+ + + + + +
+ + + +
+ + + +
+
+
+
+ + + + + + + + + + \ No newline at end of file diff --git a/0.6.1/CHANGELOG/index.html b/0.6.1/CHANGELOG/index.html new file mode 100644 index 000000000..ac26a031b --- /dev/null +++ b/0.6.1/CHANGELOG/index.html @@ -0,0 +1,2960 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + Changelog - EMMOntoPy + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + Skip to content + + +
+
+ +
+ + + + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + +

Changelog

+

v0.6.1 (2024-01-17)

+

Full Changelog

+

Closed issues:

+
    +
  • No tests for ontology.save #684
  • +
  • Allow using HermiT from ontoconvert #664
  • +
  • PrefLabel used by new_entity, and get_by_label even if it is not in the ontology #642
  • +
  • ontology(imported=True) returns all classes in world #640
  • +
  • excel2onto example doesn't come into the github pages documentation #626
  • +
  • owlready2 > 0.41 fails #624
  • +
  • get_by_label and get_by_label all force add label_annotations #621
  • +
  • hasPhysicalDimension convention has changed in EMMO-1.0.0-beta3 #347
  • +
+

Merged pull requests:

+ +

v0.6.0 (2023-06-19)

+

Full Changelog

+

Closed issues:

+
    +
  • pyparsing has been updated #629
  • +
+

Merged pull requests:

+
    +
  • Check prefLabels in imported ontologies only if asked for. #628 (francescalb)
  • +
+

v0.5.4 (2023-06-15)

+

Full Changelog

+

v0.5.3.2 (2023-06-15)

+

Full Changelog

+

Merged pull requests:

+
    +
  • remove warnings_as_errors in cd workflow introduced in 0.5.3 #625 (francescalb)
  • +
+

v0.5.3 (2023-06-12)

+

Full Changelog

+

v0.5.3.1 (2023-06-12)

+

Full Changelog

+

Closed issues:

+
    +
  • Extend new_entity to include properties #609
  • +
  • Add support for Python 3.11 #599
  • +
  • excelparser - enable object properties creation #587
  • +
  • If there are altLabels that match, get_by_label_all returns only the prefLabels. #511
  • +
  • excel2onto: implement other annotations #462
  • +
+

Merged pull requests:

+ +

v0.5.2 (2023-05-24)

+

Full Changelog

+

Fixed bugs:

+
    +
  • Auto-merge dependabot PRs workflow invalid #566
  • +
+

Closed issues:

+
    +
  • Point to excelparser api from the tools-page #593
  • +
  • BUG: pytest - missing remote file /0.5.0/electrochemicalquantities / ontology #589
  • +
  • Owlready 0.41 support ? #588
  • +
  • Allow space in labels #583
  • +
  • is_defined needs a better description #563
  • +
  • utils line 112 in get_iri_name link = "{lowerlabel}" vs "{label}" #562
  • +
  • ontograph - update colour deafults #559
  • +
  • ontograph - argument leafs should be leaves #558
  • +
  • ontograph - write out more examples on how to use it #557
  • +
  • ontograph --parents not working #556
  • +
  • test_graph2 is failing #555
  • +
  • Add client side redirection in generated html documentation #552
  • +
  • Typos in PR template #523
  • +
  • ontograph, read format from name #497
  • +
  • Harmonize get_descendants and get_ancestors #406
  • +
  • Review default colours and style in ontopy/graph.py #345
  • +
+

Merged pull requests:

+ +

v0.5.1 (2023-02-07)

+

Full Changelog

+

Fixed bugs:

+
    +
  • Use custom token for GitHub changelog generator #545
  • +
  • Avoid using Azure mirror for APT packages #541
  • +
+

Merged pull requests:

+ +

v0.5.0 (2023-02-06)

+

Full Changelog

+

Fixed bugs:

+
    +
  • LegacyVersion does not exist in packaging.version #540
  • +
  • ontodoc: Expect is_instance_of property to be iterable #506
  • +
  • Reinstate images/material.png #495
  • +
+

Closed issues:

+
    +
  • Newest pylint (2.15.4) has intriduced some new rules. #534
  • +
  • sync_attributes according to emmo convention regenerates a new iri even if it already has a valid one #525
  • +
  • Remove dependency on LegacyVersion of packaging #514
  • +
  • pytests are importing packaging 22.0 even though it is not allowed in requirements #513
  • +
  • ontodoc: adding annotations that are not strings fail #510
  • +
  • get_by_label_all only works after sync_attributes #502
  • +
  • excel2onto: support updating ontology #501
  • +
  • excel2onto: allow to use prefLabel already in imported ontologies #500
  • +
  • Drop Python 3.6 support - extend Python >3.7 support #486
  • +
  • Update pypi-release github action #482
  • +
  • Make workflows dispatchable #481
  • +
  • excel2onto: Read catalog file for imported ontology #474
  • +
  • Give option to write_catalog for writing relative paths #473
  • +
  • excel2onto: add choice of prefix for imported ontologies #467
  • +
+

Merged pull requests:

+ +

v0.4.0 (2022-10-04)

+

Full Changelog

+

Fixed bugs:

+
    +
  • Update repo files with new repo name #479
  • +
  • Pre-commit hook bandit failing #478
  • +
  • Fix publish/release workflow #476
  • +
  • excel2onto: not all relations are included in the generated ontology #457
  • +
  • Unexpected behaviour of get_unabbreviated_triples() #454
  • +
  • Edge without label crash the graph creation #397
  • +
+

Closed issues:

+
    +
  • excel2onto: restrictions does not allow for using "emmo:hasProcessOutput some xx" #464
  • +
  • EMMO is updated to beta4, and now documentation fails #440
  • +
  • some ObjectProperties from EMMO-beta-4.0 cause errors in OntoGraph #429
  • +
  • Excelparser does not write catalog file correctly #421
  • +
  • Add support for prefix #416
  • +
  • Pre.commit failed with ontology.py #415
  • +
  • visualization of EMMO based ontology #412
  • +
  • Avoid infinite recursion when loading catalog file #369
  • +
  • Excelparser: Automatize emmo-based? #335
  • +
  • What are the applications of EMMO for materials informatics? #325
  • +
  • Provide 'support' for same entities with different namespaces #128
  • +
  • Remove deprecated emmo/ontograph.py that uses pydot #103
  • +
+

Merged pull requests:

+ +

v0.3.1 (2022-05-08)

+

Full Changelog

+

Merged pull requests:

+ +

v0.3.0 (2022-05-05)

+

Full Changelog

+

Fixed bugs:

+
    +
  • Documentation is currently not building #407
  • +
  • Pytest is currently failing #384
  • +
  • permission denied when working with temporary file #313
  • +
+

Closed issues:

+
    +
  • Make get_descendants(levels=1) #403
  • +
  • Add functionality for setting name part of IRI to prefLabel #398
  • +
  • Generate excelsheet from ontology. #394
  • +
  • Return a list of the concepts that are disregarded during when converting from excel with -force argument #393
  • +
  • Demo - Broken ontology URLs #390
  • +
  • Excelparser: how to handle entities that already exist in one of the imported ontologies? #334
  • +
+

Merged pull requests:

+ +

v0.2.0 (2022-03-02)

+

Full Changelog

+

Implemented enhancements:

+
    +
  • spaces before or after word in prefLabel makes excelparser fail #332
  • +
  • Make EMMOntopy PyPi #268
  • +
  • Use pre-commit #243
  • +
  • Standard dunder/magic methods for Ontology #228
  • +
  • Update code styling and linting #223
  • +
  • Fix checking PR body & improve error message in CD #318 (CasperWA)
  • +
+

Fixed bugs:

+
    +
  • GH GraphQL type issue for auto-merge workflow #374
  • +
  • Missing warning for excel parser relations and problem with "nan" #365
  • +
  • Seting metadata in excelparser fails if there are no imported ontologies. #331
  • +
  • Edge-case fails CD workflow for dependabot #319
  • +
  • Ontodoc failing due to wrong rdflib import #306
  • +
  • Overwriting get_triples() method #280
  • +
  • OpenModel logo not loading in README #278
  • +
  • Disable FOAF test as xmlns.com is down #276
  • +
+

Closed issues:

+
    +
  • Use TEAM 4.0[bot] for GH Actions jobs #352
  • +
  • _get_triples_spo take argumens s, and p, not subject and predicate #350
  • +
  • Add --force to excelparser #333
  • +
  • Cannot load ontology in Windows. #328
  • +
  • make get_ontology accept 'PosixPath' #326
  • +
  • Make EMMOntoPy baseexception and basewarning #321
  • +
  • get_by_label crash if not str #311
  • +
  • make excel parser that creates and ontology from a filled excel file #302
  • +
  • Check out how to get version of ontology #299
  • +
  • Let ontology.new_entity acccept one or more parents directly #294
  • +
  • Make ManchesterSyntaxParser that returns Owlready2 #293
  • +
  • onto.new_entity should throw Error if label name consists of more than one word #290
  • +
  • ReadTheDocs #288
  • +
  • Add logo to README #287
  • +
  • Write EMMO-python is deprecated and link to EMMOtopy on PyPi #269
  • +
  • Consider MarkDown header styling #231
  • +
+

Merged pull requests:

+ +

v0.1.3 (2021-10-27)

+

Full Changelog

+

v0.1.2 (2021-10-27)

+

Full Changelog

+

v0.1.1 (2021-10-27)

+

Full Changelog

+

v0.1.0 (2021-10-27)

+

Full Changelog

+

Implemented enhancements:

+
    +
  • "Warning" Importing from collections #236
  • +
+

Fixed bugs:

+
    +
  • Loading ontologies that do not import skos fails #261
  • +
  • Fix documentation build warnings #250
  • +
  • Fix images in documentation #233
  • +
  • Circular reference from Owlready2 #210
  • +
+

Closed issues:

+
    +
  • Write up transfer from EMMOpython to EMMOntoPy i README.md #267
  • +
  • Add test to emmocheck for upcoming EMMO #257
  • +
  • Add packaging as dependency in requirements #255
  • +
  • Add CI check for building documentation #244
  • +
  • Add OpenModel as contributing project #237
  • +
  • Update public documentation to new framework #234
  • +
  • Automate documentation releases #232
  • +
  • Update name of EMMO to Elemental Multiperspective Material Ontology #230
  • +
  • Tidy up unittests #220
  • +
  • Remove importability of sub-factpluspluswrapper folders #213
  • +
  • Make function that automatically loads emmo #209
  • +
  • Require rdflib>5.0.0? #206
  • +
  • change package name #205
  • +
  • test_catalog fails because seraching for .owl in emmo/master #203
  • +
  • Consider using mike for versioned documentation #197
  • +
  • Add a test that checks that loading of non-EMMO based ontologies work - e.g. do not require skos:prefLabel #196
  • +
  • Setup Materials for MkDocs framework #195
  • +
  • Clean up demo, examples and docs #193
  • +
  • Formalize review process with checklists #190
  • +
  • funksjon ontology.add_class(label, parent) #183
  • +
+

Merged pull requests:

+ +

v1.0.1b (2021-07-01)

+

Full Changelog

+

Closed issues:

+
    +
  • Correct updating of catalog in ontology.load #188
  • +
+

Merged pull requests:

+ +

v1.0.1 (2021-07-01)

+

Full Changelog

+

Fixed bugs:

+
    +
  • Windows paths are not handled properly #147
  • +
+

Closed issues:

+
    +
  • Failing tests when lodaing battinfo #185
  • +
  • Fix dependatbot to 'wider' #182
  • +
  • Change to get_label instead of asstring in ontograph, emmodoc, ontodoc, be careful #158
  • +
  • licence does not work with metadata #157
  • +
  • ontograph with several roots fails #153
  • +
  • fix redudant getlabel, get_preferred_label, get_label #152
  • +
  • add --no-catalog and default as in emmocheck for ontograph #150
  • +
  • make tests for checking upgrade of Owlready2 #137
  • +
  • Add periodic_table to examples #130
  • +
  • Add support for simple property-based ontology annotations like dcterms:license #129
  • +
  • Update documentation of tools re reasoner #123
  • +
  • Ontograph: Include multiple parents/inheritance #86
  • +
+

Merged pull requests:

+ +

v1.0.0 (2021-03-25)

+

Full Changelog

+

Closed issues:

+
    +
  • Use rdflib in Ontology.save() to support more file formats #143
  • +
  • Tool for publishing domain ontologies #140
  • +
+

Merged pull requests:

+
    +
  • Save to turtle and ontology annotations (via the metadata attribute) #144 (jesper-friis)
  • +
  • Corrected configuration of exceptions for test_class_label test. #142 (jesper-friis)
  • +
+

v1.0.0-alpha-30 (2021-03-18)

+

Full Changelog

+

Merged pull requests:

+ +

v1.0.0-alpha-29 (2021-03-16)

+

Full Changelog

+

Implemented enhancements:

+
    +
  • Add Wu&Palmer measure #134
  • +
+

Closed issues:

+
    +
  • Convert-imported update in utils #138
  • +
+

Merged pull requests:

+ +

v1.0.0-alpha-28 (2021-03-09)

+

Full Changelog

+

Closed issues:

+
    +
  • Also use the catalog file to map web URLs, not only local files. #109
  • +
  • Check Error with Owlready2-0.26 #81
  • +
+

Merged pull requests:

+ +

v1.0.0-alpha-27 (2021-02-27)

+

Full Changelog

+

Merged pull requests:

+ +

v1.0.0-alpha-26 (2021-02-26)

+

Full Changelog

+

Closed issues:

+
    +
  • Make fact++ reasoner available and default in tools #122
  • +
  • Use PyPI token in publish workflow #118
  • +
  • Update publish workflow #115
  • +
  • do something #108
  • +
+

Merged pull requests:

+ +

v1.0.0-alpha-25 (2021-01-17)

+

Full Changelog

+

Closed issues:

+
    +
  • Update Dockerfile to install correct pandoc #99
  • +
  • Correct turtle serialisation #97
  • +
+

Merged pull requests:

+ +

v1.0.0-alpha-24 (2021-01-04)

+

Full Changelog

+

Merged pull requests:

+ +

v1.0.0-alpha-23 (2021-01-04)

+

Full Changelog

+

Closed issues:

+
    +
  • Fix loading imported ttl from web such that emmocheck works for crystallography.ttl #98
  • +
  • Add reasoning with FaCT++ #95
  • +
  • Correctly load ontologies like crystallography that imports both local and online sub-ontologies #91
  • +
  • Fix flake8 errors #88
  • +
  • Remove the .ttl namespace when loading domain-crystallography in EMMO-python #83
  • +
  • Add option of documenting imported ontologies in ontodoc and ontograph #82
  • +
  • Emmocheck fails if Physicaluantities and MeaurementsUnits are not imported from emmo. Make sure that it does not fail if whole of EMMO is not imported. #80
  • +
  • Ontograph: Make default root #79
  • +
  • Ontodoc: PDF is not generated, produces error. #76
  • +
  • AttributeError from ontodoc #70
  • +
  • Import emmo .ttl from emmo-repo.github.io #69
  • +
  • Unable to use the vertical interoperability demo .py files #66
  • +
+

Merged pull requests:

+ +

v1.0.0-alpha-22 (2020-12-21)

+

Full Changelog

+

Merged pull requests:

+
    +
  • Loading ttl both locally and importing from iri #75 (francescalb)
  • +
  • Added sync_python_names() and corrected handling of individuals in sync_attributes() #73 (jesper-friis)
  • +
  • Add preflabel to individuals declared in python #72 (jesper-friis)
  • +
+

v1.0.0-alpha-21b (2020-12-13)

+

Full Changelog

+

Merged pull requests:

+ +

v1.0.0-alpha-21 (2020-12-11)

+

Full Changelog

+

Merged pull requests:

+ +

v1.0.0-alpha-20b (2020-11-04)

+

Full Changelog

+

Merged pull requests:

+ +

v1.0.0-alpha-20 (2020-11-04)

+

Full Changelog

+

Merged pull requests:

+ +

v1.0.0-alpha-19 (2020-11-02)

+

Full Changelog

+

Merged pull requests:

+ +

v1.0.0-alpha-18 (2020-10-29)

+

Full Changelog

+

Merged pull requests:

+ +

v1.0.0-alpha-17 (2020-10-21)

+

Full Changelog

+

Merged pull requests:

+
    +
  • Added materials.EngineeredMaterial to namespace exception in emmocheck #55 (francescalb)
  • +
+

v1.0.0-alpha-16 (2020-10-20)

+

Full Changelog

+

Closed issues:

+
    +
  • Include all annotations in .get_annotations() #50
  • +
+

Merged pull requests:

+ +

v1.0.0-alpha-15 (2020-09-25)

+

Full Changelog

+

Merged pull requests:

+ +

v1.0.0-alpha-13 (2020-09-19)

+

Full Changelog

+

Closed issues:

+
    +
  • Not immediately installable with pip #45
  • +
+

Merged pull requests:

+ +

v1.0.0-alpha-11 (2020-08-12)

+

Full Changelog

+

Merged pull requests:

+ +

v1.0.0-alpha-10 (2020-04-27)

+

Full Changelog

+

Merged pull requests:

+
    +
  • Added exceptions to emmocheck "test_number_of_labels" #39 (jesper-friis)
  • +
+

v1.0.0-alpha-9 (2020-04-13)

+

Full Changelog

+

Closed issues:

+
    +
  • Enhance ontology.sync_attributes() to also update class names #10
  • +
  • Add support for the FaCT++ reasoner #9
  • +
+

Merged pull requests:

+ +

v1.0.0-alpha-8 (2020-03-22)

+

Full Changelog

+

Merged pull requests:

+ +

v1.0.0-alpha-5 (2020-03-18)

+

Full Changelog

+

Implemented enhancements:

+
    +
  • Make EMMO-python available on pypi (installable with pip) #7
  • +
+

Merged pull requests:

+ +

v1.0.0-alpha-3 (2020-02-16)

+

Full Changelog

+

v1.0.0-alpha-2 (2020-01-11)

+

Full Changelog

+

v1.0.0-alpha-1 (2020-01-11)

+

Full Changelog

+

Closed issues:

+
    +
  • Missing https://emmc.info/emmo-inferred #16
  • +
  • setup.py #15
  • +
  • Fix emmodoc #6
  • +
+

v1.0.0-alpha (2020-01-08)

+

Full Changelog

+

Closed issues:

+
    +
  • Update the user case ontology #3
  • +
+

Merged pull requests:

+ +

v0.9.9 (2019-07-14)

+

Full Changelog

+

Closed issues:

+
    +
  • Homogenise call to reasoner in emmo.Ontology.sync_reasoner() #5
  • +
+

Merged pull requests:

+ +

* This Changelog was automatically generated by github_changelog_generator

+ + + + + + + + + + + + + +
+
+ + + + + +
+ + + +
+ + + +
+
+
+
+ + + + + + + + + + \ No newline at end of file diff --git a/0.6.1/LICENSE/index.html b/0.6.1/LICENSE/index.html new file mode 100644 index 000000000..a62de8ff1 --- /dev/null +++ b/0.6.1/LICENSE/index.html @@ -0,0 +1,1313 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + License - EMMOntoPy + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+
+ +
+ + + + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + +

License

+ +

Copyright 2019-2022 SINTEF

+

Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met:

+
    +
  1. +

    Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer.

    +
  2. +
  3. +

    Redistributions in binary form must reproduce the above copyright +notice, this list of conditions and the following disclaimer in the +documentation and/or other materials provided with the distribution.

    +
  4. +
  5. +

    Neither the name of the copyright holder nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission.

    +
  6. +
+

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

+ + + + + + + + + + + + + +
+
+ + + + + +
+ + + +
+ + + +
+
+
+
+ + + + + + + + + + \ No newline at end of file diff --git a/0.6.1/api_reference/emmopy/emmocheck/index.html b/0.6.1/api_reference/emmopy/emmocheck/index.html new file mode 100644 index 000000000..07a4e6a3b --- /dev/null +++ b/0.6.1/api_reference/emmopy/emmocheck/index.html @@ -0,0 +1,3405 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + emmocheck - EMMOntoPy + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + Skip to content + + +
+
+ +
+ + + + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + + + + + +
+
+ + + + +

emmocheck

+ + +
+ + +
+ +

A module for testing an ontology against conventions defined for EMMO.

+

A YAML file can be provided with additional test configurations.

+

Example configuration file:

+
test_unit_dimensions:
+  exceptions:
+    - myunits.MyUnitCategory1
+    - myunits.MyUnitCategory2
+
+skip:
+  - name_of_test_to_skip
+
+enable:
+  - name_of_test_to_enable
+
+ + + +
+ + + + + + + +
+ + + +

+ +TestEMMOConventions + + + +

+ +
+ +

Base class for testing an ontology against EMMO conventions.

+ +
+ Source code in emmopy/emmocheck.py +
class TestEMMOConventions(unittest.TestCase):
+    """Base class for testing an ontology against EMMO conventions."""
+
+    config = {}  # configurations
+
+    def get_config(self, string, default=None):
+        """Returns the configuration specified by `string`.
+
+        If configuration is not found in the configuration file, `default` is
+        returned.
+
+        Sub-configurations can be accessed by separating the components with
+        dots, like "test_namespace.exceptions".
+        """
+        result = self.config
+        try:
+            for token in string.split("."):
+                result = result[token]
+        except KeyError:
+            return default
+        return result
+
+
+ + + +
+ + + + + + + + + + +
+ + + +

+get_config(self, string, default=None) + + +

+ +
+ +

Returns the configuration specified by string.

+

If configuration is not found in the configuration file, default is +returned.

+

Sub-configurations can be accessed by separating the components with +dots, like "test_namespace.exceptions".

+ +
+ Source code in emmopy/emmocheck.py +
def get_config(self, string, default=None):
+    """Returns the configuration specified by `string`.
+
+    If configuration is not found in the configuration file, `default` is
+    returned.
+
+    Sub-configurations can be accessed by separating the components with
+    dots, like "test_namespace.exceptions".
+    """
+    result = self.config
+    try:
+        for token in string.split("."):
+            result = result[token]
+    except KeyError:
+        return default
+    return result
+
+
+
+ +
+ + + + + +
+ +
+ +
+ + + +
+ + + +

+ +TestFunctionalEMMOConventions + + + +

+ +
+ +

Test functional EMMO conventions.

+ +
+ Source code in emmopy/emmocheck.py +
class TestFunctionalEMMOConventions(TestEMMOConventions):
+    """Test functional EMMO conventions."""
+
+    def test_unit_dimension(self):
+        """Check that all measurement units have a physical dimension.
+
+        Configurations:
+            exceptions - full class names of classes to ignore.
+        """
+        exceptions = set(
+            (
+                "metrology.MultipleUnit",
+                "metrology.SubMultipleUnit",
+                "metrology.OffSystemUnit",
+                "metrology.PrefixedUnit",
+                "metrology.NonPrefixedUnit",
+                "metrology.SpecialUnit",
+                "metrology.DerivedUnit",
+                "metrology.BaseUnit",
+                "metrology.UnitSymbol",
+                "siunits.SICoherentDerivedUnit",
+                "siunits.SINonCoherentDerivedUnit",
+                "siunits.SISpecialUnit",
+                "siunits.SICoherentUnit",
+                "siunits.SIPrefixedUnit",
+                "siunits.SIBaseUnit",
+                "siunits.SIUnitSymbol",
+                "siunits.SIUnit",
+                "emmo.MultipleUnit",
+                "emmo.SubMultipleUnit",
+                "emmo.OffSystemUnit",
+                "emmo.PrefixedUnit",
+                "emmo.NonPrefixedUnit",
+                "emmo.SpecialUnit",
+                "emmo.DerivedUnit",
+                "emmo.BaseUnit",
+                "emmo.UnitSymbol",
+                "emmo.SIAccepted",
+                "emmo.SICoherentDerivedUnit",
+                "emmo.SINonCoherentDerivedUnit",
+                "emmo.SISpecialUnit",
+                "emmo.SICoherentUnit",
+                "emmo.SIPrefixedUnit",
+                "emmo.SIBaseUnit",
+                "emmo.SIUnitSymbol",
+                "emmo.SIUnit",
+            )
+        )
+        if not hasattr(self.onto, "MeasurementUnit"):
+            return
+        exceptions.update(self.get_config("test_unit_dimension.exceptions", ()))
+        regex = re.compile(r"^(emmo|metrology).hasDimensionString.value\(.*\)$")
+        classes = set(self.onto.classes(self.check_imported))
+        for cls in self.onto.MeasurementUnit.descendants():
+            if not self.check_imported and cls not in classes:
+                continue
+            # Assume that actual units are not subclassed
+            if not list(cls.subclasses()) and repr(cls) not in exceptions:
+                with self.subTest(cls=cls, label=get_label(cls)):
+                    self.assertTrue(
+                        any(
+                            regex.match(repr(r))
+                            for r in cls.get_indirect_is_a()
+                        ),
+                        msg=cls,
+                    )
+
+    def test_quantity_dimension_beta3(self):
+        """Check that all quantities have a physicalDimension annotation.
+
+        Note: this test will be deprecated when isq is moved to emmo/domain.
+
+        Configurations:
+            exceptions - full class names of classes to ignore.
+        """
+        exceptions = set(
+            (
+                "properties.ModelledQuantitativeProperty",
+                "properties.MeasuredQuantitativeProperty",
+                "properties.ConventionalQuantitativeProperty",
+                "metrology.QuantitativeProperty",
+                "metrology.Quantity",
+                "metrology.OrdinalQuantity",
+                "metrology.BaseQuantity",
+                "metrology.PhysicalConstant",
+                "metrology.PhysicalQuantity",
+                "metrology.ExactConstant",
+                "metrology.MeasuredConstant",
+                "metrology.DerivedQuantity",
+                "isq.ISQBaseQuantity",
+                "isq.InternationalSystemOfQuantity",
+                "isq.ISQDerivedQuantity",
+                "isq.SIExactConstant",
+                "emmo.ModelledQuantitativeProperty",
+                "emmo.MeasuredQuantitativeProperty",
+                "emmo.ConventionalQuantitativeProperty",
+                "emmo.QuantitativeProperty",
+                "emmo.Quantity",
+                "emmo.OrdinalQuantity",
+                "emmo.BaseQuantity",
+                "emmo.PhysicalConstant",
+                "emmo.PhysicalQuantity",
+                "emmo.ExactConstant",
+                "emmo.MeasuredConstant",
+                "emmo.DerivedQuantity",
+                "emmo.ISQBaseQuantity",
+                "emmo.InternationalSystemOfQuantity",
+                "emmo.ISQDerivedQuantity",
+                "emmo.SIExactConstant",
+                "emmo.NonSIUnits",
+                "emmo.StandardizedPhysicalQuantity",
+                "emmo.CategorizedPhysicalQuantity",
+                "emmo.AtomicAndNuclear",
+                "emmo.Defined",
+                "emmo.Electromagnetic",
+                "emmo.FrequentlyUsed",
+                "emmo.PhysicoChemical",
+                "emmo.ChemicalCompositionQuantity",
+                "emmo.Universal",
+            )
+        )
+        if not hasattr(self.onto, "PhysicalQuantity"):
+            return
+        exceptions.update(
+            self.get_config("test_quantity_dimension.exceptions", ())
+        )
+        regex = re.compile(
+            "^T([+-][1-9]|0) L([+-][1-9]|0) M([+-][1-9]|0) I([+-][1-9]|0) "
+            "(H|Θ)([+-][1-9]|0) N([+-][1-9]|0) J([+-][1-9]|0)$"
+        )
+        classes = set(self.onto.classes(self.check_imported))
+        for cls in self.onto.PhysicalQuantity.descendants():
+            if not self.check_imported and cls not in classes:
+                continue
+            if repr(cls) not in exceptions:
+                with self.subTest(cls=cls, label=get_label(cls)):
+                    anno = cls.get_annotations()
+                    self.assertIn("physicalDimension", anno, msg=cls)
+                    physdim = anno["physicalDimension"].first()
+                    self.assertRegex(physdim, regex, msg=cls)
+
+    def test_quantity_dimension(self):
+        """Check that all quantities have a physicalDimension.
+
+        Note: this test will be deprecated when isq is moved to emmo/domain.
+
+        Configurations:
+            exceptions - full class names of classes to ignore.
+        """
+        # pylint: disable=invalid-name
+        exceptions = set(
+            (
+                "properties.ModelledQuantitativeProperty",
+                "properties.MeasuredQuantitativeProperty",
+                "properties.ConventionalQuantitativeProperty",
+                "metrology.QuantitativeProperty",
+                "metrology.Quantity",
+                "metrology.OrdinalQuantity",
+                "metrology.BaseQuantity",
+                "metrology.PhysicalConstant",
+                "metrology.PhysicalQuantity",
+                "metrology.ExactConstant",
+                "metrology.MeasuredConstant",
+                "metrology.DerivedQuantity",
+                "isq.ISQBaseQuantity",
+                "isq.InternationalSystemOfQuantity",
+                "isq.ISQDerivedQuantity",
+                "isq.SIExactConstant",
+                "emmo.ModelledQuantitativeProperty",
+                "emmo.MeasuredQuantitativeProperty",
+                "emmo.ConventionalQuantitativeProperty",
+                "emmo.QuantitativeProperty",
+                "emmo.Quantity",
+                "emmo.OrdinalQuantity",
+                "emmo.BaseQuantity",
+                "emmo.PhysicalConstant",
+                "emmo.PhysicalQuantity",
+                "emmo.ExactConstant",
+                "emmo.MeasuredConstant",
+                "emmo.DerivedQuantity",
+                "emmo.ISQBaseQuantity",
+                "emmo.InternationalSystemOfQuantity",
+                "emmo.ISQDerivedQuantity",
+                "emmo.SIExactConstant",
+                "emmo.NonSIUnits",
+                "emmo.StandardizedPhysicalQuantity",
+                "emmo.CategorizedPhysicalQuantity",
+                "emmo.ISO80000Categorised",
+                "emmo.AtomicAndNuclear",
+                "emmo.Defined",
+                "emmo.Electromagnetic",
+                "emmo.FrequentlyUsed",
+                "emmo.ChemicalCompositionQuantity",
+                "emmo.EquilibriumConstant",  # physical dimension may change
+                "emmo.Solubility",
+                "emmo.Universal",
+                "emmo.Intensive",
+                "emmo.Extensive",
+                "emmo.Concentration",
+            )
+        )
+        if not hasattr(self.onto, "PhysicalQuantity"):
+            return
+        exceptions.update(
+            self.get_config("test_quantity_dimension.exceptions", ())
+        )
+        classes = set(self.onto.classes(self.check_imported))
+        for cls in self.onto.PhysicalQuantity.descendants():
+            if not self.check_imported and cls not in classes:
+                continue
+            if issubclass(cls, self.onto.ISO80000Categorised):
+                continue
+            if repr(cls) not in exceptions:
+                with self.subTest(cls=cls, label=get_label(cls)):
+                    for r in cls.get_indirect_is_a():
+                        if isinstance(r, owlready2.Restriction) and repr(
+                            r
+                        ).startswith("emmo.hasMeasurementUnit.some"):
+                            self.assertTrue(
+                                issubclass(
+                                    r.value,
+                                    (
+                                        self.onto.DimensionalUnit,
+                                        self.onto.DimensionlessUnit,
+                                    ),
+                                )
+                            )
+                            break
+                    else:
+                        self.assertTrue(
+                            issubclass(cls, self.onto.ISQDimensionlessQuantity)
+                        )
+
+    def test_dimensional_unit(self):
+        """Check correct syntax of dimension string of dimensional units."""
+
+        # This test requires that the ontology has imported SIDimensionalUnit
+        if "SIDimensionalUnit" not in self.onto:
+            self.skipTest("SIDimensionalUnit is not imported")
+
+        # pylint: disable=invalid-name
+        regex = re.compile(
+            "^T([+-][1-9][0-9]*|0) L([+-][1-9]|0) M([+-][1-9]|0) "
+            "I([+-][1-9]|0) (H|Θ)([+-][1-9]|0) N([+-][1-9]|0) "
+            "J([+-][1-9]|0)$"
+        )
+        for cls in self.onto.SIDimensionalUnit.__subclasses__():
+            with self.subTest(cls=cls, label=get_label(cls)):
+                self.assertEqual(len(cls.equivalent_to), 1)
+                r = cls.equivalent_to[0]
+                self.assertIsInstance(r, owlready2.Restriction)
+                self.assertRegex(r.value, regex)
+
+    def test_physical_quantity_dimension(self):
+        """Check that all physical quantities have `hasPhysicalDimension`.
+
+        Note: this test will fail before isq is moved to emmo/domain.
+
+        Configurations:
+            exceptions - full class names of classes to ignore.
+
+        """
+        exceptions = set(
+            (
+                "emmo.ModelledQuantitativeProperty",
+                "emmo.MeasuredQuantitativeProperty",
+                "emmo.ConventionalQuantitativeProperty",
+                "emmo.QuantitativeProperty",
+                "emmo.BaseQuantity",
+                "emmo.PhysicalConstant",
+                "emmo.PhysicalQuantity",
+                "emmo.ExactConstant",
+                "emmo.MeasuredConstant",
+                "emmo.DerivedQuantity",
+                "emmo.ISQBaseQuantity",
+                "emmo.InternationalSystemOfQuantity",
+                "emmo.ISQDerivedQuantity",
+                "emmo.SIExactConstant",
+                "emmo.NonSIUnits",
+                "emmo.StandardizedPhysicalQuantity",
+                "emmo.CategorizedPhysicalQuantity",
+                "emmo.AtomicAndNuclearPhysicsQuantity",
+                "emmo.ThermodynamicalQuantity",
+                "emmo.LightAndRadiationQuantity",
+                "emmo.SpaceAndTimeQuantity",
+                "emmo.AcousticQuantity",
+                "emmo.PhysioChememicalQuantity",
+                "emmo.ElectromagneticQuantity",
+                "emmo.MechanicalQuantity",
+                "emmo.CondensedMatterPhysicsQuantity",
+                "emmo.ChemicalCompositionQuantity",
+                "emmo.Extensive",
+                "emmo.Intensive",
+            )
+        )
+        if not hasattr(self.onto, "PhysicalQuantity"):
+            return
+        exceptions.update(
+            self.get_config("test_physical_quantity_dimension.exceptions", ())
+        )
+        classes = set(self.onto.classes(self.check_imported))
+        for cls in self.onto.PhysicalQuantity.descendants():
+            if not self.check_imported and cls not in classes:
+                continue
+            if repr(cls) not in exceptions:
+                with self.subTest(cls=cls, label=get_label(cls)):
+                    try:
+                        class_props = cls.INDIRECT_get_class_properties()
+                    except AttributeError:
+                        # The INDIRECT_get_class_properties() method
+                        # does not support inverse properties.  Build
+                        # class_props manually...
+                        class_props = set()
+                        for _ in cls.mro():
+                            if hasattr(_, "is_a"):
+                                class_props.update(
+                                    [
+                                        restriction.property
+                                        for restriction in _.is_a
+                                        if isinstance(
+                                            restriction, owlready2.Restriction
+                                        )
+                                    ]
+                                )
+
+                    self.assertIn(
+                        self.onto.hasPhysicalDimension, class_props, msg=cls
+                    )
+
+    def test_namespace(self):
+        """Check that all IRIs are namespaced after their (sub)ontology.
+
+        Configurations:
+            exceptions - full name of entities to ignore.
+        """
+        exceptions = set(
+            (
+                "owl.qualifiedCardinality",
+                "owl.minQualifiedCardinality",
+                "terms.creator",
+                "terms.contributor",
+                "terms.publisher",
+                "terms.title",
+                "terms.license",
+                "terms.abstract",
+                "core.prefLabel",
+                "core.altLabel",
+                "core.hiddenLabel",
+                "mereotopology.Item",
+                "manufacturing.EngineeredMaterial",
+            )
+        )
+        exceptions.update(self.get_config("test_namespace.exceptions", ()))
+
+        def checker(onto, ignore_namespace):
+            if list(
+                filter(onto.base_iri.strip("#").endswith, self.ignore_namespace)
+            ):
+                print(f"Skipping namespace: {onto.base_iri}")
+                return
+            entities = itertools.chain(
+                onto.classes(),
+                onto.object_properties(),
+                onto.data_properties(),
+                onto.individuals(),
+                onto.annotation_properties(),
+            )
+            for entity in entities:
+                if entity not in visited and repr(entity) not in exceptions:
+                    visited.add(entity)
+                    with self.subTest(
+                        iri=entity.iri,
+                        base_iri=onto.base_iri,
+                        entity=repr(entity),
+                    ):
+                        self.assertTrue(
+                            entity.iri.endswith(entity.name),
+                            msg=(
+                                "the final part of entity IRIs must be their "
+                                "name"
+                            ),
+                        )
+                        self.assertEqual(
+                            entity.iri,
+                            onto.base_iri + entity.name,
+                            msg=(
+                                f"IRI {entity.iri!r} does not correspond to "
+                                f"module namespace: {onto.base_iri!r}"
+                            ),
+                        )
+
+            if self.check_imported:
+                for imp_onto in onto.imported_ontologies:
+                    if imp_onto not in visited_onto:
+                        visited_onto.add(imp_onto)
+                        checker(imp_onto, ignore_namespace)
+
+        visited = set()
+        visited_onto = set()
+        checker(self.onto, self.ignore_namespace)
+
+
+ + + +
+ + + + + + + + + +
+ + + +

+test_dimensional_unit(self) + + +

+ +
+ +

Check correct syntax of dimension string of dimensional units.

+ +
+ Source code in emmopy/emmocheck.py +
def test_dimensional_unit(self):
+    """Check correct syntax of dimension string of dimensional units."""
+
+    # This test requires that the ontology has imported SIDimensionalUnit
+    if "SIDimensionalUnit" not in self.onto:
+        self.skipTest("SIDimensionalUnit is not imported")
+
+    # pylint: disable=invalid-name
+    regex = re.compile(
+        "^T([+-][1-9][0-9]*|0) L([+-][1-9]|0) M([+-][1-9]|0) "
+        "I([+-][1-9]|0) (H|Θ)([+-][1-9]|0) N([+-][1-9]|0) "
+        "J([+-][1-9]|0)$"
+    )
+    for cls in self.onto.SIDimensionalUnit.__subclasses__():
+        with self.subTest(cls=cls, label=get_label(cls)):
+            self.assertEqual(len(cls.equivalent_to), 1)
+            r = cls.equivalent_to[0]
+            self.assertIsInstance(r, owlready2.Restriction)
+            self.assertRegex(r.value, regex)
+
+
+
+ +
+ + + +
+ + + +

+test_namespace(self) + + +

+ +
+ +

Check that all IRIs are namespaced after their (sub)ontology.

+
+

Configurations

+

exceptions - full name of entities to ignore.

+
+ +
+ Source code in emmopy/emmocheck.py +
def test_namespace(self):
+    """Check that all IRIs are namespaced after their (sub)ontology.
+
+    Configurations:
+        exceptions - full name of entities to ignore.
+    """
+    exceptions = set(
+        (
+            "owl.qualifiedCardinality",
+            "owl.minQualifiedCardinality",
+            "terms.creator",
+            "terms.contributor",
+            "terms.publisher",
+            "terms.title",
+            "terms.license",
+            "terms.abstract",
+            "core.prefLabel",
+            "core.altLabel",
+            "core.hiddenLabel",
+            "mereotopology.Item",
+            "manufacturing.EngineeredMaterial",
+        )
+    )
+    exceptions.update(self.get_config("test_namespace.exceptions", ()))
+
+    def checker(onto, ignore_namespace):
+        if list(
+            filter(onto.base_iri.strip("#").endswith, self.ignore_namespace)
+        ):
+            print(f"Skipping namespace: {onto.base_iri}")
+            return
+        entities = itertools.chain(
+            onto.classes(),
+            onto.object_properties(),
+            onto.data_properties(),
+            onto.individuals(),
+            onto.annotation_properties(),
+        )
+        for entity in entities:
+            if entity not in visited and repr(entity) not in exceptions:
+                visited.add(entity)
+                with self.subTest(
+                    iri=entity.iri,
+                    base_iri=onto.base_iri,
+                    entity=repr(entity),
+                ):
+                    self.assertTrue(
+                        entity.iri.endswith(entity.name),
+                        msg=(
+                            "the final part of entity IRIs must be their "
+                            "name"
+                        ),
+                    )
+                    self.assertEqual(
+                        entity.iri,
+                        onto.base_iri + entity.name,
+                        msg=(
+                            f"IRI {entity.iri!r} does not correspond to "
+                            f"module namespace: {onto.base_iri!r}"
+                        ),
+                    )
+
+        if self.check_imported:
+            for imp_onto in onto.imported_ontologies:
+                if imp_onto not in visited_onto:
+                    visited_onto.add(imp_onto)
+                    checker(imp_onto, ignore_namespace)
+
+    visited = set()
+    visited_onto = set()
+    checker(self.onto, self.ignore_namespace)
+
+
+
+ +
+ + + +
+ + + +

+test_physical_quantity_dimension(self) + + +

+ +
+ +

Check that all physical quantities have hasPhysicalDimension.

+

Note: this test will fail before isq is moved to emmo/domain.

+
+

Configurations

+

exceptions - full class names of classes to ignore.

+
+ +
+ Source code in emmopy/emmocheck.py +
def test_physical_quantity_dimension(self):
+    """Check that all physical quantities have `hasPhysicalDimension`.
+
+    Note: this test will fail before isq is moved to emmo/domain.
+
+    Configurations:
+        exceptions - full class names of classes to ignore.
+
+    """
+    exceptions = set(
+        (
+            "emmo.ModelledQuantitativeProperty",
+            "emmo.MeasuredQuantitativeProperty",
+            "emmo.ConventionalQuantitativeProperty",
+            "emmo.QuantitativeProperty",
+            "emmo.BaseQuantity",
+            "emmo.PhysicalConstant",
+            "emmo.PhysicalQuantity",
+            "emmo.ExactConstant",
+            "emmo.MeasuredConstant",
+            "emmo.DerivedQuantity",
+            "emmo.ISQBaseQuantity",
+            "emmo.InternationalSystemOfQuantity",
+            "emmo.ISQDerivedQuantity",
+            "emmo.SIExactConstant",
+            "emmo.NonSIUnits",
+            "emmo.StandardizedPhysicalQuantity",
+            "emmo.CategorizedPhysicalQuantity",
+            "emmo.AtomicAndNuclearPhysicsQuantity",
+            "emmo.ThermodynamicalQuantity",
+            "emmo.LightAndRadiationQuantity",
+            "emmo.SpaceAndTimeQuantity",
+            "emmo.AcousticQuantity",
+            "emmo.PhysioChememicalQuantity",
+            "emmo.ElectromagneticQuantity",
+            "emmo.MechanicalQuantity",
+            "emmo.CondensedMatterPhysicsQuantity",
+            "emmo.ChemicalCompositionQuantity",
+            "emmo.Extensive",
+            "emmo.Intensive",
+        )
+    )
+    if not hasattr(self.onto, "PhysicalQuantity"):
+        return
+    exceptions.update(
+        self.get_config("test_physical_quantity_dimension.exceptions", ())
+    )
+    classes = set(self.onto.classes(self.check_imported))
+    for cls in self.onto.PhysicalQuantity.descendants():
+        if not self.check_imported and cls not in classes:
+            continue
+        if repr(cls) not in exceptions:
+            with self.subTest(cls=cls, label=get_label(cls)):
+                try:
+                    class_props = cls.INDIRECT_get_class_properties()
+                except AttributeError:
+                    # The INDIRECT_get_class_properties() method
+                    # does not support inverse properties.  Build
+                    # class_props manually...
+                    class_props = set()
+                    for _ in cls.mro():
+                        if hasattr(_, "is_a"):
+                            class_props.update(
+                                [
+                                    restriction.property
+                                    for restriction in _.is_a
+                                    if isinstance(
+                                        restriction, owlready2.Restriction
+                                    )
+                                ]
+                            )
+
+                self.assertIn(
+                    self.onto.hasPhysicalDimension, class_props, msg=cls
+                )
+
+
+
+ +
+ + + +
+ + + +

+test_quantity_dimension(self) + + +

+ +
+ +

Check that all quantities have a physicalDimension.

+

Note: this test will be deprecated when isq is moved to emmo/domain.

+
+

Configurations

+

exceptions - full class names of classes to ignore.

+
+ +
+ Source code in emmopy/emmocheck.py +
def test_quantity_dimension(self):
+    """Check that all quantities have a physicalDimension.
+
+    Note: this test will be deprecated when isq is moved to emmo/domain.
+
+    Configurations:
+        exceptions - full class names of classes to ignore.
+    """
+    # pylint: disable=invalid-name
+    exceptions = set(
+        (
+            "properties.ModelledQuantitativeProperty",
+            "properties.MeasuredQuantitativeProperty",
+            "properties.ConventionalQuantitativeProperty",
+            "metrology.QuantitativeProperty",
+            "metrology.Quantity",
+            "metrology.OrdinalQuantity",
+            "metrology.BaseQuantity",
+            "metrology.PhysicalConstant",
+            "metrology.PhysicalQuantity",
+            "metrology.ExactConstant",
+            "metrology.MeasuredConstant",
+            "metrology.DerivedQuantity",
+            "isq.ISQBaseQuantity",
+            "isq.InternationalSystemOfQuantity",
+            "isq.ISQDerivedQuantity",
+            "isq.SIExactConstant",
+            "emmo.ModelledQuantitativeProperty",
+            "emmo.MeasuredQuantitativeProperty",
+            "emmo.ConventionalQuantitativeProperty",
+            "emmo.QuantitativeProperty",
+            "emmo.Quantity",
+            "emmo.OrdinalQuantity",
+            "emmo.BaseQuantity",
+            "emmo.PhysicalConstant",
+            "emmo.PhysicalQuantity",
+            "emmo.ExactConstant",
+            "emmo.MeasuredConstant",
+            "emmo.DerivedQuantity",
+            "emmo.ISQBaseQuantity",
+            "emmo.InternationalSystemOfQuantity",
+            "emmo.ISQDerivedQuantity",
+            "emmo.SIExactConstant",
+            "emmo.NonSIUnits",
+            "emmo.StandardizedPhysicalQuantity",
+            "emmo.CategorizedPhysicalQuantity",
+            "emmo.ISO80000Categorised",
+            "emmo.AtomicAndNuclear",
+            "emmo.Defined",
+            "emmo.Electromagnetic",
+            "emmo.FrequentlyUsed",
+            "emmo.ChemicalCompositionQuantity",
+            "emmo.EquilibriumConstant",  # physical dimension may change
+            "emmo.Solubility",
+            "emmo.Universal",
+            "emmo.Intensive",
+            "emmo.Extensive",
+            "emmo.Concentration",
+        )
+    )
+    if not hasattr(self.onto, "PhysicalQuantity"):
+        return
+    exceptions.update(
+        self.get_config("test_quantity_dimension.exceptions", ())
+    )
+    classes = set(self.onto.classes(self.check_imported))
+    for cls in self.onto.PhysicalQuantity.descendants():
+        if not self.check_imported and cls not in classes:
+            continue
+        if issubclass(cls, self.onto.ISO80000Categorised):
+            continue
+        if repr(cls) not in exceptions:
+            with self.subTest(cls=cls, label=get_label(cls)):
+                for r in cls.get_indirect_is_a():
+                    if isinstance(r, owlready2.Restriction) and repr(
+                        r
+                    ).startswith("emmo.hasMeasurementUnit.some"):
+                        self.assertTrue(
+                            issubclass(
+                                r.value,
+                                (
+                                    self.onto.DimensionalUnit,
+                                    self.onto.DimensionlessUnit,
+                                ),
+                            )
+                        )
+                        break
+                else:
+                    self.assertTrue(
+                        issubclass(cls, self.onto.ISQDimensionlessQuantity)
+                    )
+
+
+
+ +
+ + + +
+ + + +

+test_quantity_dimension_beta3(self) + + +

+ +
+ +

Check that all quantities have a physicalDimension annotation.

+

Note: this test will be deprecated when isq is moved to emmo/domain.

+
+

Configurations

+

exceptions - full class names of classes to ignore.

+
+ +
+ Source code in emmopy/emmocheck.py +
def test_quantity_dimension_beta3(self):
+    """Check that all quantities have a physicalDimension annotation.
+
+    Note: this test will be deprecated when isq is moved to emmo/domain.
+
+    Configurations:
+        exceptions - full class names of classes to ignore.
+    """
+    exceptions = set(
+        (
+            "properties.ModelledQuantitativeProperty",
+            "properties.MeasuredQuantitativeProperty",
+            "properties.ConventionalQuantitativeProperty",
+            "metrology.QuantitativeProperty",
+            "metrology.Quantity",
+            "metrology.OrdinalQuantity",
+            "metrology.BaseQuantity",
+            "metrology.PhysicalConstant",
+            "metrology.PhysicalQuantity",
+            "metrology.ExactConstant",
+            "metrology.MeasuredConstant",
+            "metrology.DerivedQuantity",
+            "isq.ISQBaseQuantity",
+            "isq.InternationalSystemOfQuantity",
+            "isq.ISQDerivedQuantity",
+            "isq.SIExactConstant",
+            "emmo.ModelledQuantitativeProperty",
+            "emmo.MeasuredQuantitativeProperty",
+            "emmo.ConventionalQuantitativeProperty",
+            "emmo.QuantitativeProperty",
+            "emmo.Quantity",
+            "emmo.OrdinalQuantity",
+            "emmo.BaseQuantity",
+            "emmo.PhysicalConstant",
+            "emmo.PhysicalQuantity",
+            "emmo.ExactConstant",
+            "emmo.MeasuredConstant",
+            "emmo.DerivedQuantity",
+            "emmo.ISQBaseQuantity",
+            "emmo.InternationalSystemOfQuantity",
+            "emmo.ISQDerivedQuantity",
+            "emmo.SIExactConstant",
+            "emmo.NonSIUnits",
+            "emmo.StandardizedPhysicalQuantity",
+            "emmo.CategorizedPhysicalQuantity",
+            "emmo.AtomicAndNuclear",
+            "emmo.Defined",
+            "emmo.Electromagnetic",
+            "emmo.FrequentlyUsed",
+            "emmo.PhysicoChemical",
+            "emmo.ChemicalCompositionQuantity",
+            "emmo.Universal",
+        )
+    )
+    if not hasattr(self.onto, "PhysicalQuantity"):
+        return
+    exceptions.update(
+        self.get_config("test_quantity_dimension.exceptions", ())
+    )
+    regex = re.compile(
+        "^T([+-][1-9]|0) L([+-][1-9]|0) M([+-][1-9]|0) I([+-][1-9]|0) "
+        "(H|Θ)([+-][1-9]|0) N([+-][1-9]|0) J([+-][1-9]|0)$"
+    )
+    classes = set(self.onto.classes(self.check_imported))
+    for cls in self.onto.PhysicalQuantity.descendants():
+        if not self.check_imported and cls not in classes:
+            continue
+        if repr(cls) not in exceptions:
+            with self.subTest(cls=cls, label=get_label(cls)):
+                anno = cls.get_annotations()
+                self.assertIn("physicalDimension", anno, msg=cls)
+                physdim = anno["physicalDimension"].first()
+                self.assertRegex(physdim, regex, msg=cls)
+
+
+
+ +
+ + + +
+ + + +

+test_unit_dimension(self) + + +

+ +
+ +

Check that all measurement units have a physical dimension.

+
+

Configurations

+

exceptions - full class names of classes to ignore.

+
+ +
+ Source code in emmopy/emmocheck.py +
def test_unit_dimension(self):
+    """Check that all measurement units have a physical dimension.
+
+    Configurations:
+        exceptions - full class names of classes to ignore.
+    """
+    exceptions = set(
+        (
+            "metrology.MultipleUnit",
+            "metrology.SubMultipleUnit",
+            "metrology.OffSystemUnit",
+            "metrology.PrefixedUnit",
+            "metrology.NonPrefixedUnit",
+            "metrology.SpecialUnit",
+            "metrology.DerivedUnit",
+            "metrology.BaseUnit",
+            "metrology.UnitSymbol",
+            "siunits.SICoherentDerivedUnit",
+            "siunits.SINonCoherentDerivedUnit",
+            "siunits.SISpecialUnit",
+            "siunits.SICoherentUnit",
+            "siunits.SIPrefixedUnit",
+            "siunits.SIBaseUnit",
+            "siunits.SIUnitSymbol",
+            "siunits.SIUnit",
+            "emmo.MultipleUnit",
+            "emmo.SubMultipleUnit",
+            "emmo.OffSystemUnit",
+            "emmo.PrefixedUnit",
+            "emmo.NonPrefixedUnit",
+            "emmo.SpecialUnit",
+            "emmo.DerivedUnit",
+            "emmo.BaseUnit",
+            "emmo.UnitSymbol",
+            "emmo.SIAccepted",
+            "emmo.SICoherentDerivedUnit",
+            "emmo.SINonCoherentDerivedUnit",
+            "emmo.SISpecialUnit",
+            "emmo.SICoherentUnit",
+            "emmo.SIPrefixedUnit",
+            "emmo.SIBaseUnit",
+            "emmo.SIUnitSymbol",
+            "emmo.SIUnit",
+        )
+    )
+    if not hasattr(self.onto, "MeasurementUnit"):
+        return
+    exceptions.update(self.get_config("test_unit_dimension.exceptions", ()))
+    regex = re.compile(r"^(emmo|metrology).hasDimensionString.value\(.*\)$")
+    classes = set(self.onto.classes(self.check_imported))
+    for cls in self.onto.MeasurementUnit.descendants():
+        if not self.check_imported and cls not in classes:
+            continue
+        # Assume that actual units are not subclassed
+        if not list(cls.subclasses()) and repr(cls) not in exceptions:
+            with self.subTest(cls=cls, label=get_label(cls)):
+                self.assertTrue(
+                    any(
+                        regex.match(repr(r))
+                        for r in cls.get_indirect_is_a()
+                    ),
+                    msg=cls,
+                )
+
+
+
+ +
+ + + + + +
+ +
+ +
+ + + +
+ + + +

+ +TestSyntacticEMMOConventions + + + +

+ +
+ +

Test syntactic EMMO conventions.

+ +
+ Source code in emmopy/emmocheck.py +
class TestSyntacticEMMOConventions(TestEMMOConventions):
+    """Test syntactic EMMO conventions."""
+
+    def test_number_of_labels(self):
+        """Check that all entities have one and only one prefLabel.
+
+        Use "altLabel" for synonyms.
+
+        The only allowed exception is entities who's representation
+        starts with "owl.".
+        """
+        exceptions = set(
+            (
+                "terms.license",
+                "terms.abstract",
+                "terms.contributor",
+                "terms.creator",
+                "terms.publisher",
+                "terms.title",
+                "core.prefLabel",
+                "core.altLabel",
+                "core.hiddenLabel",
+                "foaf.logo",
+                "0.1.logo",  # foaf.logo
+            )
+        )
+        exceptions.update(
+            self.get_config("test_number_of_labels.exceptions", ())
+        )
+        if (
+            "prefLabel"
+            in self.onto.world._props  # pylint: disable=protected-access
+        ):
+            for entity in self.onto.classes(self.check_imported):
+                if repr(entity) not in exceptions:
+                    with self.subTest(
+                        entity=entity,
+                        label=get_label(entity),
+                        prefLabels=entity.prefLabel,
+                    ):
+                        if not repr(entity).startswith("owl."):
+                            self.assertTrue(hasattr(entity, "prefLabel"))
+                            self.assertEqual(1, len(entity.prefLabel))
+        else:
+            self.fail("ontology has no prefLabel")
+
+    def test_class_label(self):
+        """Check that class labels are CamelCase and valid identifiers.
+
+        For CamelCase, we are currently only checking that the labels
+        start with upper case.
+        """
+        exceptions = set(
+            (
+                "0-manifold",  # not needed in 1.0.0-beta
+                "1-manifold",
+                "2-manifold",
+                "3-manifold",
+                "C++",
+                "3DPrinting",
+            )
+        )
+        exceptions.update(self.get_config("test_class_label.exceptions", ()))
+
+        for cls in self.onto.classes(self.check_imported):
+            for label in cls.label + getattr(cls, "prefLabel", []):
+                if str(label) not in exceptions:
+                    with self.subTest(entity=cls, label=label):
+                        self.assertTrue(label.isidentifier())
+                        self.assertTrue(label[0].isupper())
+
+    def test_object_property_label(self):
+        """Check that object property labels are lowerCamelCase.
+
+        Allowed exceptions: "EMMORelation"
+
+        If they start with "has" or "is" they should be followed by a
+        upper case letter.
+
+        If they start with "is" they should also end with "Of".
+        """
+        exceptions = set(("EMMORelation",))
+        exceptions.update(
+            self.get_config("test_object_property_label.exceptions", ())
+        )
+
+        for obj_prop in self.onto.object_properties():
+            if repr(obj_prop) not in exceptions:
+                for label in obj_prop.label:
+                    with self.subTest(entity=obj_prop, label=label):
+                        self.assertTrue(
+                            label[0].islower(), "label start with lowercase"
+                        )
+                        if label.startswith("has"):
+                            self.assertTrue(
+                                label[3].isupper(),
+                                'what follows "has" must be "uppercase"',
+                            )
+                        if label.startswith("is"):
+                            self.assertTrue(
+                                label[2].isupper(),
+                                'what follows "is" must be "uppercase"',
+                            )
+                            self.assertTrue(
+                                label.endswith(("Of", "With")),
+                                'should end with "Of" or "With"',
+                            )
+
+
+ + + +
+ + + + + + + + + +
+ + + +

+test_class_label(self) + + +

+ +
+ +

Check that class labels are CamelCase and valid identifiers.

+

For CamelCase, we are currently only checking that the labels +start with upper case.

+ +
+ Source code in emmopy/emmocheck.py +
def test_class_label(self):
+    """Check that class labels are CamelCase and valid identifiers.
+
+    For CamelCase, we are currently only checking that the labels
+    start with upper case.
+    """
+    exceptions = set(
+        (
+            "0-manifold",  # not needed in 1.0.0-beta
+            "1-manifold",
+            "2-manifold",
+            "3-manifold",
+            "C++",
+            "3DPrinting",
+        )
+    )
+    exceptions.update(self.get_config("test_class_label.exceptions", ()))
+
+    for cls in self.onto.classes(self.check_imported):
+        for label in cls.label + getattr(cls, "prefLabel", []):
+            if str(label) not in exceptions:
+                with self.subTest(entity=cls, label=label):
+                    self.assertTrue(label.isidentifier())
+                    self.assertTrue(label[0].isupper())
+
+
+
+ +
+ + + +
+ + + +

+test_number_of_labels(self) + + +

+ +
+ +

Check that all entities have one and only one prefLabel.

+

Use "altLabel" for synonyms.

+

The only allowed exception is entities who's representation +starts with "owl.".

+ +
+ Source code in emmopy/emmocheck.py +
def test_number_of_labels(self):
+    """Check that all entities have one and only one prefLabel.
+
+    Use "altLabel" for synonyms.
+
+    The only allowed exception is entities who's representation
+    starts with "owl.".
+    """
+    exceptions = set(
+        (
+            "terms.license",
+            "terms.abstract",
+            "terms.contributor",
+            "terms.creator",
+            "terms.publisher",
+            "terms.title",
+            "core.prefLabel",
+            "core.altLabel",
+            "core.hiddenLabel",
+            "foaf.logo",
+            "0.1.logo",  # foaf.logo
+        )
+    )
+    exceptions.update(
+        self.get_config("test_number_of_labels.exceptions", ())
+    )
+    if (
+        "prefLabel"
+        in self.onto.world._props  # pylint: disable=protected-access
+    ):
+        for entity in self.onto.classes(self.check_imported):
+            if repr(entity) not in exceptions:
+                with self.subTest(
+                    entity=entity,
+                    label=get_label(entity),
+                    prefLabels=entity.prefLabel,
+                ):
+                    if not repr(entity).startswith("owl."):
+                        self.assertTrue(hasattr(entity, "prefLabel"))
+                        self.assertEqual(1, len(entity.prefLabel))
+    else:
+        self.fail("ontology has no prefLabel")
+
+
+
+ +
+ + + +
+ + + +

+test_object_property_label(self) + + +

+ +
+ +

Check that object property labels are lowerCamelCase.

+

Allowed exceptions: "EMMORelation"

+

If they start with "has" or "is" they should be followed by a +upper case letter.

+

If they start with "is" they should also end with "Of".

+ +
+ Source code in emmopy/emmocheck.py +
def test_object_property_label(self):
+    """Check that object property labels are lowerCamelCase.
+
+    Allowed exceptions: "EMMORelation"
+
+    If they start with "has" or "is" they should be followed by a
+    upper case letter.
+
+    If they start with "is" they should also end with "Of".
+    """
+    exceptions = set(("EMMORelation",))
+    exceptions.update(
+        self.get_config("test_object_property_label.exceptions", ())
+    )
+
+    for obj_prop in self.onto.object_properties():
+        if repr(obj_prop) not in exceptions:
+            for label in obj_prop.label:
+                with self.subTest(entity=obj_prop, label=label):
+                    self.assertTrue(
+                        label[0].islower(), "label start with lowercase"
+                    )
+                    if label.startswith("has"):
+                        self.assertTrue(
+                            label[3].isupper(),
+                            'what follows "has" must be "uppercase"',
+                        )
+                    if label.startswith("is"):
+                        self.assertTrue(
+                            label[2].isupper(),
+                            'what follows "is" must be "uppercase"',
+                        )
+                        self.assertTrue(
+                            label.endswith(("Of", "With")),
+                            'should end with "Of" or "With"',
+                        )
+
+
+
+ +
+ + + + + +
+ +
+ +
+ + + + +
+ + + +

+main(argv=None) + + +

+ +
+ +

Run all checks on ontology iri.

+

Default is 'http://emmo.info/emmo'.

+ +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
argvlist

List of arguments, similar to sys.argv[1:]. +Mainly for testing purposes, since it allows one to invoke the tool +manually / through Python.

None
+
+ Source code in emmopy/emmocheck.py +
def main(
+    argv: list = None,
+):  # pylint: disable=too-many-locals,too-many-branches,too-many-statements
+    """Run all checks on ontology `iri`.
+
+    Default is 'http://emmo.info/emmo'.
+
+    Parameters:
+        argv: List of arguments, similar to `sys.argv[1:]`.
+            Mainly for testing purposes, since it allows one to invoke the tool
+            manually / through Python.
+
+    """
+    parser = argparse.ArgumentParser(description=__doc__)
+    parser.add_argument("iri", help="File name or URI to the ontology to test.")
+    parser.add_argument(
+        "--database",
+        "-d",
+        metavar="FILENAME",
+        default=":memory:",
+        help=(
+            "Load ontology from Owlready2 sqlite3 database. The `iri` argument"
+            " should in this case be the IRI of the ontology you want to "
+            "check."
+        ),
+    )
+    parser.add_argument(
+        "--local",
+        "-l",
+        action="store_true",
+        help=(
+            "Load imported ontologies locally.  Their paths are specified in "
+            "Protègè catalog files or via the --path option.  The IRI should "
+            "be a file name."
+        ),
+    )
+    parser.add_argument(
+        "--catalog-file",
+        default="catalog-v001.xml",
+        help=(
+            "Name of Protègè catalog file in the same folder as the ontology. "
+            "This option is used together with --local and defaults to "
+            '"catalog-v001.xml".'
+        ),
+    )
+    parser.add_argument(
+        "--path",
+        action="append",
+        default=[],
+        help=(
+            "Paths where imported ontologies can be found. May be provided as "
+            "a comma-separated string and/or with multiple --path options."
+        ),
+    )
+    parser.add_argument(
+        "--check-imported",
+        "-i",
+        action="store_true",
+        help="Whether to check imported ontologies.",
+    )
+    parser.add_argument(
+        "--verbose", "-v", action="store_true", help="Verbosity level."
+    )
+    parser.add_argument(
+        "--configfile",
+        "-c",
+        help="A yaml file with additional test configurations.",
+    )
+    parser.add_argument(
+        "--skip",
+        "-s",
+        action="append",
+        default=[],
+        help=(
+            "Shell pattern matching tests to skip.  This option may be "
+            "provided multiple times."
+        ),
+    )
+    parser.add_argument(
+        "--enable",
+        "-e",
+        action="append",
+        default=[],
+        help=(
+            "Shell pattern matching tests to enable that have been skipped by "
+            "default or in the config file.  This option may be provided "
+            "multiple times."
+        ),
+    )
+    parser.add_argument(  # deprecated, replaced by --no-catalog
+        "--url-from-catalog",
+        "-u",
+        default=None,
+        action="store_true",
+        help="Get url from catalog file",
+    )
+    parser.add_argument(
+        "--no-catalog",
+        action="store_false",
+        dest="url_from_catalog",
+        default=None,
+        help="Whether to not read catalog file even if it exists.",
+    )
+    parser.add_argument(
+        "--ignore-namespace",
+        "-n",
+        action="append",
+        default=[],
+        help="Namespace to be ignored. Can be given multiple times",
+    )
+
+    # Options to pass forward to unittest
+    parser.add_argument(
+        "--buffer",
+        "-b",
+        dest="unittest",
+        action="append_const",
+        const="-b",
+        help=(
+            "The standard output and standard error streams are buffered "
+            "during the test run. Output during a passing test is discarded. "
+            "Output is echoed normally on test fail or error and is added to "
+            "the failure messages."
+        ),
+    )
+    parser.add_argument(
+        "--catch",
+        dest="unittest",
+        action="append_const",
+        const="-c",
+        help=(
+            "Control-C during the test run waits for the current test to end "
+            "and then reports all the results so far. A second control-C "
+            "raises the normal KeyboardInterrupt exception"
+        ),
+    )
+    parser.add_argument(
+        "--failfast",
+        "-f",
+        dest="unittest",
+        action="append_const",
+        const="-f",
+        help="Stop the test run on the first error or failure.",
+    )
+    try:
+        args = parser.parse_args(args=argv)
+        sys.argv[1:] = args.unittest if args.unittest else []
+        if args.verbose:
+            sys.argv.append("-v")
+    except SystemExit as exc:
+        sys.exit(exc.code)  # Exit without traceback on invalid arguments
+
+    # Append to onto_path
+    for paths in args.path:
+        for path in paths.split(","):
+            if path not in onto_path:
+                onto_path.append(path)
+
+    # Load ontology
+    world = World(filename=args.database)
+    if args.database != ":memory:" and args.iri not in world.ontologies:
+        parser.error(
+            "The IRI argument should be one of the ontologies in "
+            "the database:\n  " + "\n  ".join(world.ontologies.keys())
+        )
+
+    onto = world.get_ontology(args.iri)
+    onto.load(
+        only_local=args.local,
+        url_from_catalog=args.url_from_catalog,
+        catalog_file=args.catalog_file,
+    )
+
+    # Store settings TestEMMOConventions
+    TestEMMOConventions.onto = onto
+    TestEMMOConventions.check_imported = args.check_imported
+    TestEMMOConventions.ignore_namespace = args.ignore_namespace
+
+    # Configure tests
+    verbosity = 2 if args.verbose else 1
+    if args.configfile:
+        import yaml  # pylint: disable=import-outside-toplevel
+
+        with open(args.configfile, "rt") as handle:
+            TestEMMOConventions.config.update(
+                yaml.load(handle, Loader=yaml.SafeLoader)
+            )
+
+    # Run all subclasses of TestEMMOConventions as test suites
+    status = 0
+    for cls in TestEMMOConventions.__subclasses__():
+        # pylint: disable=cell-var-from-loop,undefined-loop-variable
+
+        suite = unittest.TestLoader().loadTestsFromTestCase(cls)
+
+        # Mark tests to be skipped
+        for test in suite:
+            name = test.id().split(".")[-1]
+            skipped = set(  # skipped by default
+                [
+                    "test_namespace",
+                    "test_physical_quantity_dimension_annotation",
+                    "test_quantity_dimension_beta3",
+                    "test_physical_quantity_dimension",
+                ]
+            )
+            msg = {name: "skipped by default" for name in skipped}
+
+            # enable/skip tests from config file
+            for pattern in test.get_config("enable", ()):
+                if fnmatch.fnmatchcase(name, pattern):
+                    skipped.remove(name)
+            for pattern in test.get_config("skip", ()):
+                if fnmatch.fnmatchcase(name, pattern):
+                    skipped.add(name)
+                    msg[name] = "skipped from config file"
+
+            # enable/skip from command line
+            for pattern in args.enable:
+                if fnmatch.fnmatchcase(name, pattern):
+                    skipped.remove(name)
+            for pattern in args.skip:
+                if fnmatch.fnmatchcase(name, pattern):
+                    skipped.add(name)
+                    msg[name] = "skipped from command line"
+
+            if name in skipped:
+                setattr(test, "setUp", lambda: test.skipTest(msg.get(name, "")))
+
+        runner = TextTestRunner(verbosity=verbosity)
+        runner.resultclass.checkmode = True
+        result = runner.run(suite)
+        if result.failures:
+            status = 1
+
+    return status
+
+
+
+ +
+ + + + + + +
+ +
+ +
+ + + + + + + + + + + + + +
+
+ + + + + +
+ + + +
+ + + +
+
+
+
+ + + + + + + + + + \ No newline at end of file diff --git a/0.6.1/api_reference/emmopy/emmopy/index.html b/0.6.1/api_reference/emmopy/emmopy/index.html new file mode 100644 index 000000000..fab8d2938 --- /dev/null +++ b/0.6.1/api_reference/emmopy/emmopy/index.html @@ -0,0 +1,1694 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + emmopy - EMMOntoPy + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + Skip to content + + +
+
+ +
+ + + + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + + + + + +
+
+ + + + +

emmopy

+ + +
+ + +
+ +

emmopy.emmopy

+

Automagically retrieve the EMMO utilizing +ontopy.get_ontology.

+ + + +
+ + + + + + + + +
+ + + +

+get_emmo(inferred=True) + + +

+ +
+ +

Returns the current version of emmo.

+ +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
inferredOptional[bool]

Whether to import the inferred version of emmo or not. +Default is True.

True
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
Ontology

The loaded emmo ontology.

+
+ Source code in emmopy/emmopy.py +
def get_emmo(inferred: Optional[bool] = True) -> "Ontology":
+    """Returns the current version of emmo.
+
+    Args:
+        inferred: Whether to import the inferred version of emmo or not.
+            Default is True.
+
+    Returns:
+        The loaded emmo ontology.
+
+    """
+    name = "emmo-inferred" if inferred in [True, None] else "emmo"
+    return get_ontology(name).load(prefix_emmo=True)
+
+
+
+ +
+ + + + + + +
+ +
+ +
+ + + + + + + + + + + + + +
+
+ + + + + +
+ + + +
+ + + +
+
+
+
+ + + + + + + + + + \ No newline at end of file diff --git a/0.6.1/api_reference/ontopy/colortest/index.html b/0.6.1/api_reference/ontopy/colortest/index.html new file mode 100644 index 000000000..52fe75e75 --- /dev/null +++ b/0.6.1/api_reference/ontopy/colortest/index.html @@ -0,0 +1,4601 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + colortest - EMMOntoPy + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + Skip to content + + +
+
+ +
+ + + + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + +

colortest

+ + +
+ + +
+ +

ontopy.colortest

+

Print tests in colors.

+

Adapted from https://github.com/meshy/colour-runner by Charlie Denton +License: MIT

+ + + +
+ + + + + + + +
+ + + +

+ +ColourTextTestResult (TestResult) + + + + +

+ +
+ +

A test result class that prints colour formatted text results to a stream.

+

Based on https://github.com/python/cpython/blob/3.3/Lib/unittest/runner.py

+ +
+ Source code in ontopy/colortest.py +
class ColourTextTestResult(TestResult):
+    """
+    A test result class that prints colour formatted text results to a stream.
+
+    Based on https://github.com/python/cpython/blob/3.3/Lib/unittest/runner.py
+    """
+
+    formatter = formatters.Terminal256Formatter()  # pylint: disable=no-member
+    lexer = Lexer()
+    separator1 = "=" * 70
+    separator2 = "-" * 70
+    indent = " " * 4
+    # if `checkmode` is true, simplified output will be generated with
+    # no traceback
+    checkmode = False
+    _terminal = Terminal()
+    colours = {
+        None: str,
+        "error": _terminal.bold_red,
+        "expected": _terminal.blue,
+        # "fail": _terminal.bold_yellow,
+        "fail": _terminal.bold_magenta,
+        "skip": str,
+        "success": _terminal.green,
+        "title": _terminal.blue,
+        "unexpected": _terminal.bold_red,
+    }
+
+    _test_class = None
+
+    def __init__(self, stream, descriptions, verbosity):
+        super().__init__(stream, descriptions, verbosity)
+        self.stream = stream
+        self.show_all = verbosity > 1
+        self.dots = verbosity == 1
+        self.descriptions = descriptions
+
+    def getShortDescription(self, test):
+        doc_first_line = test.shortDescription()
+        if self.descriptions and doc_first_line:
+            return self.indent + doc_first_line
+        return self.indent + test._testMethodName
+
+    def getLongDescription(self, test):
+        doc_first_line = test.shortDescription()
+        if self.descriptions and doc_first_line:
+            return "\n".join((str(test), doc_first_line))
+        return str(test)
+
+    def getClassDescription(self, test):
+        test_class = test.__class__
+        doc = test_class.__doc__
+        if self.descriptions and doc:
+            return doc.split("\n")[0].strip()
+        return strclass(test_class)
+
+    def startTest(self, test):
+        super().startTest(test)
+        pos = 0
+        if self.show_all:
+            if self._test_class != test.__class__:
+                self._test_class = test.__class__
+                title = self.getClassDescription(test)
+                self.stream.writeln(self.colours["title"](title))
+            descr = self.getShortDescription(test)
+            self.stream.write(descr)
+            pos += len(descr)
+            self.stream.write(" " * (70 - pos))
+            # self.stream.write(' ' * (self._terminal.width - 10 - pos))
+            # self.stream.write(' ... ')
+            self.stream.flush()
+
+    def printResult(self, short, extended, colour_key=None):
+        colour = self.colours[colour_key]
+        if self.show_all:
+            self.stream.writeln(colour(extended))
+        elif self.dots:
+            self.stream.write(colour(short))
+            self.stream.flush()
+
+    def addSuccess(self, test):
+        super().addSuccess(test)
+        self.printResult(".", "ok", "success")
+
+    def addError(self, test, err):
+        super().addError(test, err)
+        self.printResult("E", "ERROR", "error")
+
+    def addFailure(self, test, err):
+        super().addFailure(test, err)
+        self.printResult("F", "FAIL", "fail")
+
+    def addSkip(self, test, reason):
+        super().addSkip(test, reason)
+        if self.checkmode:
+            self.printResult("s", "skipped", "skip")
+        else:
+            self.printResult("s", f"skipped {reason!r}", "skip")
+
+    def addExpectedFailure(self, test, err):
+        super().addExpectedFailure(test, err)
+        self.printResult("x", "expected failure", "expected")
+
+    def addUnexpectedSuccess(self, test):
+        super().addUnexpectedSuccess(test)
+        self.printResult("u", "unexpected success", "unexpected")
+
+    def printErrors(self):
+        if self.dots or self.show_all:
+            self.stream.writeln()
+        self.printErrorList("ERROR", self.errors)
+        self.printErrorList("FAIL", self.failures)
+
+    def printErrorList(self, flavour, errors):
+        colour = self.colours[flavour.lower()]
+
+        for test, err in errors:
+            if self.checkmode and flavour == "FAIL":
+                self.stream.writeln(self.separator1)
+                title = f"{flavour}: {test.shortDescription()}"
+                self.stream.writeln(colour(title))
+                self.stream.writeln(str(test))
+                if self.show_all:
+                    self.stream.writeln(self.separator2)
+                    lines = str(err).split("\n")
+                    i = 1
+                    for line in lines[1:]:
+                        if line.startswith(" "):
+                            i += 1
+                        else:
+                            break
+                    self.stream.writeln(
+                        highlight(
+                            "\n".join(lines[i:]), self.lexer, self.formatter
+                        )
+                    )
+            else:
+                self.stream.writeln(self.separator1)
+                title = f"{flavour}: {self.getLongDescription(test)}"
+                self.stream.writeln(colour(title))
+                self.stream.writeln(self.separator2)
+                self.stream.writeln(highlight(err, self.lexer, self.formatter))
+
+
+ + + +
+ + + + + + + + + + + + + + + + + +
+ + + +

+addError(self, test, err) + + +

+ +
+ +

Called when an error has occurred. 'err' is a tuple of values as +returned by sys.exc_info().

+ +
+ Source code in ontopy/colortest.py +
def addError(self, test, err):
+    super().addError(test, err)
+    self.printResult("E", "ERROR", "error")
+
+
+
+ +
+ + + +
+ + + +

+addExpectedFailure(self, test, err) + + +

+ +
+ +

Called when an expected failure/error occurred.

+ +
+ Source code in ontopy/colortest.py +
def addExpectedFailure(self, test, err):
+    super().addExpectedFailure(test, err)
+    self.printResult("x", "expected failure", "expected")
+
+
+
+ +
+ + + +
+ + + +

+addFailure(self, test, err) + + +

+ +
+ +

Called when an error has occurred. 'err' is a tuple of values as +returned by sys.exc_info().

+ +
+ Source code in ontopy/colortest.py +
def addFailure(self, test, err):
+    super().addFailure(test, err)
+    self.printResult("F", "FAIL", "fail")
+
+
+
+ +
+ + + +
+ + + +

+addSkip(self, test, reason) + + +

+ +
+ +

Called when a test is skipped.

+ +
+ Source code in ontopy/colortest.py +
def addSkip(self, test, reason):
+    super().addSkip(test, reason)
+    if self.checkmode:
+        self.printResult("s", "skipped", "skip")
+    else:
+        self.printResult("s", f"skipped {reason!r}", "skip")
+
+
+
+ +
+ + + +
+ + + +

+addSuccess(self, test) + + +

+ +
+ +

Called when a test has completed successfully

+ +
+ Source code in ontopy/colortest.py +
def addSuccess(self, test):
+    super().addSuccess(test)
+    self.printResult(".", "ok", "success")
+
+
+
+ +
+ + + +
+ + + +

+addUnexpectedSuccess(self, test) + + +

+ +
+ +

Called when a test was expected to fail, but succeed.

+ +
+ Source code in ontopy/colortest.py +
def addUnexpectedSuccess(self, test):
+    super().addUnexpectedSuccess(test)
+    self.printResult("u", "unexpected success", "unexpected")
+
+
+
+ +
+ + + + + + + +
+ + + +

+printErrors(self) + + +

+ +
+ +

Called by TestRunner after test run

+ +
+ Source code in ontopy/colortest.py +
def printErrors(self):
+    if self.dots or self.show_all:
+        self.stream.writeln()
+    self.printErrorList("ERROR", self.errors)
+    self.printErrorList("FAIL", self.failures)
+
+
+
+ +
+ + + + +
+ + + +

+startTest(self, test) + + +

+ +
+ +

Called when the given test is about to be run

+ +
+ Source code in ontopy/colortest.py +
def startTest(self, test):
+    super().startTest(test)
+    pos = 0
+    if self.show_all:
+        if self._test_class != test.__class__:
+            self._test_class = test.__class__
+            title = self.getClassDescription(test)
+            self.stream.writeln(self.colours["title"](title))
+        descr = self.getShortDescription(test)
+        self.stream.write(descr)
+        pos += len(descr)
+        self.stream.write(" " * (70 - pos))
+        # self.stream.write(' ' * (self._terminal.width - 10 - pos))
+        # self.stream.write(' ... ')
+        self.stream.flush()
+
+
+
+ +
+ + + + + +
+ +
+ +
+ + + +
+ + + +

+ +ColourTextTestRunner (TextTestRunner) + + + + +

+ +
+ +

A test runner that uses colour in its output.

+ +
+ Source code in ontopy/colortest.py +
class ColourTextTestRunner(
+    TextTestRunner
+):  # pylint: disable=too-few-public-methods
+    """A test runner that uses colour in its output."""
+
+    resultclass = ColourTextTestResult
+
+
+ + + +
+ + + + + + + +
+ + + +

+ +resultclass (TestResult) + + + + +

+ +
+ +

A test result class that prints colour formatted text results to a stream.

+

Based on https://github.com/python/cpython/blob/3.3/Lib/unittest/runner.py

+ +
+ Source code in ontopy/colortest.py +
class ColourTextTestResult(TestResult):
+    """
+    A test result class that prints colour formatted text results to a stream.
+
+    Based on https://github.com/python/cpython/blob/3.3/Lib/unittest/runner.py
+    """
+
+    formatter = formatters.Terminal256Formatter()  # pylint: disable=no-member
+    lexer = Lexer()
+    separator1 = "=" * 70
+    separator2 = "-" * 70
+    indent = " " * 4
+    # if `checkmode` is true, simplified output will be generated with
+    # no traceback
+    checkmode = False
+    _terminal = Terminal()
+    colours = {
+        None: str,
+        "error": _terminal.bold_red,
+        "expected": _terminal.blue,
+        # "fail": _terminal.bold_yellow,
+        "fail": _terminal.bold_magenta,
+        "skip": str,
+        "success": _terminal.green,
+        "title": _terminal.blue,
+        "unexpected": _terminal.bold_red,
+    }
+
+    _test_class = None
+
+    def __init__(self, stream, descriptions, verbosity):
+        super().__init__(stream, descriptions, verbosity)
+        self.stream = stream
+        self.show_all = verbosity > 1
+        self.dots = verbosity == 1
+        self.descriptions = descriptions
+
+    def getShortDescription(self, test):
+        doc_first_line = test.shortDescription()
+        if self.descriptions and doc_first_line:
+            return self.indent + doc_first_line
+        return self.indent + test._testMethodName
+
+    def getLongDescription(self, test):
+        doc_first_line = test.shortDescription()
+        if self.descriptions and doc_first_line:
+            return "\n".join((str(test), doc_first_line))
+        return str(test)
+
+    def getClassDescription(self, test):
+        test_class = test.__class__
+        doc = test_class.__doc__
+        if self.descriptions and doc:
+            return doc.split("\n")[0].strip()
+        return strclass(test_class)
+
+    def startTest(self, test):
+        super().startTest(test)
+        pos = 0
+        if self.show_all:
+            if self._test_class != test.__class__:
+                self._test_class = test.__class__
+                title = self.getClassDescription(test)
+                self.stream.writeln(self.colours["title"](title))
+            descr = self.getShortDescription(test)
+            self.stream.write(descr)
+            pos += len(descr)
+            self.stream.write(" " * (70 - pos))
+            # self.stream.write(' ' * (self._terminal.width - 10 - pos))
+            # self.stream.write(' ... ')
+            self.stream.flush()
+
+    def printResult(self, short, extended, colour_key=None):
+        colour = self.colours[colour_key]
+        if self.show_all:
+            self.stream.writeln(colour(extended))
+        elif self.dots:
+            self.stream.write(colour(short))
+            self.stream.flush()
+
+    def addSuccess(self, test):
+        super().addSuccess(test)
+        self.printResult(".", "ok", "success")
+
+    def addError(self, test, err):
+        super().addError(test, err)
+        self.printResult("E", "ERROR", "error")
+
+    def addFailure(self, test, err):
+        super().addFailure(test, err)
+        self.printResult("F", "FAIL", "fail")
+
+    def addSkip(self, test, reason):
+        super().addSkip(test, reason)
+        if self.checkmode:
+            self.printResult("s", "skipped", "skip")
+        else:
+            self.printResult("s", f"skipped {reason!r}", "skip")
+
+    def addExpectedFailure(self, test, err):
+        super().addExpectedFailure(test, err)
+        self.printResult("x", "expected failure", "expected")
+
+    def addUnexpectedSuccess(self, test):
+        super().addUnexpectedSuccess(test)
+        self.printResult("u", "unexpected success", "unexpected")
+
+    def printErrors(self):
+        if self.dots or self.show_all:
+            self.stream.writeln()
+        self.printErrorList("ERROR", self.errors)
+        self.printErrorList("FAIL", self.failures)
+
+    def printErrorList(self, flavour, errors):
+        colour = self.colours[flavour.lower()]
+
+        for test, err in errors:
+            if self.checkmode and flavour == "FAIL":
+                self.stream.writeln(self.separator1)
+                title = f"{flavour}: {test.shortDescription()}"
+                self.stream.writeln(colour(title))
+                self.stream.writeln(str(test))
+                if self.show_all:
+                    self.stream.writeln(self.separator2)
+                    lines = str(err).split("\n")
+                    i = 1
+                    for line in lines[1:]:
+                        if line.startswith(" "):
+                            i += 1
+                        else:
+                            break
+                    self.stream.writeln(
+                        highlight(
+                            "\n".join(lines[i:]), self.lexer, self.formatter
+                        )
+                    )
+            else:
+                self.stream.writeln(self.separator1)
+                title = f"{flavour}: {self.getLongDescription(test)}"
+                self.stream.writeln(colour(title))
+                self.stream.writeln(self.separator2)
+                self.stream.writeln(highlight(err, self.lexer, self.formatter))
+
+
+ + + +
+ + + + + + + + + + + + + + + + + +
+ + + +

+addError(self, test, err) + + +

+ +
+ +

Called when an error has occurred. 'err' is a tuple of values as +returned by sys.exc_info().

+ +
+ Source code in ontopy/colortest.py +
def addError(self, test, err):
+    super().addError(test, err)
+    self.printResult("E", "ERROR", "error")
+
+
+
+ +
+ + + +
+ + + +

+addExpectedFailure(self, test, err) + + +

+ +
+ +

Called when an expected failure/error occurred.

+ +
+ Source code in ontopy/colortest.py +
def addExpectedFailure(self, test, err):
+    super().addExpectedFailure(test, err)
+    self.printResult("x", "expected failure", "expected")
+
+
+
+ +
+ + + +
+ + + +

+addFailure(self, test, err) + + +

+ +
+ +

Called when an error has occurred. 'err' is a tuple of values as +returned by sys.exc_info().

+ +
+ Source code in ontopy/colortest.py +
def addFailure(self, test, err):
+    super().addFailure(test, err)
+    self.printResult("F", "FAIL", "fail")
+
+
+
+ +
+ + + +
+ + + +

+addSkip(self, test, reason) + + +

+ +
+ +

Called when a test is skipped.

+ +
+ Source code in ontopy/colortest.py +
def addSkip(self, test, reason):
+    super().addSkip(test, reason)
+    if self.checkmode:
+        self.printResult("s", "skipped", "skip")
+    else:
+        self.printResult("s", f"skipped {reason!r}", "skip")
+
+
+
+ +
+ + + +
+ + + +

+addSuccess(self, test) + + +

+ +
+ +

Called when a test has completed successfully

+ +
+ Source code in ontopy/colortest.py +
def addSuccess(self, test):
+    super().addSuccess(test)
+    self.printResult(".", "ok", "success")
+
+
+
+ +
+ + + +
+ + + +

+addUnexpectedSuccess(self, test) + + +

+ +
+ +

Called when a test was expected to fail, but succeed.

+ +
+ Source code in ontopy/colortest.py +
def addUnexpectedSuccess(self, test):
+    super().addUnexpectedSuccess(test)
+    self.printResult("u", "unexpected success", "unexpected")
+
+
+
+ +
+ + + + + + + +
+ + + +

+printErrors(self) + + +

+ +
+ +

Called by TestRunner after test run

+ +
+ Source code in ontopy/colortest.py +
def printErrors(self):
+    if self.dots or self.show_all:
+        self.stream.writeln()
+    self.printErrorList("ERROR", self.errors)
+    self.printErrorList("FAIL", self.failures)
+
+
+
+ +
+ + + + +
+ + + +

+startTest(self, test) + + +

+ +
+ +

Called when the given test is about to be run

+ +
+ Source code in ontopy/colortest.py +
def startTest(self, test):
+    super().startTest(test)
+    pos = 0
+    if self.show_all:
+        if self._test_class != test.__class__:
+            self._test_class = test.__class__
+            title = self.getClassDescription(test)
+            self.stream.writeln(self.colours["title"](title))
+        descr = self.getShortDescription(test)
+        self.stream.write(descr)
+        pos += len(descr)
+        self.stream.write(" " * (70 - pos))
+        # self.stream.write(' ' * (self._terminal.width - 10 - pos))
+        # self.stream.write(' ... ')
+        self.stream.flush()
+
+
+
+ +
+ + + + + +
+ +
+ +
+ + + + + + + +
+ +
+ +
+ + + + + + + +
+ +
+ +
+ + + + + + + + + + + + + +
+
+ + + + + +
+ + + +
+ + + +
+
+
+
+ + + + + + + + + + \ No newline at end of file diff --git a/0.6.1/api_reference/ontopy/excelparser/index.html b/0.6.1/api_reference/ontopy/excelparser/index.html new file mode 100644 index 000000000..c0ba59cc7 --- /dev/null +++ b/0.6.1/api_reference/ontopy/excelparser/index.html @@ -0,0 +1,2313 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + excelparser - EMMOntoPy + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + Skip to content + + +
+
+ +
+ + + + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+ +
+ + + +
+
+ + + + +

excelparser

+ + +
+ + +
+ +

Module from parsing an excelfile and creating an +ontology from it.

+

The excelfile is read by pandas and the pandas +dataframe should have column names: +prefLabel, altLabel, Elucidation, Comments, Examples, +subClassOf, Relations.

+

Note that correct case is mandatory.

+ + + +
+ + + + + + + +
+ + + +

+ +ExcelError (EMMOntoPyException) + + + + +

+ +
+ +

Raised on errors in Excel file.

+ +
+ Source code in ontopy/excelparser.py +
class ExcelError(EMMOntoPyException):
+    """Raised on errors in Excel file."""
+
+
+ + +
+ +
+ + + + +
+ + + +

+create_ontology_from_excel(excelpath, concept_sheet_name='Concepts', metadata_sheet_name='Metadata', imports_sheet_name='ImportedOntologies', dataproperties_sheet_name='DataProperties', objectproperties_sheet_name='ObjectProperties', annotationproperties_sheet_name='AnnotationProperties', base_iri='http://emmo.info/emmo/domain/onto#', base_iri_from_metadata=True, imports=None, catalog=None, force=False, input_ontology=None) + + +

+ +
+ +

Creates an ontology from an Excel-file.

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
excelpathstr

Path to Excel workbook.

required
concept_sheet_namestr

Name of sheet where concepts are defined. +The second row of this sheet should contain column names that are +supported. Currently these are 'prefLabel','altLabel', +'Elucidation', 'Comments', 'Examples', 'subClassOf', 'Relations'. +Multiple entries are separated with ';'.

'Concepts'
metadata_sheet_namestr

Name of sheet where metadata are defined. +The first row contains column names 'Metadata name' and 'Value' +Supported 'Metadata names' are: 'Ontology IRI', +'Ontology vesion IRI', 'Ontology version Info', 'Title', +'Abstract', 'License', 'Comment', 'Author', 'Contributor'. +Multiple entries are separated with a semi-colon (;).

'Metadata'
imports_sheet_namestr

Name of sheet where imported ontologies are +defined. +Column name is 'Imported ontologies'. +Fully resolvable URL or path to imported ontologies provided one +per row.

'ImportedOntologies'
dataproperties_sheet_namestr

Name of sheet where data properties are +defined. The second row of this sheet should contain column names +that are supported. Currently these are 'prefLabel','altLabel', +'Elucidation', 'Comments', 'Examples', 'subPropertyOf', +'Domain', 'Range', 'dijointWith', 'equivalentTo'.

'DataProperties'
annotationproperties_sheet_namestr

Name of sheet where annotation +properties are defined. The second row of this sheet should contain +column names that are supported. Currently these are 'prefLabel', +'altLabel', 'Elucidation', 'Comments', 'Examples', 'subPropertyOf', +'Domain', 'Range'.

'AnnotationProperties'
objectproperties_sheet_namestr

Name of sheet where object properties are +defined.The second row of this sheet should contain column names +that are supported. Currently these are 'prefLabel','altLabel', +'Elucidation', 'Comments', 'Examples', 'subPropertyOf', +'Domain', 'Range', 'inverseOf', 'dijointWith', 'equivalentTo'.

'ObjectProperties'
base_iristr

Base IRI of the new ontology.

'http://emmo.info/emmo/domain/onto#'
base_iri_from_metadatabool

Whether to use base IRI defined from metadata.

True
importslist

List of imported ontologies.

None
catalogdict

Imported ontologies with (name, full path) key/value-pairs.

None
forcebool

Forcibly make an ontology by skipping concepts +that are erroneously defined or other errors in the excel sheet.

False
input_ontologyOptional[ontopy.ontology.Ontology]

Ontology that should be updated. +Default is None, +which means that a completely new ontology is generated. +If an input_ontology to be updated is provided, +the metadata sheet in the excel sheet will not be considered.

None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
A tuple with the
    +
  • created ontology
      +
    • associated catalog of ontology names and resolvable path as dict
    • +
    • +

      a dictionary with lists of concepts that raise errors, with the + following keys:

      +
        +
      • "already_defined": These are concepts (classes) + that are already in the + ontology, because they were already added in a + previous line of the excelfile/pandas dataframe, or because + it is already defined in an imported ontology with the same + base_iri as the newly created ontology.
      • +
      • "in_imported_ontologies": Concepts (classes) + that are defined in the + excel, but already exist in the imported ontologies.
      • +
      • "wrongly_defined": Concepts (classes) that are given an + invalid prefLabel (e.g. with a space in the name).
      • +
      • "missing_subClassOf": Concepts (classes) that are missing + parents. These concepts are added directly under owl:Thing.
      • +
      • "invalid_subClassOf": Concepts (classes) with invalidly + defined parents. + These concepts are added directly under owl:Thing.
      • +
      • "nonadded_concepts": List of all concepts (classes) that are + not added, + either because the prefLabel is invalid, or because the + concept has already been added once or already exists in an + imported ontology.
      • +
      • "obj_prop_already_defined": Object properties that are already + defined in the ontology.
      • +
      • "obj_prop_in_imported_ontologies": Object properties that are + defined in the excel, but already exist in the imported + ontologies.
      • +
      • "obj_prop_wrongly_defined": Object properties that are given + an invalid prefLabel (e.g. with a space in the name).
      • +
      • "obj_prop_missing_subPropertyOf": Object properties that are + missing parents.
      • +
      • "obj_prop_invalid_subPropertyOf": Object properties with + invalidly defined parents.
      • +
      • "obj_prop_nonadded_entities": List of all object properties + that are not added, either because the prefLabel is invalid, + or because the concept has already been added once or + already exists in an imported ontology.
      • +
      • "obj_prop_errors_in_properties": Object properties with + invalidly defined properties.
      • +
      • "obj_prop_errors_in_range": Object properties with invalidly + defined range.
      • +
      • "obj_prop_errors_in_domain": Object properties with invalidly + defined domain.
      • +
      • "annot_prop_already_defined": Annotation properties that are + already defined in the ontology.
      • +
      • "annot_prop_in_imported_ontologies": Annotation properties + that + are defined in the excel, but already exist in the imported + ontologies.
      • +
      • "annot_prop_wrongly_defined": Annotation properties that are + given an invalid prefLabel (e.g. with a space in the name).
      • +
      • "annot_prop_missing_subPropertyOf": Annotation properties that + are missing parents.
      • +
      • "annot_prop_invalid_subPropertyOf": Annotation properties with + invalidly defined parents.
      • +
      • "annot_prop_nonadded_entities": List of all annotation + properties that are not added, either because the prefLabel + is invalid, or because the concept has already been added + once or already exists in an imported ontology.
      • +
      • "annot_prop_errors_in_properties": Annotation properties with + invalidly defined properties.
      • +
      • "data_prop_already_defined": Data properties that are already + defined in the ontology.
      • +
      • "data_prop_in_imported_ontologies": Data properties that are + defined in the excel, but already exist in the imported + ontologies.
      • +
      • "data_prop_wrongly_defined": Data properties that are given + an invalid prefLabel (e.g. with a space in the name).
      • +
      • "data_prop_missing_subPropertyOf": Data properties that are + missing parents.
      • +
      • "data_prop_invalid_subPropertyOf": Data properties with + invalidly defined parents.
      • +
      • "data_prop_nonadded_entities": List of all data properties + that are not added, either because the prefLabel is invalid, + or because the concept has already been added once or + already exists in an imported ontology.
      • +
      • "data_prop_errors_in_properties": Data properties with + invalidly defined properties.
      • +
      • "data_prop_errors_in_range": Data properties with invalidly + defined range.
      • +
      • "data_prop_errors_in_domain": Data properties with invalidly + defined domain.
      • +
      +
    • +
    +
  • +
+
+ Source code in ontopy/excelparser.py +
def create_ontology_from_excel(  # pylint: disable=too-many-arguments, too-many-locals
+    excelpath: str,
+    concept_sheet_name: str = "Concepts",
+    metadata_sheet_name: str = "Metadata",
+    imports_sheet_name: str = "ImportedOntologies",
+    dataproperties_sheet_name: str = "DataProperties",
+    objectproperties_sheet_name: str = "ObjectProperties",
+    annotationproperties_sheet_name: str = "AnnotationProperties",
+    base_iri: str = "http://emmo.info/emmo/domain/onto#",
+    base_iri_from_metadata: bool = True,
+    imports: list = None,
+    catalog: dict = None,
+    force: bool = False,
+    input_ontology: Union[ontopy.ontology.Ontology, None] = None,
+) -> Tuple[ontopy.ontology.Ontology, dict, dict]:
+    """
+    Creates an ontology from an Excel-file.
+
+    Arguments:
+        excelpath: Path to Excel workbook.
+        concept_sheet_name: Name of sheet where concepts are defined.
+            The second row of this sheet should contain column names that are
+            supported. Currently these are 'prefLabel','altLabel',
+            'Elucidation', 'Comments', 'Examples', 'subClassOf', 'Relations'.
+            Multiple entries are separated with ';'.
+        metadata_sheet_name: Name of sheet where metadata are defined.
+            The first row contains column names 'Metadata name' and 'Value'
+            Supported 'Metadata names' are: 'Ontology IRI',
+            'Ontology vesion IRI', 'Ontology version Info', 'Title',
+            'Abstract', 'License', 'Comment', 'Author', 'Contributor'.
+            Multiple entries are separated with a semi-colon (`;`).
+        imports_sheet_name: Name of sheet where imported ontologies are
+            defined.
+            Column name is 'Imported ontologies'.
+            Fully resolvable URL or path to imported ontologies provided one
+            per row.
+        dataproperties_sheet_name: Name of sheet where data properties are
+            defined. The second row of this sheet should contain column names
+            that are supported. Currently these are 'prefLabel','altLabel',
+            'Elucidation', 'Comments', 'Examples', 'subPropertyOf',
+            'Domain', 'Range', 'dijointWith', 'equivalentTo'.
+        annotationproperties_sheet_name: Name of sheet where annotation
+            properties are defined. The second row of this sheet should contain
+            column names that are supported. Currently these are 'prefLabel',
+            'altLabel', 'Elucidation', 'Comments', 'Examples', 'subPropertyOf',
+            'Domain', 'Range'.
+        objectproperties_sheet_name: Name of sheet where object properties are
+            defined.The second row of this sheet should contain column names
+            that are supported. Currently these are 'prefLabel','altLabel',
+            'Elucidation', 'Comments', 'Examples', 'subPropertyOf',
+            'Domain', 'Range', 'inverseOf', 'dijointWith', 'equivalentTo'.
+        base_iri: Base IRI of the new ontology.
+        base_iri_from_metadata: Whether to use base IRI defined from metadata.
+        imports: List of imported ontologies.
+        catalog: Imported ontologies with (name, full path) key/value-pairs.
+        force: Forcibly make an ontology by skipping concepts
+            that are erroneously defined or other errors in the excel sheet.
+        input_ontology: Ontology that should be updated.
+            Default is None,
+            which means that a completely new ontology is generated.
+            If an input_ontology to be updated is provided,
+            the metadata sheet in the excel sheet will not be considered.
+
+
+    Returns:
+        A tuple with the:
+
+            * created ontology
+            * associated catalog of ontology names and resolvable path as dict
+            * a dictionary with lists of concepts that raise errors, with the
+              following keys:
+
+                - "already_defined": These are concepts (classes)
+                    that are already in the
+                    ontology, because they were already added in a
+                    previous line of the excelfile/pandas dataframe, or because
+                    it is already defined in an imported ontology with the same
+                    base_iri as the newly created ontology.
+                - "in_imported_ontologies": Concepts (classes)
+                    that are defined in the
+                    excel, but already exist in the imported ontologies.
+                - "wrongly_defined": Concepts (classes) that are given an
+                    invalid prefLabel (e.g. with a space in the name).
+                - "missing_subClassOf": Concepts (classes) that are missing
+                    parents. These concepts are added directly under owl:Thing.
+                - "invalid_subClassOf": Concepts (classes) with invalidly
+                    defined parents.
+                    These concepts are added directly under owl:Thing.
+                - "nonadded_concepts": List of all concepts (classes) that are
+                    not added,
+                    either because the prefLabel is invalid, or because the
+                    concept has already been added once or already exists in an
+                    imported ontology.
+                - "obj_prop_already_defined": Object properties that are already
+                    defined in the ontology.
+                - "obj_prop_in_imported_ontologies": Object properties that are
+                    defined in the excel, but already exist in the imported
+                    ontologies.
+                - "obj_prop_wrongly_defined": Object properties that are given
+                    an invalid prefLabel (e.g. with a space in the name).
+                - "obj_prop_missing_subPropertyOf": Object properties that are
+                    missing parents.
+                - "obj_prop_invalid_subPropertyOf": Object properties with
+                    invalidly defined parents.
+                - "obj_prop_nonadded_entities": List of all object properties
+                    that are not added, either because the prefLabel is invalid,
+                    or because the concept has already been added once or
+                    already exists in an imported ontology.
+                - "obj_prop_errors_in_properties": Object properties with
+                    invalidly defined properties.
+                - "obj_prop_errors_in_range": Object properties with invalidly
+                    defined range.
+                - "obj_prop_errors_in_domain": Object properties with invalidly
+                    defined domain.
+                - "annot_prop_already_defined": Annotation properties that are
+                    already defined in the ontology.
+                - "annot_prop_in_imported_ontologies":  Annotation properties
+                    that
+                    are defined in the excel, but already exist in the imported
+                    ontologies.
+                - "annot_prop_wrongly_defined": Annotation properties that are
+                    given an invalid prefLabel (e.g. with a space in the name).
+                - "annot_prop_missing_subPropertyOf": Annotation properties that
+                    are missing parents.
+                - "annot_prop_invalid_subPropertyOf": Annotation properties with
+                    invalidly defined parents.
+                - "annot_prop_nonadded_entities": List of all annotation
+                    properties that are not added, either because the prefLabel
+                    is invalid, or because the concept has already been added
+                    once or already exists in an imported ontology.
+                - "annot_prop_errors_in_properties": Annotation properties with
+                    invalidly defined properties.
+                - "data_prop_already_defined": Data properties that are already
+                    defined in the ontology.
+                - "data_prop_in_imported_ontologies": Data properties that are
+                    defined in the excel, but already exist in the imported
+                    ontologies.
+                - "data_prop_wrongly_defined":  Data properties that are given
+                    an invalid prefLabel (e.g. with a space in the name).
+                - "data_prop_missing_subPropertyOf": Data properties that are
+                    missing parents.
+                - "data_prop_invalid_subPropertyOf": Data properties with
+                    invalidly defined parents.
+                - "data_prop_nonadded_entities": List of all data properties
+                    that are not added, either because the prefLabel is invalid,
+                    or because the concept has already been added once or
+                    already exists in an imported ontology.
+                - "data_prop_errors_in_properties": Data properties with
+                    invalidly defined properties.
+                - "data_prop_errors_in_range": Data properties with invalidly
+                    defined range.
+                - "data_prop_errors_in_domain": Data properties with invalidly
+                    defined domain.
+
+    """
+    web_protocol = "http://", "https://", "ftp://"
+
+    def _relative_to_absolute_paths(path):
+        if isinstance(path, str):
+            if not path.startswith(web_protocol):
+                path = os.path.dirname(excelpath) + "/" + str(path)
+        return path
+
+    try:
+        imports = pd.read_excel(
+            excelpath, sheet_name=imports_sheet_name, skiprows=[1]
+        )
+    except ValueError:
+        imports = pd.DataFrame()
+    else:
+        # Strip leading and trailing white spaces in paths
+        imports.replace(r"^\s+", "", regex=True).replace(
+            r"\s+$", "", regex=True
+        )
+        # Set empty strings to nan
+        imports = imports.replace(r"^\s*$", np.nan, regex=True)
+        if "Imported ontologies" in imports.columns:
+            imports["Imported ontologies"] = imports[
+                "Imported ontologies"
+            ].apply(_relative_to_absolute_paths)
+
+    # Read datafile TODO: Some magic to identify the header row
+    conceptdata = pd.read_excel(
+        excelpath, sheet_name=concept_sheet_name, skiprows=[0, 2]
+    )
+    try:
+        objectproperties = pd.read_excel(
+            excelpath, sheet_name=objectproperties_sheet_name, skiprows=[0, 2]
+        )
+        if "prefLabel" not in objectproperties.columns:
+            warnings.warn(
+                "The 'prefLabel' column is missing in "
+                f"{objectproperties_sheet_name}. "
+                "New object properties will not be added to the ontology."
+            )
+            objectproperties = None
+    except ValueError:
+        warnings.warn(
+            f"No sheet named {objectproperties_sheet_name} found "
+            f"in {excelpath}. "
+            "New object properties will not be added to the ontology."
+        )
+        objectproperties = None
+    try:
+        annotationproperties = pd.read_excel(
+            excelpath,
+            sheet_name=annotationproperties_sheet_name,
+            skiprows=[0, 2],
+        )
+        if "prefLabel" not in annotationproperties.columns:
+            warnings.warn(
+                "The 'prefLabel' column is missing in "
+                f"{annotationproperties_sheet_name}. "
+                "New annotation properties will not be added to the ontology."
+            )
+            annotationproperties = None
+    except ValueError:
+        warnings.warn(
+            f"No sheet named {annotationproperties_sheet_name} "
+            f"found in {excelpath}. "
+            "New annotation properties will not be added to the ontology."
+        )
+        annotationproperties = None
+
+    try:
+        dataproperties = pd.read_excel(
+            excelpath, sheet_name=dataproperties_sheet_name, skiprows=[0, 2]
+        )
+        if "prefLabel" not in dataproperties.columns:
+            warnings.warn(
+                "The 'prefLabel' column is missing in "
+                f"{dataproperties_sheet_name}. "
+                "New data properties will not be added to the ontology."
+            )
+            dataproperties = None
+    except ValueError:
+        warnings.warn(
+            f"No sheet named {dataproperties_sheet_name} found in {excelpath}. "
+            "New data properties will not be added to the ontology."
+        )
+        dataproperties = None
+
+    metadata = pd.read_excel(excelpath, sheet_name=metadata_sheet_name)
+    return create_ontology_from_pandas(
+        data=conceptdata,
+        objectproperties=objectproperties,
+        dataproperties=dataproperties,
+        annotationproperties=annotationproperties,
+        metadata=metadata,
+        imports=imports,
+        base_iri=base_iri,
+        base_iri_from_metadata=base_iri_from_metadata,
+        catalog=catalog,
+        force=force,
+        input_ontology=input_ontology,
+    )
+
+
+
+ +
+ + + +
+ + + +

+create_ontology_from_pandas(data, objectproperties, annotationproperties, dataproperties, metadata, imports, base_iri='http://emmo.info/emmo/domain/onto#', base_iri_from_metadata=True, catalog=None, force=False, input_ontology=None) + + +

+ +
+ +

Create an ontology from a pandas DataFrame.

+

Check 'create_ontology_from_excel' for complete documentation.

+ +
+ Source code in ontopy/excelparser.py +
def create_ontology_from_pandas(  # pylint:disable=too-many-locals,too-many-branches,too-many-statements,too-many-arguments
+    data: pd.DataFrame,
+    objectproperties: pd.DataFrame,
+    annotationproperties: pd.DataFrame,
+    dataproperties: pd.DataFrame,
+    metadata: pd.DataFrame,
+    imports: pd.DataFrame,
+    base_iri: str = "http://emmo.info/emmo/domain/onto#",
+    base_iri_from_metadata: bool = True,
+    catalog: dict = None,
+    force: bool = False,
+    input_ontology: Union[ontopy.ontology.Ontology, None] = None,
+) -> Tuple[ontopy.ontology.Ontology, dict]:
+    """
+    Create an ontology from a pandas DataFrame.
+
+    Check 'create_ontology_from_excel' for complete documentation.
+    """
+    # Get ontology to which new concepts should be added
+    if input_ontology:
+        onto = input_ontology
+        catalog = {}
+    else:  # Create new ontology
+        onto, catalog = get_metadata_from_dataframe(
+            metadata, base_iri, imports=imports
+        )
+
+        # Set given or default base_iri if base_iri_from_metadata is False.
+        if not base_iri_from_metadata:
+            onto.base_iri = base_iri
+    # onto.sync_python_names()
+    # prefLabel, label, and altLabel
+    # are default label annotations
+    onto.set_default_label_annotations()
+    # Add object properties
+    if objectproperties is not None:
+        objectproperties = _clean_dataframe(objectproperties)
+        (
+            onto,
+            objectproperties_with_errors,
+            added_objprop_indices,
+        ) = _add_entities(
+            onto=onto,
+            data=objectproperties,
+            entitytype=owlready2.ObjectPropertyClass,
+            force=force,
+        )
+
+    if annotationproperties is not None:
+        annotationproperties = _clean_dataframe(annotationproperties)
+        (
+            onto,
+            annotationproperties_with_errors,
+            added_annotprop_indices,
+        ) = _add_entities(
+            onto=onto,
+            data=annotationproperties,
+            entitytype=owlready2.AnnotationPropertyClass,
+            force=force,
+        )
+
+    if dataproperties is not None:
+        dataproperties = _clean_dataframe(dataproperties)
+        (
+            onto,
+            dataproperties_with_errors,
+            added_dataprop_indices,
+        ) = _add_entities(
+            onto=onto,
+            data=dataproperties,
+            entitytype=owlready2.DataPropertyClass,
+            force=force,
+        )
+    onto.sync_attributes(
+        name_policy="uuid", name_prefix="EMMO_", class_docstring="elucidation"
+    )
+    # Clean up data frame with new concepts
+    data = _clean_dataframe(data)
+    # Add entities
+    onto, entities_with_errors, added_concept_indices = _add_entities(
+        onto=onto, data=data, entitytype=owlready2.ThingClass, force=force
+    )
+
+    # Add entity properties in a second loop
+    for index in added_concept_indices:
+        row = data.loc[index]
+        properties = row["Relations"]
+        if properties == "nan":
+            properties = None
+        if isinstance(properties, str):
+            try:
+                entity = onto.get_by_label(row["prefLabel"].strip())
+            except NoSuchLabelError:
+                pass
+            props = properties.split(";")
+            for prop in props:
+                try:
+                    entity.is_a.append(evaluate(onto, prop.strip()))
+                except pyparsing.ParseException as exc:
+                    warnings.warn(
+                        # This is currently not tested
+                        f"Error in Property assignment for: '{entity}'. "
+                        f"Property to be Evaluated: '{prop}'. "
+                        f"{exc}"
+                    )
+                    entities_with_errors["errors_in_properties"].append(
+                        entity.name
+                    )
+                except NoSuchLabelError as exc:
+                    msg = (
+                        f"Error in Property assignment for: {entity}. "
+                        f"Property to be Evaluated: {prop}. "
+                        f"{exc}"
+                    )
+                    if force is True:
+                        warnings.warn(msg)
+                        entities_with_errors["errors_in_properties"].append(
+                            entity.name
+                        )
+                    else:
+                        raise ExcelError(msg) from exc
+
+    # Add range and domain for object properties
+    if objectproperties is not None:
+        onto, objectproperties_with_errors = _add_range_domain(
+            onto=onto,
+            properties=objectproperties,
+            added_prop_indices=added_objprop_indices,
+            properties_with_errors=objectproperties_with_errors,
+            force=force,
+        )
+        for key, value in objectproperties_with_errors.items():
+            entities_with_errors["obj_prop_" + key] = value
+    # Add range and domain for annotation properties
+    if annotationproperties is not None:
+        onto, annotationproperties_with_errors = _add_range_domain(
+            onto=onto,
+            properties=annotationproperties,
+            added_prop_indices=added_annotprop_indices,
+            properties_with_errors=annotationproperties_with_errors,
+            force=force,
+        )
+        for key, value in annotationproperties_with_errors.items():
+            entities_with_errors["annot_prop_" + key] = value
+
+    # Add range and domain for data properties
+    if dataproperties is not None:
+        onto, dataproperties_with_errors = _add_range_domain(
+            onto=onto,
+            properties=dataproperties,
+            added_prop_indices=added_dataprop_indices,
+            properties_with_errors=dataproperties_with_errors,
+            force=force,
+        )
+        for key, value in dataproperties_with_errors.items():
+            entities_with_errors["data_prop_" + key] = value
+
+    # Synchronise Python attributes to ontology
+    onto.sync_attributes(
+        name_policy="uuid", name_prefix="EMMO_", class_docstring="elucidation"
+    )
+    onto.dir_label = False
+    entities_with_errors = {
+        key: set(value) for key, value in entities_with_errors.items()
+    }
+    return onto, catalog, entities_with_errors
+
+
+
+ +
+ + + +
+ + + +

+get_metadata_from_dataframe(metadata, base_iri, base_iri_from_metadata=True, imports=None, catalog=None) + + +

+ +
+ +

Create ontology with metadata from pd.DataFrame

+ +
+ Source code in ontopy/excelparser.py +
def get_metadata_from_dataframe(  # pylint: disable=too-many-locals,too-many-branches,too-many-statements
+    metadata: pd.DataFrame,
+    base_iri: str,
+    base_iri_from_metadata: bool = True,
+    imports: pd.DataFrame = None,
+    catalog: dict = None,
+) -> Tuple[ontopy.ontology.Ontology, dict]:
+    """Create ontology with metadata from pd.DataFrame"""
+
+    # base_iri from metadata if it exists and base_iri_from_metadata
+    if base_iri_from_metadata:
+        try:
+            base_iris = _parse_literal(metadata, "Ontology IRI", metadata=True)
+            if len(base_iris) > 1:
+                warnings.warn(
+                    "More than one Ontology IRI given. The first was chosen."
+                )
+            base_iri = base_iris[0] + "#"
+        except (TypeError, ValueError, AttributeError, IndexError):
+            pass
+
+    # Create new ontology
+    onto = get_ontology(base_iri)
+
+    # Add imported ontologies
+    catalog = {} if catalog is None else catalog
+    locations = set()
+    for _, row in imports.iterrows():
+        # for location in imports:
+        location = row["Imported ontologies"]
+        if not pd.isna(location) and location not in locations:
+            imported = onto.world.get_ontology(location).load()
+            onto.imported_ontologies.append(imported)
+            catalog[imported.base_iri.rstrip("#/")] = location
+            try:
+                cat = read_catalog(location.rsplit("/", 1)[0])
+                catalog.update(cat)
+            except ReadCatalogError:
+                warnings.warn(f"Catalog for {imported} not found.")
+            locations.add(location)
+        # set defined prefix
+        if not pd.isna(row["prefix"]):
+            # set prefix for all ontologies with same 'base_iri_root'
+            if not pd.isna(row["base_iri_root"]):
+                onto.set_common_prefix(
+                    iri_base=row["base_iri_root"], prefix=row["prefix"]
+                )
+            # If base_root not given, set prefix only to top ontology
+            else:
+                imported.prefix = row["prefix"]
+
+    with onto:
+        # Add title
+        try:
+            _add_literal(
+                metadata,
+                onto.metadata.title,
+                "Title",
+                metadata=True,
+                only_one=True,
+            )
+        except AttributeError:
+            pass
+
+        # Add license
+        try:
+            _add_literal(
+                metadata, onto.metadata.license, "License", metadata=True
+            )
+        except AttributeError:
+            pass
+
+        # Add authors/creators
+        try:
+            _add_literal(
+                metadata, onto.metadata.creator, "Author", metadata=True
+            )
+        except AttributeError:
+            pass
+
+        # Add contributors
+        try:
+            _add_literal(
+                metadata,
+                onto.metadata.contributor,
+                "Contributor",
+                metadata=True,
+            )
+        except AttributeError:
+            pass
+
+        # Add versionInfo
+        try:
+            _add_literal(
+                metadata,
+                onto.metadata.versionInfo,
+                "Ontology version Info",
+                metadata=True,
+                only_one=True,
+            )
+        except AttributeError:
+            pass
+    return onto, catalog
+
+
+
+ +
+ + + + + + +
+ +
+ +
+ + + + + + + + + + + + + +
+
+ + + + + +
+ + + +
+ + + +
+
+
+
+ + + + + + + + + + \ No newline at end of file diff --git a/0.6.1/api_reference/ontopy/factpluspluswrapper/factppgraph/index.html b/0.6.1/api_reference/ontopy/factpluspluswrapper/factppgraph/index.html new file mode 100644 index 000000000..e5082697b --- /dev/null +++ b/0.6.1/api_reference/ontopy/factpluspluswrapper/factppgraph/index.html @@ -0,0 +1,3379 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + factppgraph - EMMOntoPy + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + Skip to content + + +
+
+ +
+ + + + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + +

factppgraph

+ + +
+ + +
+ +

ontopy.factpluspluswrapper.factppgraph

+ + + +
+ + + + + + + +
+ + + +

+ +FaCTPPGraph + + + +

+ +
+ +

Class for running the FaCT++ reasoner (using OwlApiInterface) and +postprocessing the resulting inferred ontology.

+

Parameters

+

graph : owlapi.Graph instance + The graph to be inferred.

+ +
+ Source code in ontopy/factpluspluswrapper/factppgraph.py +
class FaCTPPGraph:
+    """Class for running the FaCT++ reasoner (using OwlApiInterface) and
+    postprocessing the resulting inferred ontology.
+
+    Parameters
+    ----------
+    graph : owlapi.Graph instance
+        The graph to be inferred.
+    """
+
+    def __init__(self, graph):
+        self.graph = graph
+        self._inferred = None
+        self._namespaces = None
+        self._base_iri = None
+
+    @property
+    def inferred(self):
+        """The current inferred graph."""
+        if self._inferred is None:
+            self._inferred = self.raw_inferred_graph()
+        return self._inferred
+
+    @property
+    def base_iri(self):
+        """Base iri of inferred ontology."""
+        if self._base_iri is None:
+            self._base_iri = URIRef(self.asserted_base_iri() + "-inferred")
+        return self._base_iri
+
+    @base_iri.setter
+    def base_iri(self, value):
+        """Assign inferred base iri."""
+        self._base_iri = URIRef(value)
+
+    @property
+    def namespaces(self):
+        """Namespaces defined in the original graph."""
+        if self._namespaces is None:
+            self._namespaces = dict(self.graph.namespaces()).copy()
+            self._namespaces[""] = self.base_iri
+        return self._namespaces
+
+    def asserted_base_iri(self):
+        """Returns the base iri or the original graph."""
+        return URIRef(dict(self.graph.namespaces()).get("", "").rstrip("#/"))
+
+    def raw_inferred_graph(self):
+        """Returns the raw non-postprocessed inferred ontology as a rdflib
+        graph."""
+        return OwlApiInterface().reason(self.graph)
+
+    def inferred_graph(self):
+        """Returns the postprocessed inferred graph."""
+        self.add_base_annotations()
+        self.set_namespace()
+        self.clean_base()
+        self.remove_nothing_is_nothing()
+        self.clean_ancestors()
+        return self.inferred
+
+    def add_base_annotations(self):
+        """Copy base annotations from original graph to the inferred graph."""
+        base = self.base_iri
+        inferred = self.inferred
+        for _, predicate, obj in self.graph.triples(
+            (self.asserted_base_iri(), None, None)
+        ):
+            if predicate == OWL.versionIRI:
+                version = obj.rsplit("/", 1)[-1]
+                obj = URIRef(f"{base}/{version}")
+            inferred.add((base, predicate, obj))
+
+    def set_namespace(self):
+        """Override namespace of inferred graph with the namespace of the
+        original graph.
+        """
+        inferred = self.inferred
+        for key, value in self.namespaces.items():
+            inferred.namespace_manager.bind(
+                key, value, override=True, replace=True
+            )
+
+    def clean_base(self):
+        """Remove all relations `s? a owl:Ontology` where `s?` is not
+        `base_iri`.
+        """
+        inferred = self.inferred
+        for (
+            subject,
+            predicate,
+            obj,
+        ) in inferred.triples(  # pylint: disable=not-an-iterable
+            (None, RDF.type, OWL.Ontology)
+        ):
+            inferred.remove((subject, predicate, obj))
+        inferred.add((self.base_iri, RDF.type, OWL.Ontology))
+
+    def remove_nothing_is_nothing(self):
+        """Remove superfluid relation in inferred graph:
+
+        owl:Nothing rdfs:subClassOf owl:Nothing
+        """
+        triple = OWL.Nothing, RDFS.subClassOf, OWL.Nothing
+        inferred = self.inferred
+        if triple in inferred:
+            inferred.remove(triple)
+
+    def clean_ancestors(self):
+        """Remove redundant rdfs:subClassOf relations in inferred graph."""
+        inferred = self.inferred
+        for (  # pylint: disable=too-many-nested-blocks
+            subject
+        ) in inferred.subjects(RDF.type, OWL.Class):
+            if isinstance(subject, URIRef):
+                parents = set(
+                    parent
+                    for parent in inferred.objects(subject, RDFS.subClassOf)
+                    if isinstance(parent, URIRef)
+                )
+                if len(parents) > 1:
+                    for parent in parents:
+                        ancestors = set(
+                            inferred.transitive_objects(parent, RDFS.subClassOf)
+                        )
+                        for entity in parents:
+                            if entity != parent and entity in ancestors:
+                                triple = subject, RDFS.subClassOf, entity
+                                if triple in inferred:
+                                    inferred.remove(triple)
+
+
+ + + +
+ + + + + + +
+ + + +

+base_iri + + + property + writable + + +

+ +
+ +

Base iri of inferred ontology.

+
+ +
+ + + +
+ + + +

+inferred + + + property + readonly + + +

+ +
+ +

The current inferred graph.

+
+ +
+ + + +
+ + + +

+namespaces + + + property + readonly + + +

+ +
+ +

Namespaces defined in the original graph.

+
+ +
+ + + + + + + +
+ + + +

+add_base_annotations(self) + + +

+ +
+ +

Copy base annotations from original graph to the inferred graph.

+ +
+ Source code in ontopy/factpluspluswrapper/factppgraph.py +
def add_base_annotations(self):
+    """Copy base annotations from original graph to the inferred graph."""
+    base = self.base_iri
+    inferred = self.inferred
+    for _, predicate, obj in self.graph.triples(
+        (self.asserted_base_iri(), None, None)
+    ):
+        if predicate == OWL.versionIRI:
+            version = obj.rsplit("/", 1)[-1]
+            obj = URIRef(f"{base}/{version}")
+        inferred.add((base, predicate, obj))
+
+
+
+ +
+ + + +
+ + + +

+asserted_base_iri(self) + + +

+ +
+ +

Returns the base iri or the original graph.

+ +
+ Source code in ontopy/factpluspluswrapper/factppgraph.py +
def asserted_base_iri(self):
+    """Returns the base iri or the original graph."""
+    return URIRef(dict(self.graph.namespaces()).get("", "").rstrip("#/"))
+
+
+
+ +
+ + + +
+ + + +

+clean_ancestors(self) + + +

+ +
+ +

Remove redundant rdfs:subClassOf relations in inferred graph.

+ +
+ Source code in ontopy/factpluspluswrapper/factppgraph.py +
def clean_ancestors(self):
+    """Remove redundant rdfs:subClassOf relations in inferred graph."""
+    inferred = self.inferred
+    for (  # pylint: disable=too-many-nested-blocks
+        subject
+    ) in inferred.subjects(RDF.type, OWL.Class):
+        if isinstance(subject, URIRef):
+            parents = set(
+                parent
+                for parent in inferred.objects(subject, RDFS.subClassOf)
+                if isinstance(parent, URIRef)
+            )
+            if len(parents) > 1:
+                for parent in parents:
+                    ancestors = set(
+                        inferred.transitive_objects(parent, RDFS.subClassOf)
+                    )
+                    for entity in parents:
+                        if entity != parent and entity in ancestors:
+                            triple = subject, RDFS.subClassOf, entity
+                            if triple in inferred:
+                                inferred.remove(triple)
+
+
+
+ +
+ + + +
+ + + +

+clean_base(self) + + +

+ +
+ +

Remove all relations s? a owl:Ontology where s? is not +base_iri.

+ +
+ Source code in ontopy/factpluspluswrapper/factppgraph.py +
def clean_base(self):
+    """Remove all relations `s? a owl:Ontology` where `s?` is not
+    `base_iri`.
+    """
+    inferred = self.inferred
+    for (
+        subject,
+        predicate,
+        obj,
+    ) in inferred.triples(  # pylint: disable=not-an-iterable
+        (None, RDF.type, OWL.Ontology)
+    ):
+        inferred.remove((subject, predicate, obj))
+    inferred.add((self.base_iri, RDF.type, OWL.Ontology))
+
+
+
+ +
+ + + +
+ + + +

+inferred_graph(self) + + +

+ +
+ +

Returns the postprocessed inferred graph.

+ +
+ Source code in ontopy/factpluspluswrapper/factppgraph.py +
def inferred_graph(self):
+    """Returns the postprocessed inferred graph."""
+    self.add_base_annotations()
+    self.set_namespace()
+    self.clean_base()
+    self.remove_nothing_is_nothing()
+    self.clean_ancestors()
+    return self.inferred
+
+
+
+ +
+ + + +
+ + + +

+raw_inferred_graph(self) + + +

+ +
+ +

Returns the raw non-postprocessed inferred ontology as a rdflib +graph.

+ +
+ Source code in ontopy/factpluspluswrapper/factppgraph.py +
def raw_inferred_graph(self):
+    """Returns the raw non-postprocessed inferred ontology as a rdflib
+    graph."""
+    return OwlApiInterface().reason(self.graph)
+
+
+
+ +
+ + + +
+ + + +

+remove_nothing_is_nothing(self) + + +

+ +
+ +

Remove superfluid relation in inferred graph:

+

owl:Nothing rdfs:subClassOf owl:Nothing

+ +
+ Source code in ontopy/factpluspluswrapper/factppgraph.py +
def remove_nothing_is_nothing(self):
+    """Remove superfluid relation in inferred graph:
+
+    owl:Nothing rdfs:subClassOf owl:Nothing
+    """
+    triple = OWL.Nothing, RDFS.subClassOf, OWL.Nothing
+    inferred = self.inferred
+    if triple in inferred:
+        inferred.remove(triple)
+
+
+
+ +
+ + + +
+ + + +

+set_namespace(self) + + +

+ +
+ +

Override namespace of inferred graph with the namespace of the +original graph.

+ +
+ Source code in ontopy/factpluspluswrapper/factppgraph.py +
def set_namespace(self):
+    """Override namespace of inferred graph with the namespace of the
+    original graph.
+    """
+    inferred = self.inferred
+    for key, value in self.namespaces.items():
+        inferred.namespace_manager.bind(
+            key, value, override=True, replace=True
+        )
+
+
+
+ +
+ + + + + +
+ +
+ +
+ + + +
+ + + +

+ +FactPPError + + + +

+ +
+ +

Postprocessing error after reasoning with FaCT++.

+ +
+ Source code in ontopy/factpluspluswrapper/factppgraph.py +
class FactPPError:
+    """Postprocessing error after reasoning with FaCT++."""
+
+
+ + +
+ +
+ + + + + + + +
+ +
+ +
+ + + + + + + + + + + + + +
+
+ + + + + +
+ + + +
+ + + +
+
+
+
+ + + + + + + + + + \ No newline at end of file diff --git a/0.6.1/api_reference/ontopy/factpluspluswrapper/owlapi_interface/index.html b/0.6.1/api_reference/ontopy/factpluspluswrapper/owlapi_interface/index.html new file mode 100644 index 000000000..08f167ade --- /dev/null +++ b/0.6.1/api_reference/ontopy/factpluspluswrapper/owlapi_interface/index.html @@ -0,0 +1,1856 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + owlapi_interface - EMMOntoPy + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + Skip to content + + +
+
+ +
+ + + + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + +

owlapi_interface

+ + +
+ + +
+ +

Python interface to the FaCT++ Reasoner.

+

This module is copied from the SimPhoNy project.

+

Original author: Matthias Urban

+ + + +
+ + + + + + + + + +
+ + + +

+ +OwlApiInterface + + + +

+ +
+ +

Interface to the FaCT++ reasoner via OWLAPI.

+ +
+ Source code in ontopy/factpluspluswrapper/owlapi_interface.py +
class OwlApiInterface:
+    """Interface to the FaCT++ reasoner via OWLAPI."""
+
+    def __init__(self):
+        """Initialize the interface."""
+
+    def reason(self, graph):
+        """Generate the inferred axioms for a given Graph.
+
+        Args:
+            graph (Graph): An rdflib graph to execute the reasoner on.
+
+        """
+        with tempfile.NamedTemporaryFile("wt") as tmpdir:
+            graph.serialize(tmpdir.name, format="xml")
+            return self._run(tmpdir.name, command="--run-reasoner")
+
+    def reason_files(self, *owl_files):
+        """Merge the given owl and generate the inferred axioms.
+
+        Args:
+            *owl_files (os.path): The owl files two merge.
+
+        """
+        return self._run(*owl_files, command="--run-reasoner")
+
+    def merge_files(self, *owl_files):
+        """Merge the given owl files and its import closure.
+
+        Args:
+            *owl_files (os.path): The owl files two merge.
+
+        """
+        return self._run(*owl_files, command="--merge-only")
+
+    @staticmethod
+    def _run(
+        *owl_files, command, output_file=None, return_graph=True
+    ) -> rdflib.Graph:
+        """Run the FaCT++ reasoner using a java command.
+
+        Args:
+            *owl_files (str): Path to the owl files to load.
+            command (str): Either --run-reasoner or --merge-only
+            output_file (str, optional): Where the output should be stored.
+                Defaults to None.
+            return_graph (bool, optional): Whether the result should be parsed
+                and returned. Defaults to True.
+
+        Returns:
+            The reasoned result.
+
+        """
+        java_base = os.path.abspath(
+            os.path.join(os.path.dirname(__file__), "java")
+        )
+        cmd = (
+            [
+                "java",
+                "-cp",
+                java_base + "/lib/jars/*",
+                "-Djava.library.path=" + java_base + "/lib/so",
+                "org.simphony.OntologyLoader",
+            ]
+            + [command]
+            + list(owl_files)
+        )
+        logger.info("Running Reasoner")
+        logger.debug("Command %s", cmd)
+        subprocess.run(cmd, check=True)  # nosec
+
+        graph = None
+        if return_graph:
+            graph = rdflib.Graph()
+            graph.parse(RESULT_FILE)
+        if output_file:
+            os.rename(RESULT_FILE, output_file)
+        else:
+            os.remove(RESULT_FILE)
+        return graph
+
+
+ + + +
+ + + + + + + + + +
+ + + +

+__init__(self) + + + special + + +

+ +
+ +

Initialize the interface.

+ +
+ Source code in ontopy/factpluspluswrapper/owlapi_interface.py +
def __init__(self):
+    """Initialize the interface."""
+
+
+
+ +
+ + + +
+ + + +

+merge_files(self, *owl_files) + + +

+ +
+ +

Merge the given owl files and its import closure.

+ +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
*owl_filesos.path

The owl files two merge.

()
+
+ Source code in ontopy/factpluspluswrapper/owlapi_interface.py +
def merge_files(self, *owl_files):
+    """Merge the given owl files and its import closure.
+
+    Args:
+        *owl_files (os.path): The owl files two merge.
+
+    """
+    return self._run(*owl_files, command="--merge-only")
+
+
+
+ +
+ + + +
+ + + +

+reason(self, graph) + + +

+ +
+ +

Generate the inferred axioms for a given Graph.

+ +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
graphGraph

An rdflib graph to execute the reasoner on.

required
+
+ Source code in ontopy/factpluspluswrapper/owlapi_interface.py +
def reason(self, graph):
+    """Generate the inferred axioms for a given Graph.
+
+    Args:
+        graph (Graph): An rdflib graph to execute the reasoner on.
+
+    """
+    with tempfile.NamedTemporaryFile("wt") as tmpdir:
+        graph.serialize(tmpdir.name, format="xml")
+        return self._run(tmpdir.name, command="--run-reasoner")
+
+
+
+ +
+ + + +
+ + + +

+reason_files(self, *owl_files) + + +

+ +
+ +

Merge the given owl and generate the inferred axioms.

+ +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
*owl_filesos.path

The owl files two merge.

()
+
+ Source code in ontopy/factpluspluswrapper/owlapi_interface.py +
def reason_files(self, *owl_files):
+    """Merge the given owl and generate the inferred axioms.
+
+    Args:
+        *owl_files (os.path): The owl files two merge.
+
+    """
+    return self._run(*owl_files, command="--run-reasoner")
+
+
+
+ +
+ + + + + +
+ +
+ +
+ + + + +
+ + + +

+reason_from_terminal() + + +

+ +
+ +

Run the reasoner from terminal.

+ +
+ Source code in ontopy/factpluspluswrapper/owlapi_interface.py +
def reason_from_terminal():
+    """Run the reasoner from terminal."""
+    parser = argparse.ArgumentParser(
+        description="Run the FaCT++ reasoner on the given OWL file. "
+        "Catalog files are used to load the import closure. "
+        "Then the reasoner is executed and the inferred triples are merged "
+        "with the asserted ones. If multiple OWL files are given, they are "
+        "merged beforehand"
+    )
+    parser.add_argument(
+        "owl_file", nargs="+", help="OWL file(s) to run the reasoner on."
+    )
+    parser.add_argument("output_file", help="Path to store inferred axioms to.")
+
+    args = parser.parse_args()
+    OwlApiInterface()._run(  # pylint: disable=protected-access
+        *args.owl_file,
+        command="--run-reasoner",
+        return_graph=False,
+        output_file=args.output_file,
+    )
+
+
+
+ +
+ + + + + + +
+ +
+ +
+ + + + + + + + + + + + + +
+
+ + + + + +
+ + + +
+ + + +
+
+
+
+ + + + + + + + + + \ No newline at end of file diff --git a/0.6.1/api_reference/ontopy/factpluspluswrapper/sync_factpp/index.html b/0.6.1/api_reference/ontopy/factpluspluswrapper/sync_factpp/index.html new file mode 100644 index 000000000..0683f48fe --- /dev/null +++ b/0.6.1/api_reference/ontopy/factpluspluswrapper/sync_factpp/index.html @@ -0,0 +1,1934 @@ + + + + + + + + + + + + + + + + + + + + + + + + + sync_factpp - EMMOntoPy + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + Skip to content + + +
+
+ +
+ + + + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + +

sync_factpp

+ + +
+ + +
+ +

ontopy.factpluspluswrapper.syncfatpp

+ + + +
+ + + + + + + + + +
+ + + +

+sync_reasoner_factpp(ontology_or_world=None, infer_property_values=False, debug=1) + + +

+ +
+ +

Run FaCT++ reasoner and load the inferred relations back into +the owlready2 triplestore.

+

Parameters

+

ontology_or_world : None | Ontology instance | World instance | list + Identifies the world to run the reasoner over. +infer_property_values : bool + Whether to also infer property values. +debug : bool + Whether to print debug info to standard output.

+ +
+ Source code in ontopy/factpluspluswrapper/sync_factpp.py +
def sync_reasoner_factpp(
+    ontology_or_world=None, infer_property_values=False, debug=1
+):
+    """Run FaCT++ reasoner and load the inferred relations back into
+    the owlready2 triplestore.
+
+    Parameters
+    ----------
+    ontology_or_world : None | Ontology instance | World instance | list
+        Identifies the world to run the reasoner over.
+    infer_property_values : bool
+        Whether to also infer property values.
+    debug : bool
+        Whether to print debug info to standard output.
+    """
+    # pylint: disable=too-many-locals,too-many-branches,too-many-statements
+    if isinstance(ontology_or_world, World):
+        world = ontology_or_world
+    elif isinstance(ontology_or_world, Ontology):
+        world = ontology_or_world.world
+    elif isinstance(ontology_or_world, Sequence):
+        world = ontology_or_world[0].world
+    else:
+        world = owlready2.default_world
+
+    if isinstance(ontology_or_world, Ontology):
+        ontology = ontology_or_world
+    elif CURRENT_NAMESPACES.get():
+        ontology = CURRENT_NAMESPACES.get()[-1].ontology
+    else:
+        ontology = world.get_ontology(_INFERRENCES_ONTOLOGY)
+
+    locked = world.graph.has_write_lock()
+    if locked:
+        world.graph.release_write_lock()  # Not needed during reasoning
+
+    try:
+        if debug:
+            print("*** Prepare graph")
+        # Exclude owl:imports because they are not needed and can
+        # cause trouble when loading the inferred ontology
+        graph1 = rdflib.Graph()
+        for subject, predicate, obj in world.as_rdflib_graph().triples(
+            (None, None, None)
+        ):
+            if predicate != OWL.imports:
+                graph1.add((subject, predicate, obj))
+
+        if debug:
+            print("*** Run FaCT++ reasoner (and postprocess)")
+        graph2 = FaCTPPGraph(graph1).inferred_graph()
+
+        if debug:
+            print("*** Load inferred ontology")
+        # Check all rdfs:subClassOf relations in the inferred graph and add
+        # them to the world if they are missing
+        new_parents = defaultdict(list)
+        new_equivs = defaultdict(list)
+        entity_2_type = {}
+
+        for (
+            subject,
+            predicate,
+            obj,
+        ) in graph2.triples(  # pylint: disable=not-an-iterable
+            (None, None, None)
+        ):
+            if (
+                isinstance(subject, URIRef)
+                and predicate in OWL_2_TYPE
+                and isinstance(obj, URIRef)
+            ):
+                s_storid = ontology._abbreviate(str(subject), False)
+                p_storid = ontology._abbreviate(str(predicate), False)
+                o_storid = ontology._abbreviate(str(obj), False)
+                if (
+                    s_storid is not None
+                    and p_storid is not None
+                    and o_storid is not None
+                ):
+                    if predicate in (
+                        RDFS.subClassOf,
+                        RDFS.subPropertyOf,
+                        RDF.type,
+                    ):
+                        new_parents[s_storid].append(o_storid)
+                        entity_2_type[s_storid] = OWL_2_TYPE[predicate]
+                    else:
+                        new_equivs[s_storid].append(o_storid)
+                        entity_2_type[s_storid] = OWL_2_TYPE[predicate]
+
+        if infer_property_values:
+            inferred_obj_relations = []
+            # Hmm, does FaCT++ infer any property values?
+            # If not, remove the `infer_property_values` keyword argument.
+            raise NotImplementedError
+
+    finally:
+        if locked:
+            world.graph.acquire_write_lock()  # re-lock when applying results
+
+    if debug:
+        print("*** Applying reasoning results")
+
+    _apply_reasoning_results(
+        world, ontology, debug, new_parents, new_equivs, entity_2_type
+    )
+    if infer_property_values:
+        _apply_inferred_obj_relations(
+            world, ontology, debug, inferred_obj_relations
+        )
+
+
+
+ +
+ + + + + + +
+ +
+ +
+ + + + + + + + + + + + + +
+
+ + + + + +
+ + + +
+ + + +
+
+
+
+ + + + + + + + + + \ No newline at end of file diff --git a/0.6.1/api_reference/ontopy/graph/index.html b/0.6.1/api_reference/ontopy/graph/index.html new file mode 100644 index 000000000..e862c4a4d --- /dev/null +++ b/0.6.1/api_reference/ontopy/graph/index.html @@ -0,0 +1,4355 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + graph - EMMOntoPy + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + Skip to content + + +
+
+ +
+ + + + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + + + + + +
+
+ + + + +

graph

+ + +
+ + +
+ +

A module for visualising ontologies using graphviz.

+ + + +
+ + + + + + + + + +
+ + + +

+ +OntoGraph + + + +

+ +
+ +

Class for visualising an ontology.

+

Parameters

+

ontology : ontopy.Ontology instance + Ontology to visualize. +root : None | graph.ALL | string | owlready2.ThingClass instance + Name or owlready2 entity of root node to plot subgraph + below. If root is graph.ALL, all classes will be included + in the subgraph. +leaves : None | sequence + A sequence of leaf node names for generating sub-graphs. +entities : None | sequence + A sequence of entities to add to the graph. +relations : "all" | str | None | sequence + Sequence of relations to visualise. If "all", means to include + all relations. +style : None | dict | "default" + A dict mapping the name of the different graphical elements + to dicts of dot graph attributes. Supported graphical elements + include: + - graphtype : "Digraph" | "Graph" + - graph : graph attributes (G) + - class : nodes for classes (N) + - root : additional attributes for root nodes (N) + - leaf : additional attributes for leaf nodes (N) + - defined_class : nodes for defined classes (N) + - class_construct : nodes for class constructs (N) + - individual : nodes for invididuals (N) + - object_property : nodes for object properties (N) + - data_property : nodes for data properties (N) + - annotation_property : nodes for annotation properties (N) + - added_node : nodes added because addnodes is true (N) + - isA : edges for isA relations (E) + - not : edges for not class constructs (E) + - equivalent_to : edges for equivalent_to relations (E) + - disjoint_with : edges for disjoint_with relations (E) + - inverse_of : edges for inverse_of relations (E) + - default_relation : default edges relations and restrictions (E) + - relations : dict of styles for different relations (E) + - inverse : default edges for inverse relations (E) + - default_dataprop : default edges for data properties (E) + - nodes : attribute for individual nodes (N) + - edges : attribute for individual edges (E) + If style is None or "default", the default style is used. 
+ See https://www.graphviz.org/doc/info/attrs.html +edgelabels : None | bool | dict + Whether to add labels to the edges of the generated graph. + It is also possible to provide a dict mapping the + full labels (with cardinality stripped off for restrictions) + to some abbreviations. +addnodes : bool + Whether to add missing target nodes in relations. +addconstructs : bool + Whether to add nodes representing class constructs. +included_namespaces : sequence + In combination with root, only include classes with one of + the listed namespaces. If empty (the default), nothing is + excluded. +included_ontologies : sequence + In combination with root, only include classes defined in + one of the listed ontologies. If empty (default), nothing is + excluded. +parents : int + Include parents levels of parents. +excluded_nodes : None | sequence + Sequence of labels of nodes to exclude. +graph : None | pydot.Dot instance + Graphviz Digraph object to plot into. If None, a new graph object + is created using the keyword arguments. +imported : bool + Whether to include imported classes if entities is None. +kwargs : + Passed to graphviz.Digraph.

+ +
+ Source code in ontopy/graph.py +
class OntoGraph:  # pylint: disable=too-many-instance-attributes
+    """Class for visualising an ontology.
+
+    Parameters
+    ----------
+    ontology : ontopy.Ontology instance
+        Ontology to visualize.
+    root : None | graph.ALL | string | owlready2.ThingClass instance
+        Name or owlready2 entity of root node to plot subgraph
+        below.  If `root` is `graph.ALL`, all classes will be included
+        in the subgraph.
+    leaves : None | sequence
+        A sequence of leaf node names for generating sub-graphs.
+    entities : None | sequence
+        A sequence of entities to add to the graph.
+    relations : "all" | str | None | sequence
+        Sequence of relations to visualise.  If "all", means to include
+        all relations.
+    style : None | dict | "default"
+        A dict mapping the name of the different graphical elements
+        to dicts of dot graph attributes. Supported graphical elements
+        include:
+          - graphtype : "Digraph" | "Graph"
+          - graph : graph attributes (G)
+          - class : nodes for classes (N)
+          - root : additional attributes for root nodes (N)
+          - leaf : additional attributes for leaf nodes (N)
+          - defined_class : nodes for defined classes (N)
+          - class_construct : nodes for class constructs (N)
+          - individual : nodes for invididuals (N)
+          - object_property : nodes for object properties (N)
+          - data_property : nodes for data properties (N)
+          - annotation_property : nodes for annotation properties (N)
+          - added_node : nodes added because `addnodes` is true (N)
+          - isA : edges for isA relations (E)
+          - not : edges for not class constructs (E)
+          - equivalent_to : edges for equivalent_to relations (E)
+          - disjoint_with : edges for disjoint_with relations (E)
+          - inverse_of : edges for inverse_of relations (E)
+          - default_relation : default edges relations and restrictions (E)
+          - relations : dict of styles for different relations (E)
+          - inverse : default edges for inverse relations (E)
+          - default_dataprop : default edges for data properties (E)
+          - nodes : attribute for individual nodes (N)
+          - edges : attribute for individual edges (E)
+        If style is None or "default", the default style is used.
+        See https://www.graphviz.org/doc/info/attrs.html
+    edgelabels : None | bool | dict
+        Whether to add labels to the edges of the generated graph.
+        It is also possible to provide a dict mapping the
+        full labels (with cardinality stripped off for restrictions)
+        to some abbreviations.
+    addnodes : bool
+        Whether to add missing target nodes in relations.
+    addconstructs : bool
+        Whether to add nodes representing class constructs.
+    included_namespaces : sequence
+        In combination with `root`, only include classes with one of
+        the listed namespaces.  If empty (the default), nothing is
+        excluded.
+    included_ontologies : sequence
+        In combination with `root`, only include classes defined in
+        one of the listed ontologies.  If empty (default), nothing is
+        excluded.
+    parents : int
+        Include `parents` levels of parents.
+    excluded_nodes : None | sequence
+        Sequence of labels of nodes to exclude.
+    graph : None | pydot.Dot instance
+        Graphviz Digraph object to plot into.  If None, a new graph object
+        is created using the keyword arguments.
+    imported : bool
+        Whether to include imported classes if `entities` is None.
+    kwargs :
+        Passed to graphviz.Digraph.
+    """
+
+    def __init__(  # pylint: disable=too-many-arguments,too-many-locals
+        self,
+        ontology,
+        root=None,
+        leaves=None,
+        entities=None,
+        relations="isA",
+        style=None,
+        edgelabels=None,
+        addnodes=False,
+        addconstructs=False,
+        included_namespaces=(),
+        included_ontologies=(),
+        parents=0,
+        excluded_nodes=None,
+        graph=None,
+        imported=False,
+        **kwargs,
+    ):
+        if style is None or style == "default":
+            style = _default_style
+
+        if graph is None:
+            graphtype = style.get("graphtype", "Digraph")
+            dotcls = getattr(graphviz, graphtype)
+            graph_attr = kwargs.pop("graph_attr", {})
+            for key, value in style.get("graph", {}).items():
+                graph_attr.setdefault(key, value)
+            self.dot = dotcls(graph_attr=graph_attr, **kwargs)
+            self.nodes = set()
+            self.edges = set()
+        else:
+            if ontology != graph.ontology:
+                raise ValueError(
+                    "the same ontology must be used when extending a graph"
+                )
+            self.dot = graph.dot.copy()
+            self.nodes = graph.nodes.copy()
+            self.edges = graph.edges.copy()
+
+        self.ontology = ontology
+        self.relations = set(
+            [relations] if isinstance(relations, str) else relations
+        )
+        self.style = style
+        self.edgelabels = edgelabels
+        self.addnodes = addnodes
+        self.addconstructs = addconstructs
+        self.excluded_nodes = set(excluded_nodes) if excluded_nodes else set()
+        self.imported = imported
+
+        if root == ALL:
+            self.add_entities(
+                relations=relations,
+                edgelabels=edgelabels,
+                addnodes=addnodes,
+                addconstructs=addconstructs,
+            )
+        elif root:
+            self.add_branch(
+                root,
+                leaves,
+                relations=relations,
+                edgelabels=edgelabels,
+                addnodes=addnodes,
+                addconstructs=addconstructs,
+                included_namespaces=included_namespaces,
+                included_ontologies=included_ontologies,
+            )
+            if parents:
+                self.add_parents(
+                    root,
+                    levels=parents,
+                    relations=relations,
+                    edgelabels=edgelabels,
+                    addnodes=addnodes,
+                    addconstructs=addconstructs,
+                )
+
+        if entities:
+            self.add_entities(
+                entities=entities,
+                relations=relations,
+                edgelabels=edgelabels,
+                addnodes=addnodes,
+                addconstructs=addconstructs,
+            )
+
+    def add_entities(  # pylint: disable=too-many-arguments
+        self,
+        entities=None,
+        relations="isA",
+        edgelabels=None,
+        addnodes=False,
+        addconstructs=False,
+        nodeattrs=None,
+        **attrs,
+    ):
+        """Adds a sequence of entities to the graph.  If `entities` is None,
+        all classes are added to the graph.
+
+        `nodeattrs` is a dict mapping node names to are attributes for
+        dedicated nodes.
+        """
+        if entities is None:
+            entities = self.ontology.classes(imported=self.imported)
+        self.add_nodes(entities, nodeattrs=nodeattrs, **attrs)
+        self.add_edges(
+            relations=relations,
+            edgelabels=edgelabels,
+            addnodes=addnodes,
+            addconstructs=addconstructs,
+            **attrs,
+        )
+
+    def add_branch(  # pylint: disable=too-many-arguments,too-many-locals
+        self,
+        root,
+        leaves=None,
+        include_leaves=True,
+        strict_leaves=False,
+        exclude=None,
+        relations="isA",
+        edgelabels=None,
+        addnodes=False,
+        addconstructs=False,
+        included_namespaces=(),
+        included_ontologies=(),
+        include_parents="closest",
+        **attrs,
+    ):
+        """Adds branch under `root` ending at any entity included in the
+        sequence `leaves`.  If `include_leaves` is true, leaf classes are
+        also included."""
+        if leaves is None:
+            leaves = ()
+        classes = self.ontology.get_branch(
+            root=root,
+            leaves=leaves,
+            include_leaves=include_leaves,
+            strict_leaves=strict_leaves,
+            exclude=exclude,
+        )
+
+        classes = filter_classes(
+            classes,
+            included_namespaces=included_namespaces,
+            included_ontologies=included_ontologies,
+        )
+
+        nodeattrs = {}
+        nodeattrs[get_label(root)] = self.style.get("root", {})
+        for leaf in leaves:
+            nodeattrs[get_label(leaf)] = self.style.get("leaf", {})
+
+        self.add_entities(
+            entities=classes,
+            relations=relations,
+            edgelabels=edgelabels,
+            addnodes=addnodes,
+            addconstructs=addconstructs,
+            nodeattrs=nodeattrs,
+            **attrs,
+        )
+        closest_ancestors = False
+        ancestor_generations = None
+        if include_parents == "closest":
+            closest_ancestors = True
+        elif isinstance(include_parents, int):
+            ancestor_generations = include_parents
+        parents = self.ontology.get_ancestors(
+            classes,
+            closest=closest_ancestors,
+            generations=ancestor_generations,
+            strict=True,
+        )
+        if parents:
+            for parent in parents:
+                nodeattrs[get_label(parent)] = self.style.get("parent_node", {})
+            self.add_entities(
+                entities=parents,
+                relations=relations,
+                edgelabels=edgelabels,
+                addnodes=addnodes,
+                addconstructs=addconstructs,
+                nodeattrs=nodeattrs,
+                **attrs,
+            )
+
+    def add_parents(  # pylint: disable=too-many-arguments
+        self,
+        name,
+        levels=1,
+        relations="isA",
+        edgelabels=None,
+        addnodes=False,
+        addconstructs=False,
+        **attrs,
+    ):
+        """Add `levels` levels of strict parents of entity `name`."""
+
+        def addparents(entity, nodes, parents):
+            if nodes > 0:
+                for parent in entity.get_parents(strict=True):
+                    parents.add(parent)
+                    addparents(parent, nodes - 1, parents)
+
+        entity = self.ontology[name] if isinstance(name, str) else name
+        parents = set()
+        addparents(entity, levels, parents)
+        self.add_entities(
+            entities=parents,
+            relations=relations,
+            edgelabels=edgelabels,
+            addnodes=addnodes,
+            addconstructs=addconstructs,
+            **attrs,
+        )
+
+    def add_node(self, name, nodeattrs=None, **attrs):
+        """Add node with given name. `attrs` are graphviz node attributes."""
+        entity = self.ontology[name] if isinstance(name, str) else name
+        label = get_label(entity)
+        if label not in self.nodes.union(self.excluded_nodes):
+            kwargs = self.get_node_attrs(
+                entity, nodeattrs=nodeattrs, attrs=attrs
+            )
+            if hasattr(entity, "iri"):
+                kwargs.setdefault("URL", entity.iri)
+            self.dot.node(label, label=label, **kwargs)
+            self.nodes.add(label)
+
+    def add_nodes(self, names, nodeattrs, **attrs):
+        """Add nodes with given names. `attrs` are graphviz node attributes."""
+        for name in names:
+            self.add_node(name, nodeattrs=nodeattrs, **attrs)
+
+    def add_edge(self, subject, predicate, obj, edgelabel=None, **attrs):
+        """Add edge corresponding for ``(subject, predicate, object)``
+        triplet."""
+        subject = subject if isinstance(subject, str) else get_label(subject)
+        predicate = (
+            predicate if isinstance(predicate, str) else get_label(predicate)
+        )
+        obj = obj if isinstance(obj, str) else get_label(obj)
+        if subject in self.excluded_nodes or obj in self.excluded_nodes:
+            return
+        if not isinstance(subject, str) or not isinstance(obj, str):
+            raise TypeError("`subject` and `object` must be strings")
+        if subject not in self.nodes:
+            raise RuntimeError(f'`subject` "{subject}" must have been added')
+        if obj not in self.nodes:
+            raise RuntimeError(f'`object` "{obj}" must have been added')
+        key = (subject, predicate, obj)
+        if key not in self.edges:
+            relations = self.style.get("relations", {})
+            rels = set(
+                self.ontology[_] for _ in relations if _ in self.ontology
+            )
+            if (edgelabel is None) and (
+                (predicate in rels) or (predicate == "isA")
+            ):
+                edgelabel = self.edgelabels
+            label = None
+            if edgelabel is None:
+                tokens = predicate.split()
+                if len(tokens) == 2 and tokens[1] in ("some", "only"):
+                    label = f"{tokens[0]} {tokens[1]}"
+                elif len(tokens) == 3 and tokens[1] in (
+                    "exactly",
+                    "min",
+                    "max",
+                ):
+                    label = f"{tokens[0]} {tokens[1]} {tokens[2]}"
+            elif isinstance(edgelabel, str):
+                label = edgelabel
+            elif isinstance(edgelabel, dict):
+                label = edgelabel.get(predicate, predicate)
+            elif edgelabel:
+                label = predicate
+            kwargs = self.get_edge_attrs(predicate, attrs=attrs)
+            self.dot.edge(subject, obj, label=label, **kwargs)
+            self.edges.add(key)
+
+    def add_source_edges(  # pylint: disable=too-many-arguments,too-many-branches
+        self,
+        source,
+        relations=None,
+        edgelabels=None,
+        addnodes=None,
+        addconstructs=None,
+        **attrs,
+    ):
+        """Adds all relations originating from entity `source` who's type
+        are listed in `relations`."""
+        if relations is None:
+            relations = self.relations
+        elif isinstance(relations, str):
+            relations = set([relations])
+        else:
+            relations = set(relations)
+
+        edgelabels = self.edgelabels if edgelabels is None else edgelabels
+        addconstructs = (
+            self.addconstructs if addconstructs is None else addconstructs
+        )
+
+        entity = self.ontology[source] if isinstance(source, str) else source
+        label = get_label(entity)
+        for relation in entity.is_a:
+            # isA
+            if isinstance(
+                relation, (owlready2.ThingClass, owlready2.ObjectPropertyClass)
+            ):
+                if "all" in relations or "isA" in relations:
+                    rlabel = get_label(relation)
+                    # FIXME - we actually want to include individuals...
+                    if isinstance(entity, owlready2.Thing):
+                        continue
+                    if relation not in entity.get_parents(strict=True):
+                        continue
+                    if not self.add_missing_node(relation, addnodes=addnodes):
+                        continue
+                    self.add_edge(
+                        subject=label,
+                        predicate="isA",
+                        obj=rlabel,
+                        edgelabel=edgelabels,
+                        **attrs,
+                    )
+
+            # restriction
+            elif isinstance(relation, owlready2.Restriction):
+                rname = get_label(relation.property)
+                if "all" in relations or rname in relations:
+                    rlabel = f"{rname} {typenames[relation.type]}"
+                    if isinstance(relation.value, owlready2.ThingClass):
+                        obj = get_label(relation.value)
+                        if not self.add_missing_node(relation.value, addnodes):
+                            continue
+                    elif (
+                        isinstance(relation.value, owlready2.ClassConstruct)
+                        and self.addconstructs
+                    ):
+                        obj = self.add_class_construct(relation.value)
+                    else:
+                        continue
+                    pred = asstring(
+                        relation, exclude_object=True, ontology=self.ontology
+                    )
+                    self.add_edge(
+                        label, pred, obj, edgelabel=edgelabels, **attrs
+                    )
+
+            # inverse
+            if isinstance(relation, owlready2.Inverse):
+                if "all" in relations or "inverse" in relations:
+                    rlabel = get_label(relation)
+                    if not self.add_missing_node(relation, addnodes=addnodes):
+                        continue
+                    if relation not in entity.get_parents(strict=True):
+                        continue
+                    self.add_edge(
+                        subject=label,
+                        predicate="inverse",
+                        obj=rlabel,
+                        edgelabel=edgelabels,
+                        **attrs,
+                    )
+
+    def add_edges(  # pylint: disable=too-many-arguments
+        self,
+        sources=None,
+        relations=None,
+        edgelabels=None,
+        addnodes=None,
+        addconstructs=None,
+        **attrs,
+    ):
+        """Adds all relations originating from entities `sources` who's type
+        are listed in `relations`.  If `sources` is None, edges are added
+        between all current nodes."""
+        if sources is None:
+            sources = self.nodes
+        for source in sources.copy():
+            self.add_source_edges(
+                source,
+                relations=relations,
+                edgelabels=edgelabels,
+                addnodes=addnodes,
+                addconstructs=addconstructs,
+                **attrs,
+            )
+
+    def add_missing_node(self, name, addnodes=None):
+        """Checks if `name` corresponds to a missing node and add it if
+        `addnodes` is true.
+
+        Returns true if the node exists or is added, false otherwise."""
+        addnodes = self.addnodes if addnodes is None else addnodes
+        entity = self.ontology[name] if isinstance(name, str) else name
+        label = get_label(entity)
+        if label not in self.nodes:
+            if addnodes:
+                self.add_node(entity, **self.style.get("added_node", {}))
+            else:
+                return False
+        return True
+
+    def add_class_construct(self, construct):
+        """Adds class construct and return its label."""
+        self.add_node(construct, **self.style.get("class_construct", {}))
+        label = get_label(construct)
+        if isinstance(construct, owlready2.Or):
+            for cls in construct.Classes:
+                clslabel = get_label(cls)
+                if clslabel not in self.nodes and self.addnodes:
+                    self.add_node(cls)
+                if clslabel in self.nodes:
+                    self.add_edge(get_label(cls), "isA", label)
+        elif isinstance(construct, owlready2.And):
+            for cls in construct.Classes:
+                clslabel = get_label(cls)
+                if clslabel not in self.nodes and self.addnodes:
+                    self.add_node(cls)
+                if clslabel in self.nodes:
+                    self.add_edge(label, "isA", get_label(cls))
+        elif isinstance(construct, owlready2.Not):
+            clslabel = get_label(construct.Class)
+            if clslabel not in self.nodes and self.addnodes:
+                self.add_node(construct.Class)
+            if clslabel in self.nodes:
+                self.add_edge(clslabel, "not", label)
+        # Neither and nor inverse constructs are
+        return label
+
+    def get_node_attrs(self, name, nodeattrs, attrs):
+        """Returns attributes for node or edge `name`.  `attrs` overrides
+        the default style."""
+        entity = self.ontology[name] if isinstance(name, str) else name
+        label = get_label(entity)
+        # class
+        if isinstance(entity, owlready2.ThingClass):
+            if entity.is_defined:
+                kwargs = self.style.get("defined_class", {})
+            else:
+                kwargs = self.style.get("class", {})
+        # class construct
+        elif isinstance(entity, owlready2.ClassConstruct):
+            kwargs = self.style.get("class_construct", {})
+        # individual
+        elif isinstance(entity, owlready2.Thing):
+            kwargs = self.style.get("individual", {})
+        # object property
+        elif isinstance(entity, owlready2.ObjectPropertyClass):
+            kwargs = self.style.get("object_property", {})
+        # data property
+        elif isinstance(entity, owlready2.DataPropertyClass):
+            kwargs = self.style.get("data_property", {})
+        # annotation property
+        elif isinstance(entity, owlready2.AnnotationPropertyClass):
+            kwargs = self.style.get("annotation_property", {})
+        else:
+            raise TypeError(f"Unknown entity type: {entity!r}")
+        kwargs = kwargs.copy()
+        kwargs.update(self.style.get("nodes", {}).get(label, {}))
+        if nodeattrs:
+            kwargs.update(nodeattrs.get(label, {}))
+        kwargs.update(attrs)
+        return kwargs
+
+    def _relation_styles(
+        self, entity: ThingClass, relations: dict, rels: set
+    ) -> dict:
+        """Helper function that returns the styles of the relations
+        to be used.
+
+        Parameters:
+            entity: the entity of the parent relation
+            relations: relations with default styles
+            rels: relations to be considered that have default styles,
+                either for the prefLabel or one of the altLabels
+        """
+        for relation in entity.mro():
+            if relation in rels:
+                if str(get_label(relation)) in relations:
+                    rattrs = relations[str(get_label(relation))]
+                else:
+                    for alt_label in relation.get_annotations()["altLabel"]:
+                        rattrs = relations[str(alt_label)]
+
+                break
+        else:
+            warnings.warn(
+                f"Style not defined for relation {get_label(entity)}. "
+                "Resorting to default style."
+            )
+            rattrs = self.style.get("default_relation", {})
+        return rattrs
+
+    def get_edge_attrs(self, predicate: str, attrs: dict) -> dict:
+        """Returns attributes for node or edge `predicate`.  `attrs` overrides
+        the default style.
+
+        Parameters:
+            predicate: predicate to get attributes for
+            attrs: desired attributes to override default
+        """
+        # given type
+        types = ("isA", "equivalent_to", "disjoint_with", "inverse_of")
+        if predicate in types:
+            kwargs = self.style.get(predicate, {}).copy()
+        else:
+            kwargs = {}
+            name = predicate.split(None, 1)[0]
+            match = re.match(r"Inverse\((.*)\)", name)
+            if match:
+                (name,) = match.groups()
+                attrs = attrs.copy()
+                for key, value in self.style.get("inverse", {}).items():
+                    attrs.setdefault(key, value)
+            if not isinstance(name, str) or name in self.ontology:
+                entity = self.ontology[name] if isinstance(name, str) else name
+                relations = self.style.get("relations", {})
+                rels = set(
+                    self.ontology[_] for _ in relations if _ in self.ontology
+                )
+                rattrs = self._relation_styles(entity, relations, rels)
+
+                # object property
+                if isinstance(
+                    entity,
+                    (owlready2.ObjectPropertyClass, owlready2.ObjectProperty),
+                ):
+                    kwargs = self.style.get("default_relation", {}).copy()
+                    kwargs.update(rattrs)
+                # data property
+                elif isinstance(
+                    entity,
+                    (owlready2.DataPropertyClass, owlready2.DataProperty),
+                ):
+                    kwargs = self.style.get("default_dataprop", {}).copy()
+                    kwargs.update(rattrs)
+                else:
+                    raise TypeError(f"Unknown entity type: {entity!r}")
+        kwargs.update(self.style.get("edges", {}).get(predicate, {}))
+        kwargs.update(attrs)
+        return kwargs
+
+    def add_legend(self, relations=None):
+        """Adds legend for specified relations to the graph.
+
+        If `relations` is "all", the legend will contain all relations
+        that are defined in the style.  By default the legend will
+        only contain relations that are currently included in the
+        graph.
+
+        Hence, you usually want to call add_legend() as the last method
+        before saving or displaying.
+
+        Relations with defined style will be bold in legend.
+        Relations that have inherited style from parent relation
+        will not be bold.
+        """
+        rels = self.style.get("relations", {})
+        if relations is None:
+            relations = self.get_relations(sort=True)
+        elif relations == "all":
+            relations = ["isA"] + list(rels.keys()) + ["inverse"]
+        elif isinstance(relations, str):
+            relations = relations.split(",")
+
+        nrelations = len(relations)
+        if nrelations == 0:
+            return
+
+        table = (
+            '<<table border="0" cellpadding="2" cellspacing="0" cellborder="0">'
+        )
+        label1 = [table]
+        label2 = [table]
+        for index, relation in enumerate(relations):
+            if (relation in rels) or (relation == "isA"):
+                label1.append(
+                    f'<tr><td align="right" '
+                    f'port="i{index}"><b>{relation}</b></td></tr>'
+                )
+            else:
+                label1.append(
+                    f'<tr><td align="right" '
+                    f'port="i{index}">{relation}</td></tr>'
+                )
+            label2.append(f'<tr><td port="i{index}">&nbsp;</td></tr>')
+        label1.append("</table>>")
+        label2.append("</table>>")
+        self.dot.node("key1", label="\n".join(label1), shape="plaintext")
+        self.dot.node("key2", label="\n".join(label2), shape="plaintext")
+
+        rankdir = self.dot.graph_attr.get("rankdir", "TB")
+        constraint = "false" if rankdir in ("TB", "BT") else "true"
+        inv = rankdir in ("BT",)
+
+        for index in range(nrelations):
+            relation = (
+                relations[nrelations - 1 - index] if inv else relations[index]
+            )
+            if relation == "inverse":
+                kwargs = self.style.get("inverse", {}).copy()
+            else:
+                kwargs = self.get_edge_attrs(relation, {}).copy()
+            kwargs["constraint"] = constraint
+            with self.dot.subgraph(name=f"sub{index}") as subgraph:
+                subgraph.attr(rank="same")
+                if rankdir in ("BT", "LR"):
+                    self.dot.edge(
+                        f"key1:i{index}:e", f"key2:i{index}:w", **kwargs
+                    )
+                else:
+                    self.dot.edge(
+                        f"key2:i{index}:w", f"key1:i{index}:e", **kwargs
+                    )
+
+    def get_relations(self, sort=True):
+        """Returns a set of relations in current graph.  If `sort` is true,
+        a sorted list is returned."""
+        relations = set()
+        for _, predicate, _ in self.edges:
+            if predicate.startswith("Inverse"):
+                relations.add("inverse")
+                match = re.match(r"Inverse\((.+)\)", predicate)
+                if match is None:
+                    raise ValueError(
+                        "Could unexpectedly not find the inverse relation "
+                        f"just added in: {predicate}"
+                    )
+                relations.add(match.groups()[0])
+            else:
+                relations.add(predicate.split(None, 1)[0])
+
+        # Sort, but place 'isA' first and 'inverse' last
+        if sort:
+            start, end = [], []
+            if "isA" in relations:
+                relations.remove("isA")
+                start.append("isA")
+            if "inverse" in relations:
+                relations.remove("inverse")
+                end.append("inverse")
+            relations = start + sorted(relations) + end
+
+        return relations
+
+    def save(self, filename, fmt=None, **kwargs):
+        """Saves graph to `filename`.  If format is not given, it is
+        inferred from `filename`."""
+        base = os.path.splitext(filename)[0]
+        fmt = get_format(filename, default="svg", fmt=fmt)
+        kwargs.setdefault("cleanup", True)
+        if fmt in ("graphviz", "gv"):
+            if "dictionary" in kwargs:
+                self.dot.save(filename, dictionary=kwargs["dictionary"])
+            else:
+                self.dot.save(filename)
+        else:
+            fmt = kwargs.pop("format", fmt)
+            self.dot.render(base, format=fmt, **kwargs)
+
+    def view(self):
+        """Shows the graph in a viewer."""
+        self.dot.view(cleanup=True)
+
+    def get_figsize(self):
+        """Returns the default figure size (width, height) in points."""
+        with tempfile.TemporaryDirectory() as tmpdir:
+            tmpfile = os.path.join(tmpdir, "graph.svg")
+            self.save(tmpfile)
+            xml = ET.parse(tmpfile)
+            svg = xml.getroot()
+            width = svg.attrib["width"]
+            height = svg.attrib["height"]
+            if not width.endswith("pt"):
+                # ensure that units are in points
+                raise ValueError(
+                    "The width attribute should always be given in 'pt', "
+                    f"but it is: {width}"
+                )
+
+            def asfloat(string):
+                return float(re.match(r"^[\d.]+", string).group())
+
+        return asfloat(width), asfloat(height)
+
+
+ + + +
+ + + + + + + + + + +
+ + + +

+add_branch(self, root, leaves=None, include_leaves=True, strict_leaves=False, exclude=None, relations='isA', edgelabels=None, addnodes=False, addconstructs=False, included_namespaces=(), included_ontologies=(), include_parents='closest', **attrs) + + +

+ +
+ +

Adds branch under root ending at any entity included in the +sequence leaves. If include_leaves is true, leaf classes are +also included.

+ +
+ Source code in ontopy/graph.py +
def add_branch(  # pylint: disable=too-many-arguments,too-many-locals
+    self,
+    root,
+    leaves=None,
+    include_leaves=True,
+    strict_leaves=False,
+    exclude=None,
+    relations="isA",
+    edgelabels=None,
+    addnodes=False,
+    addconstructs=False,
+    included_namespaces=(),
+    included_ontologies=(),
+    include_parents="closest",
+    **attrs,
+):
+    """Adds branch under `root` ending at any entity included in the
+    sequence `leaves`.  If `include_leaves` is true, leaf classes are
+    also included."""
+    if leaves is None:
+        leaves = ()
+    classes = self.ontology.get_branch(
+        root=root,
+        leaves=leaves,
+        include_leaves=include_leaves,
+        strict_leaves=strict_leaves,
+        exclude=exclude,
+    )
+
+    classes = filter_classes(
+        classes,
+        included_namespaces=included_namespaces,
+        included_ontologies=included_ontologies,
+    )
+
+    nodeattrs = {}
+    nodeattrs[get_label(root)] = self.style.get("root", {})
+    for leaf in leaves:
+        nodeattrs[get_label(leaf)] = self.style.get("leaf", {})
+
+    self.add_entities(
+        entities=classes,
+        relations=relations,
+        edgelabels=edgelabels,
+        addnodes=addnodes,
+        addconstructs=addconstructs,
+        nodeattrs=nodeattrs,
+        **attrs,
+    )
+    closest_ancestors = False
+    ancestor_generations = None
+    if include_parents == "closest":
+        closest_ancestors = True
+    elif isinstance(include_parents, int):
+        ancestor_generations = include_parents
+    parents = self.ontology.get_ancestors(
+        classes,
+        closest=closest_ancestors,
+        generations=ancestor_generations,
+        strict=True,
+    )
+    if parents:
+        for parent in parents:
+            nodeattrs[get_label(parent)] = self.style.get("parent_node", {})
+        self.add_entities(
+            entities=parents,
+            relations=relations,
+            edgelabels=edgelabels,
+            addnodes=addnodes,
+            addconstructs=addconstructs,
+            nodeattrs=nodeattrs,
+            **attrs,
+        )
+
+
+
+ +
+ + + +
+ + + +

+add_class_construct(self, construct) + + +

+ +
+ +

Adds class construct and return its label.

+ +
+ Source code in ontopy/graph.py +
def add_class_construct(self, construct):
+    """Adds class construct and return its label."""
+    self.add_node(construct, **self.style.get("class_construct", {}))
+    label = get_label(construct)
+    if isinstance(construct, owlready2.Or):
+        for cls in construct.Classes:
+            clslabel = get_label(cls)
+            if clslabel not in self.nodes and self.addnodes:
+                self.add_node(cls)
+            if clslabel in self.nodes:
+                self.add_edge(get_label(cls), "isA", label)
+    elif isinstance(construct, owlready2.And):
+        for cls in construct.Classes:
+            clslabel = get_label(cls)
+            if clslabel not in self.nodes and self.addnodes:
+                self.add_node(cls)
+            if clslabel in self.nodes:
+                self.add_edge(label, "isA", get_label(cls))
+    elif isinstance(construct, owlready2.Not):
+        clslabel = get_label(construct.Class)
+        if clslabel not in self.nodes and self.addnodes:
+            self.add_node(construct.Class)
+        if clslabel in self.nodes:
+            self.add_edge(clslabel, "not", label)
+    # Neither and nor inverse constructs are
+    return label
+
+
+
+ +
+ + + +
+ + + +

+add_edge(self, subject, predicate, obj, edgelabel=None, **attrs) + + +

+ +
+ +

Add edge corresponding for (subject, predicate, object) +triplet.

+ +
+ Source code in ontopy/graph.py +
def add_edge(self, subject, predicate, obj, edgelabel=None, **attrs):
+    """Add edge corresponding for ``(subject, predicate, object)``
+    triplet."""
+    subject = subject if isinstance(subject, str) else get_label(subject)
+    predicate = (
+        predicate if isinstance(predicate, str) else get_label(predicate)
+    )
+    obj = obj if isinstance(obj, str) else get_label(obj)
+    if subject in self.excluded_nodes or obj in self.excluded_nodes:
+        return
+    if not isinstance(subject, str) or not isinstance(obj, str):
+        raise TypeError("`subject` and `object` must be strings")
+    if subject not in self.nodes:
+        raise RuntimeError(f'`subject` "{subject}" must have been added')
+    if obj not in self.nodes:
+        raise RuntimeError(f'`object` "{obj}" must have been added')
+    key = (subject, predicate, obj)
+    if key not in self.edges:
+        relations = self.style.get("relations", {})
+        rels = set(
+            self.ontology[_] for _ in relations if _ in self.ontology
+        )
+        if (edgelabel is None) and (
+            (predicate in rels) or (predicate == "isA")
+        ):
+            edgelabel = self.edgelabels
+        label = None
+        if edgelabel is None:
+            tokens = predicate.split()
+            if len(tokens) == 2 and tokens[1] in ("some", "only"):
+                label = f"{tokens[0]} {tokens[1]}"
+            elif len(tokens) == 3 and tokens[1] in (
+                "exactly",
+                "min",
+                "max",
+            ):
+                label = f"{tokens[0]} {tokens[1]} {tokens[2]}"
+        elif isinstance(edgelabel, str):
+            label = edgelabel
+        elif isinstance(edgelabel, dict):
+            label = edgelabel.get(predicate, predicate)
+        elif edgelabel:
+            label = predicate
+        kwargs = self.get_edge_attrs(predicate, attrs=attrs)
+        self.dot.edge(subject, obj, label=label, **kwargs)
+        self.edges.add(key)
+
+
+
+ +
+ + + +
+ + + +

+add_edges(self, sources=None, relations=None, edgelabels=None, addnodes=None, addconstructs=None, **attrs) + + +

+ +
+ +

Adds all relations originating from entities sources whose types +are listed in relations. If sources is None, edges are added +between all current nodes.

+ +
+ Source code in ontopy/graph.py +
def add_edges(  # pylint: disable=too-many-arguments
+    self,
+    sources=None,
+    relations=None,
+    edgelabels=None,
+    addnodes=None,
+    addconstructs=None,
+    **attrs,
+):
+    """Adds all relations originating from entities `sources` who's type
+    are listed in `relations`.  If `sources` is None, edges are added
+    between all current nodes."""
+    if sources is None:
+        sources = self.nodes
+    for source in sources.copy():
+        self.add_source_edges(
+            source,
+            relations=relations,
+            edgelabels=edgelabels,
+            addnodes=addnodes,
+            addconstructs=addconstructs,
+            **attrs,
+        )
+
+
+
+ +
+ + + +
+ + + +

+add_entities(self, entities=None, relations='isA', edgelabels=None, addnodes=False, addconstructs=False, nodeattrs=None, **attrs) + + +

+ +
+ +

Adds a sequence of entities to the graph. If entities is None, +all classes are added to the graph.

+

nodeattrs is a dict mapping node names to attributes for +dedicated nodes.

+ +
+ Source code in ontopy/graph.py +
def add_entities(  # pylint: disable=too-many-arguments
+    self,
+    entities=None,
+    relations="isA",
+    edgelabels=None,
+    addnodes=False,
+    addconstructs=False,
+    nodeattrs=None,
+    **attrs,
+):
+    """Adds a sequence of entities to the graph.  If `entities` is None,
+    all classes are added to the graph.
+
+    `nodeattrs` is a dict mapping node names to are attributes for
+    dedicated nodes.
+    """
+    if entities is None:
+        entities = self.ontology.classes(imported=self.imported)
+    self.add_nodes(entities, nodeattrs=nodeattrs, **attrs)
+    self.add_edges(
+        relations=relations,
+        edgelabels=edgelabels,
+        addnodes=addnodes,
+        addconstructs=addconstructs,
+        **attrs,
+    )
+
+
+
+ +
+ + + +
+ + + +

+add_legend(self, relations=None) + + +

+ +
+ +

Adds legend for specified relations to the graph.

+

If relations is "all", the legend will contain all relations +that are defined in the style. By default the legend will +only contain relations that are currently included in the +graph.

+

Hence, you usually want to call add_legend() as the last method +before saving or displaying.

+

Relations with defined style will be bold in legend. +Relations that have inherited style from parent relation +will not be bold.

+ +
+ Source code in ontopy/graph.py +
def add_legend(self, relations=None):
+    """Adds legend for specified relations to the graph.
+
+    If `relations` is "all", the legend will contain all relations
+    that are defined in the style.  By default the legend will
+    only contain relations that are currently included in the
+    graph.
+
+    Hence, you usually want to call add_legend() as the last method
+    before saving or displaying.
+
+    Relations with defined style will be bold in legend.
+    Relations that have inherited style from parent relation
+    will not be bold.
+    """
+    rels = self.style.get("relations", {})
+    if relations is None:
+        relations = self.get_relations(sort=True)
+    elif relations == "all":
+        relations = ["isA"] + list(rels.keys()) + ["inverse"]
+    elif isinstance(relations, str):
+        relations = relations.split(",")
+
+    nrelations = len(relations)
+    if nrelations == 0:
+        return
+
+    table = (
+        '<<table border="0" cellpadding="2" cellspacing="0" cellborder="0">'
+    )
+    label1 = [table]
+    label2 = [table]
+    for index, relation in enumerate(relations):
+        if (relation in rels) or (relation == "isA"):
+            label1.append(
+                f'<tr><td align="right" '
+                f'port="i{index}"><b>{relation}</b></td></tr>'
+            )
+        else:
+            label1.append(
+                f'<tr><td align="right" '
+                f'port="i{index}">{relation}</td></tr>'
+            )
+        label2.append(f'<tr><td port="i{index}">&nbsp;</td></tr>')
+    label1.append("</table>>")
+    label2.append("</table>>")
+    self.dot.node("key1", label="\n".join(label1), shape="plaintext")
+    self.dot.node("key2", label="\n".join(label2), shape="plaintext")
+
+    rankdir = self.dot.graph_attr.get("rankdir", "TB")
+    constraint = "false" if rankdir in ("TB", "BT") else "true"
+    inv = rankdir in ("BT",)
+
+    for index in range(nrelations):
+        relation = (
+            relations[nrelations - 1 - index] if inv else relations[index]
+        )
+        if relation == "inverse":
+            kwargs = self.style.get("inverse", {}).copy()
+        else:
+            kwargs = self.get_edge_attrs(relation, {}).copy()
+        kwargs["constraint"] = constraint
+        with self.dot.subgraph(name=f"sub{index}") as subgraph:
+            subgraph.attr(rank="same")
+            if rankdir in ("BT", "LR"):
+                self.dot.edge(
+                    f"key1:i{index}:e", f"key2:i{index}:w", **kwargs
+                )
+            else:
+                self.dot.edge(
+                    f"key2:i{index}:w", f"key1:i{index}:e", **kwargs
+                )
+
+
+
+ +
+ + + +
+ + + +

+add_missing_node(self, name, addnodes=None) + + +

+ +
+ +

Checks if name corresponds to a missing node and add it if +addnodes is true.

+

Returns true if the node exists or is added, false otherwise.

+ +
+ Source code in ontopy/graph.py +
def add_missing_node(self, name, addnodes=None):
+    """Checks if `name` corresponds to a missing node and add it if
+    `addnodes` is true.
+
+    Returns true if the node exists or is added, false otherwise."""
+    addnodes = self.addnodes if addnodes is None else addnodes
+    entity = self.ontology[name] if isinstance(name, str) else name
+    label = get_label(entity)
+    if label not in self.nodes:
+        if addnodes:
+            self.add_node(entity, **self.style.get("added_node", {}))
+        else:
+            return False
+    return True
+
+
+
+ +
+ + + +
+ + + +

+add_node(self, name, nodeattrs=None, **attrs) + + +

+ +
+ +

Add node with given name. attrs are graphviz node attributes.

+ +
+ Source code in ontopy/graph.py +
def add_node(self, name, nodeattrs=None, **attrs):
+    """Add node with given name. `attrs` are graphviz node attributes."""
+    entity = self.ontology[name] if isinstance(name, str) else name
+    label = get_label(entity)
+    if label not in self.nodes.union(self.excluded_nodes):
+        kwargs = self.get_node_attrs(
+            entity, nodeattrs=nodeattrs, attrs=attrs
+        )
+        if hasattr(entity, "iri"):
+            kwargs.setdefault("URL", entity.iri)
+        self.dot.node(label, label=label, **kwargs)
+        self.nodes.add(label)
+
+
+
+ +
+ + + +
+ + + +

+add_nodes(self, names, nodeattrs, **attrs) + + +

+ +
+ +

Add nodes with given names. attrs are graphviz node attributes.

+ +
+ Source code in ontopy/graph.py +
def add_nodes(self, names, nodeattrs, **attrs):
+    """Add nodes with given names. `attrs` are graphviz node attributes."""
+    for name in names:
+        self.add_node(name, nodeattrs=nodeattrs, **attrs)
+
+
+
+ +
+ + + +
+ + + +

+add_parents(self, name, levels=1, relations='isA', edgelabels=None, addnodes=False, addconstructs=False, **attrs) + + +

+ +
+ +

Add levels levels of strict parents of entity name.

+ +
+ Source code in ontopy/graph.py +
def add_parents(  # pylint: disable=too-many-arguments
+    self,
+    name,
+    levels=1,
+    relations="isA",
+    edgelabels=None,
+    addnodes=False,
+    addconstructs=False,
+    **attrs,
+):
+    """Add `levels` levels of strict parents of entity `name`."""
+
+    def addparents(entity, nodes, parents):
+        if nodes > 0:
+            for parent in entity.get_parents(strict=True):
+                parents.add(parent)
+                addparents(parent, nodes - 1, parents)
+
+    entity = self.ontology[name] if isinstance(name, str) else name
+    parents = set()
+    addparents(entity, levels, parents)
+    self.add_entities(
+        entities=parents,
+        relations=relations,
+        edgelabels=edgelabels,
+        addnodes=addnodes,
+        addconstructs=addconstructs,
+        **attrs,
+    )
+
+
+
+ +
+ + + +
+ + + +

+add_source_edges(self, source, relations=None, edgelabels=None, addnodes=None, addconstructs=None, **attrs) + + +

+ +
+ +

Adds all relations originating from entity source whose types +are listed in relations.

+ +
+ Source code in ontopy/graph.py +
def add_source_edges(  # pylint: disable=too-many-arguments,too-many-branches
+    self,
+    source,
+    relations=None,
+    edgelabels=None,
+    addnodes=None,
+    addconstructs=None,
+    **attrs,
+):
+    """Adds all relations originating from entity `source` who's type
+    are listed in `relations`."""
+    if relations is None:
+        relations = self.relations
+    elif isinstance(relations, str):
+        relations = set([relations])
+    else:
+        relations = set(relations)
+
+    edgelabels = self.edgelabels if edgelabels is None else edgelabels
+    addconstructs = (
+        self.addconstructs if addconstructs is None else addconstructs
+    )
+
+    entity = self.ontology[source] if isinstance(source, str) else source
+    label = get_label(entity)
+    for relation in entity.is_a:
+        # isA
+        if isinstance(
+            relation, (owlready2.ThingClass, owlready2.ObjectPropertyClass)
+        ):
+            if "all" in relations or "isA" in relations:
+                rlabel = get_label(relation)
+                # FIXME - we actually want to include individuals...
+                if isinstance(entity, owlready2.Thing):
+                    continue
+                if relation not in entity.get_parents(strict=True):
+                    continue
+                if not self.add_missing_node(relation, addnodes=addnodes):
+                    continue
+                self.add_edge(
+                    subject=label,
+                    predicate="isA",
+                    obj=rlabel,
+                    edgelabel=edgelabels,
+                    **attrs,
+                )
+
+        # restriction
+        elif isinstance(relation, owlready2.Restriction):
+            rname = get_label(relation.property)
+            if "all" in relations or rname in relations:
+                rlabel = f"{rname} {typenames[relation.type]}"
+                if isinstance(relation.value, owlready2.ThingClass):
+                    obj = get_label(relation.value)
+                    if not self.add_missing_node(relation.value, addnodes):
+                        continue
+                elif (
+                    isinstance(relation.value, owlready2.ClassConstruct)
+                    and self.addconstructs
+                ):
+                    obj = self.add_class_construct(relation.value)
+                else:
+                    continue
+                pred = asstring(
+                    relation, exclude_object=True, ontology=self.ontology
+                )
+                self.add_edge(
+                    label, pred, obj, edgelabel=edgelabels, **attrs
+                )
+
+        # inverse
+        if isinstance(relation, owlready2.Inverse):
+            if "all" in relations or "inverse" in relations:
+                rlabel = get_label(relation)
+                if not self.add_missing_node(relation, addnodes=addnodes):
+                    continue
+                if relation not in entity.get_parents(strict=True):
+                    continue
+                self.add_edge(
+                    subject=label,
+                    predicate="inverse",
+                    obj=rlabel,
+                    edgelabel=edgelabels,
+                    **attrs,
+                )
+
+
+
+ +
+ + + +
+ + + +

+get_edge_attrs(self, predicate, attrs) + + +

+ +
+ +

Returns attributes for node or edge predicate. attrs overrides +the default style.

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
predicatestr

predicate to get attributes for

required
attrsdict

desired attributes to override default

required
+
+ Source code in ontopy/graph.py +
def get_edge_attrs(self, predicate: str, attrs: dict) -> dict:
+    """Returns attributes for node or edge `predicate`.  `attrs` overrides
+    the default style.
+
+    Parameters:
+        predicate: predicate to get attributes for
+        attrs: desired attributes to override default
+    """
+    # given type
+    types = ("isA", "equivalent_to", "disjoint_with", "inverse_of")
+    if predicate in types:
+        kwargs = self.style.get(predicate, {}).copy()
+    else:
+        kwargs = {}
+        name = predicate.split(None, 1)[0]
+        match = re.match(r"Inverse\((.*)\)", name)
+        if match:
+            (name,) = match.groups()
+            attrs = attrs.copy()
+            for key, value in self.style.get("inverse", {}).items():
+                attrs.setdefault(key, value)
+        if not isinstance(name, str) or name in self.ontology:
+            entity = self.ontology[name] if isinstance(name, str) else name
+            relations = self.style.get("relations", {})
+            rels = set(
+                self.ontology[_] for _ in relations if _ in self.ontology
+            )
+            rattrs = self._relation_styles(entity, relations, rels)
+
+            # object property
+            if isinstance(
+                entity,
+                (owlready2.ObjectPropertyClass, owlready2.ObjectProperty),
+            ):
+                kwargs = self.style.get("default_relation", {}).copy()
+                kwargs.update(rattrs)
+            # data property
+            elif isinstance(
+                entity,
+                (owlready2.DataPropertyClass, owlready2.DataProperty),
+            ):
+                kwargs = self.style.get("default_dataprop", {}).copy()
+                kwargs.update(rattrs)
+            else:
+                raise TypeError(f"Unknown entity type: {entity!r}")
+    kwargs.update(self.style.get("edges", {}).get(predicate, {}))
+    kwargs.update(attrs)
+    return kwargs
+
+
+
+ +
+ + + +
+ + + +

+get_figsize(self) + + +

+ +
+ +

Returns the default figure size (width, height) in points.

+ +
+ Source code in ontopy/graph.py +
def get_figsize(self):
+    """Returns the default figure size (width, height) in points."""
+    with tempfile.TemporaryDirectory() as tmpdir:
+        tmpfile = os.path.join(tmpdir, "graph.svg")
+        self.save(tmpfile)
+        xml = ET.parse(tmpfile)
+        svg = xml.getroot()
+        width = svg.attrib["width"]
+        height = svg.attrib["height"]
+        if not width.endswith("pt"):
+            # ensure that units are in points
+            raise ValueError(
+                "The width attribute should always be given in 'pt', "
+                f"but it is: {width}"
+            )
+
+        def asfloat(string):
+            return float(re.match(r"^[\d.]+", string).group())
+
+    return asfloat(width), asfloat(height)
+
+
+
+ +
+ + + +
+ + + +

+get_node_attrs(self, name, nodeattrs, attrs) + + +

+ +
+ +

Returns attributes for node or edge name. attrs overrides +the default style.

+ +
+ Source code in ontopy/graph.py +
def get_node_attrs(self, name, nodeattrs, attrs):
+    """Returns attributes for node or edge `name`.  `attrs` overrides
+    the default style."""
+    entity = self.ontology[name] if isinstance(name, str) else name
+    label = get_label(entity)
+    # class
+    if isinstance(entity, owlready2.ThingClass):
+        if entity.is_defined:
+            kwargs = self.style.get("defined_class", {})
+        else:
+            kwargs = self.style.get("class", {})
+    # class construct
+    elif isinstance(entity, owlready2.ClassConstruct):
+        kwargs = self.style.get("class_construct", {})
+    # individual
+    elif isinstance(entity, owlready2.Thing):
+        kwargs = self.style.get("individual", {})
+    # object property
+    elif isinstance(entity, owlready2.ObjectPropertyClass):
+        kwargs = self.style.get("object_property", {})
+    # data property
+    elif isinstance(entity, owlready2.DataPropertyClass):
+        kwargs = self.style.get("data_property", {})
+    # annotation property
+    elif isinstance(entity, owlready2.AnnotationPropertyClass):
+        kwargs = self.style.get("annotation_property", {})
+    else:
+        raise TypeError(f"Unknown entity type: {entity!r}")
+    kwargs = kwargs.copy()
+    kwargs.update(self.style.get("nodes", {}).get(label, {}))
+    if nodeattrs:
+        kwargs.update(nodeattrs.get(label, {}))
+    kwargs.update(attrs)
+    return kwargs
+
+
+
+ +
+ + + +
+ + + +

+get_relations(self, sort=True) + + +

+ +
+ +

Returns a set of relations in current graph. If sort is true, +a sorted list is returned.

+ +
+ Source code in ontopy/graph.py +
def get_relations(self, sort=True):
+    """Returns a set of relations in current graph.  If `sort` is true,
+    a sorted list is returned."""
+    relations = set()
+    for _, predicate, _ in self.edges:
+        if predicate.startswith("Inverse"):
+            relations.add("inverse")
+            match = re.match(r"Inverse\((.+)\)", predicate)
+            if match is None:
+                raise ValueError(
+                    "Could unexpectedly not find the inverse relation "
+                    f"just added in: {predicate}"
+                )
+            relations.add(match.groups()[0])
+        else:
+            relations.add(predicate.split(None, 1)[0])
+
+    # Sort, but place 'isA' first and 'inverse' last
+    if sort:
+        start, end = [], []
+        if "isA" in relations:
+            relations.remove("isA")
+            start.append("isA")
+        if "inverse" in relations:
+            relations.remove("inverse")
+            end.append("inverse")
+        relations = start + sorted(relations) + end
+
+    return relations
+
+
+
+ +
+ + + +
+ + + +

+save(self, filename, fmt=None, **kwargs) + + +

+ +
+ +

Saves graph to filename. If format is not given, it is +inferred from filename.

+ +
+ Source code in ontopy/graph.py +
def save(self, filename, fmt=None, **kwargs):
+    """Saves graph to `filename`.  If format is not given, it is
+    inferred from `filename`."""
+    base = os.path.splitext(filename)[0]
+    fmt = get_format(filename, default="svg", fmt=fmt)
+    kwargs.setdefault("cleanup", True)
+    if fmt in ("graphviz", "gv"):
+        if "dictionary" in kwargs:
+            self.dot.save(filename, dictionary=kwargs["dictionary"])
+        else:
+            self.dot.save(filename)
+    else:
+        fmt = kwargs.pop("format", fmt)
+        self.dot.render(base, format=fmt, **kwargs)
+
+
+
+ +
+ + + +
+ + + +

+view(self) + + +

+ +
+ +

Shows the graph in a viewer.

+ +
+ Source code in ontopy/graph.py +
def view(self):
+    """Shows the graph in a viewer."""
+    self.dot.view(cleanup=True)
+
+
+
+ +
+ + + + + +
+ +
+ +
+ + + + +
+ + + +

+check_module_dependencies(modules, verbose=True) + + +

+ +
+ +

Check module dependencies and return a copy of modules with +redundant dependencies removed.

+

If verbose is true, a warning is printed for each redundant +module dependency found.

+

If modules is given, it should be a dict returned by +get_module_dependencies().

+ +
+ Source code in ontopy/graph.py +
def check_module_dependencies(modules, verbose=True):
+    """Check module dependencies and return a copy of modules with
+    redundant dependencies removed.
+
+    If `verbose` is true, warnings are printed for each module that
+
+    If `modules` is given, it should be a dict returned by
+    get_module_dependencies().
+    """
+    visited = set()
+
+    def get_deps(iri, excl=None):
+        """Returns a set with all dependencies of `iri`, excluding `excl` and
+        its dependencies."""
+        if iri in visited:
+            return set()
+        visited.add(iri)
+        deps = set()
+        for dependency in modules[iri]:
+            if dependency != excl:
+                deps.add(dependency)
+                deps.update(get_deps(dependency))
+        return deps
+
+    mods = {}
+    redundant = []
+    for iri, deps in modules.items():
+        if not deps:
+            mods[iri] = set()
+        for dep in deps:
+            if dep in get_deps(iri, dep):
+                redundant.append((iri, dep))
+            elif iri in mods:
+                mods[iri].add(dep)
+            else:
+                mods[iri] = set([dep])
+
+    if redundant and verbose:
+        print("** Warning: Redundant module dependency:")
+        for iri, dep in redundant:
+            print(f"{iri} -> {dep}")
+
+    return mods
+
+
+
+ +
+ + + +
+ + + +

+cytoscape_style(style=None) + + +

+ +
+ +

Get list of color, style and fills.

+ +
+ Source code in ontopy/graph.py +
def cytoscape_style(style=None):  # pylint: disable=too-many-branches
+    """Get list of color, style and fills."""
+    if not style:
+        style = _default_style
+    colours = {}
+    styles = {}
+    fill = {}
+    for key, value in style.items():
+        if isinstance(value, dict):
+            if "color" in value:
+                colours[key] = value["color"]
+            else:
+                colours[key] = "black"
+            if "style" in value:
+                styles[key] = value["style"]
+            else:
+                styles[key] = "solid"
+            if "arrowhead" in value:
+                if value["arrowhead"] == "empty":
+                    fill[key] = "hollow"
+            else:
+                fill[key] = "filled"
+
+    for key, value in style.get("relations", {}).items():
+        if isinstance(value, dict):
+            if "color" in value:
+                colours[key] = value["color"]
+            else:
+                colours[key] = "black"
+            if "style" in value:
+                styles[key] = value["style"]
+            else:
+                styles[key] = "solid"
+            if "arrowhead" in value:
+                if value["arrowhead"] == "empty":
+                    fill[key] = "hollow"
+            else:
+                fill[key] = "filled"
+    return [colours, styles, fill]
+
+
+
+ +
+ + + +
+ + + +

+cytoscapegraph(graph, onto=None, infobox=None, force=False) + + +

+ +
+ +

Returns an instance of icytoscape-figure for an +instance Graph of OntoGraph; the accompanying ontology +is required for mouse actions.

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
graphOntoGraph

graph generated with OntoGraph with edgelabels=True.

required
ontoOptional[ontopy.ontology.Ontology]

ontology to be used for mouse actions.

None
infoboxstr

"left" or "right". Placement of infobox with + respect to graph.

None
forcebool

force generate graph without correct edgelabels.

False
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
GridspecLayout

cytoscapewidget with graph and infobox to be visualized +in jupyter lab.

+
+ Source code in ontopy/graph.py +
def cytoscapegraph(
+    graph: OntoGraph,
+    onto: Optional[Ontology] = None,
+    infobox: str = None,
+    force: bool = False,
+) -> "GridspecLayout":
+    # pylint: disable=too-many-locals,too-many-statements
+    """Returns and instance of icytoscape-figure for an
+    instance Graph of OntoGraph, the accompanying ontology
+    is required for mouse actions.
+    Args:
+            graph: graph generated with OntoGraph with edgelabels=True.
+            onto: ontology to be used for mouse actions.
+            infobox: "left" or "right". Placement of infbox with
+                     respect to graph.
+            force: force generate graph without correct edgelabels.
+    Returns:
+            cytoscapewidget with graph and infobox to be visualized
+            in jupyter lab.
+
+    """
+    # pylint: disable=import-error,import-outside-toplevel
+    from ipywidgets import Output, VBox, GridspecLayout
+    from IPython.display import display, Image
+    from pathlib import Path
+    import networkx as nx
+    import pydotplus
+    import ipycytoscape
+    from networkx.readwrite.json_graph import cytoscape_data
+
+    # Define the styles, this has to be aligned with the graphviz values
+    dotplus = pydotplus.graph_from_dot_data(graph.dot.source)
+    # if graph doesn't have multiedges, use dotplus.set_strict(true)
+    pydot_graph = nx.nx_pydot.from_pydot(dotplus)
+
+    colours, styles, fill = cytoscape_style()
+
+    data = cytoscape_data(pydot_graph)["elements"]
+    for datum in data["edges"]:
+        try:
+            datum["data"]["label"] = (
+                datum["data"]["label"].rsplit(" ", 1)[0].lstrip('"')
+            )
+        except KeyError as err:
+            if not force:
+                raise EMMOntoPyException(
+                    "Edge label is not defined. Are you sure that the OntoGraph"
+                    "instance you provided was generated with "
+                    "´edgelabels=True´?"
+                ) from err
+            warnings.warn(
+                "ARROWS WILL NOT BE DISPLAYED CORRECTLY. "
+                "Edge label is not defined. Are you sure that the OntoGraph "
+                "instance you provided was generated with ´edgelabels=True´?"
+            )
+            datum["data"]["label"] = ""
+
+        lab = datum["data"]["label"].replace("Inverse(", "").rstrip(")")
+        try:
+            datum["data"]["colour"] = colours[lab]
+        except KeyError:
+            datum["data"]["colour"] = "black"
+        try:
+            datum["data"]["style"] = styles[lab]
+        except KeyError:
+            datum["data"]["style"] = "solid"
+        if datum["data"]["label"].startswith("Inverse("):
+            datum["data"]["targetarrow"] = "diamond"
+            datum["data"]["sourcearrow"] = "none"
+        else:
+            datum["data"]["targetarrow"] = "triangle"
+            datum["data"]["sourcearrow"] = "none"
+        try:
+            datum["data"]["fill"] = fill[lab]
+        except KeyError:
+            datum["data"]["fill"] = "filled"
+
+    cytofig = ipycytoscape.CytoscapeWidget()
+    cytofig.graph.add_graph_from_json(data, directed=True)
+
+    cytofig.set_style(
+        [
+            {
+                "selector": "node",
+                "css": {
+                    "content": "data(label)",
+                    # "text-valign": "center",
+                    # "color": "white",
+                    # "text-outline-width": 2,
+                    # "text-outline-color": "red",
+                    "background-color": "blue",
+                },
+            },
+            {"selector": "node:parent", "css": {"background-opacity": 0.333}},
+            {
+                "selector": "edge",
+                "style": {
+                    "width": 2,
+                    "line-color": "data(colour)",
+                    # "content": "data(label)"",
+                    "line-style": "data(style)",
+                },
+            },
+            {
+                "selector": "edge.directed",
+                "style": {
+                    "curve-style": "bezier",
+                    "target-arrow-shape": "data(targetarrow)",
+                    "target-arrow-color": "data(colour)",
+                    "target-arrow-fill": "data(fill)",
+                    "mid-source-arrow-shape": "data(sourcearrow)",
+                    "mid-source-arrow-color": "data(colour)",
+                },
+            },
+            {
+                "selector": "edge.multiple_edges",
+                "style": {"curve-style": "bezier"},
+            },
+            {
+                "selector": ":selected",
+                "css": {
+                    "background-color": "black",
+                    "line-color": "black",
+                    "target-arrow-color": "black",
+                    "source-arrow-color": "black",
+                    "text-outline-color": "black",
+                },
+            },
+        ]
+    )
+
+    if onto is not None:
+        out = Output(layout={"border": "1px solid black"})
+
+        def log_clicks(node):
+            with out:
+                print((onto.get_by_label(node["data"]["label"])))
+                parent = onto.get_by_label(node["data"]["label"]).get_parents()
+                print(f"parents: {parent}")
+                try:
+                    elucidation = onto.get_by_label(
+                        node["data"]["label"]
+                    ).elucidation
+                    print(f"elucidation: {elucidation[0]}")
+                except (AttributeError, IndexError):
+                    pass
+
+                try:
+                    annotations = onto.get_by_label(
+                        node["data"]["label"]
+                    ).annotations
+                    for _ in annotations:
+                        print(f"annotation: {_}")
+                except AttributeError:
+                    pass
+
+                # Try does not work...
+                try:
+                    iri = onto.get_by_label(node["data"]["label"]).iri
+                    print(f"iri: {iri}")
+                except (AttributeError, IndexError):
+                    pass
+                try:
+                    fig = node["data"]["label"]
+                    if os.path.exists(Path(fig + ".png")):
+                        display(Image(fig + ".png", width=100))
+                    elif os.path.exists(Path(fig + ".jpg")):
+                        display(Image(fig + ".jpg", width=100))
+                except (AttributeError, IndexError):
+                    pass
+                out.clear_output(wait=True)
+
+        def log_mouseovers(node):
+            with out:
+                print(onto.get_by_label(node["data"]["label"]))
+                # print(f'mouseover: {pformat(node)}')
+            out.clear_output(wait=True)
+
+        cytofig.on("node", "click", log_clicks)
+        cytofig.on("node", "mouseover", log_mouseovers)  # , remove=True)
+        cytofig.on("node", "mouseout", out.clear_output(wait=True))
+        grid = GridspecLayout(1, 3, height="400px")
+        if infobox == "left":
+            grid[0, 0] = out
+            grid[0, 1:] = cytofig
+        elif infobox == "right":
+            grid[0, 0:-1] = cytofig
+            grid[0, 2] = out
+        else:
+            return VBox([cytofig, out])
+        return grid
+
+    return cytofig
+
+
+
+ +
+ + + +
+ + + +

+filter_classes(classes, included_namespaces=(), included_ontologies=()) + + +

+ +
+ +

Filter out classes whos namespace is not in included_namespaces +or whos ontology name is not in one of the ontologies in +included_ontologies.

+

classes should be a sequence of classes.

+ +
+ Source code in ontopy/graph.py +
def filter_classes(classes, included_namespaces=(), included_ontologies=()):
+    """Filter out classes whos namespace is not in `included_namespaces`
+    or whos ontology name is not in one of the ontologies in
+    `included_ontologies`.
+
+    `classes` should be a sequence of classes.
+    """
+    filtered = set(classes)
+    if included_namespaces:
+        filtered = set(
+            c for c in filtered if c.namespace.name in included_namespaces
+        )
+    if included_ontologies:
+        filtered = set(
+            c
+            for c in filtered
+            if c.namespace.ontology.name in included_ontologies
+        )
+    return filtered
+
+
+
+ +
+ + + +
+ + + +

+get_module_dependencies(iri_or_onto, strip_base=None) + + +

+ +
+ +

Reads iri_or_onto and returns a dict mapping ontology names to a +list of ontologies that they depends on.

+

If strip_base is true, the base IRI is stripped from ontology +names. If it is a string, it lstrip'ped from the base iri.

+ +
+ Source code in ontopy/graph.py +
def get_module_dependencies(iri_or_onto, strip_base=None):
+    """Reads `iri_or_onto` and returns a dict mapping ontology names to a
+    list of ontologies that they depends on.
+
+    If `strip_base` is true, the base IRI is stripped from ontology
+    names.  If it is a string, it lstrip'ped from the base iri.
+    """
+    from ontopy.ontology import (  # pylint: disable=import-outside-toplevel
+        get_ontology,
+    )
+
+    if isinstance(iri_or_onto, str):
+        onto = get_ontology(iri_or_onto)
+        onto.load()
+    else:
+        onto = iri_or_onto
+
+    modules = {onto.base_iri: set()}
+
+    def strip(base_iri):
+        if isinstance(strip_base, str):
+            return base_iri.lstrip(strip_base)
+        if strip_base:
+            return base_iri.strip(onto.base_iri)
+        return base_iri
+
+    visited = set()
+
+    def setmodules(onto):
+        for imported_onto in onto.imported_ontologies:
+            if onto.base_iri in modules:
+                modules[strip(onto.base_iri)].add(strip(imported_onto.base_iri))
+            else:
+                modules[strip(onto.base_iri)] = set(
+                    [strip(imported_onto.base_iri)]
+                )
+            if imported_onto.base_iri not in modules:
+                modules[strip(imported_onto.base_iri)] = set()
+            if imported_onto not in visited:
+                visited.add(imported_onto)
+                setmodules(imported_onto)
+
+    setmodules(onto)
+    return modules
+
+
+
+ +
+ + + +
+ + + +

+plot_modules(src, filename=None, fmt=None, show=False, strip_base=None, ignore_redundant=True) + + +

+ +
+ +

Plot module dependency graph for src and return a graph object.

+

Here src may be an IRI, a path the the ontology or a dict returned by +get_module_dependencies().

+

If filename is given, write the graph to this file.

+

If fmt is None, the output format is inferred from filename.

+

If show is true, the graph is displayed.

+

strip_base is passed on to get_module_dependencies() if src is not +a dict.

+

If ignore_redundant is true, redundant dependencies are not plotted.

+ +
+ Source code in ontopy/graph.py +
def plot_modules(  # pylint: disable=too-many-arguments
+    src,
+    filename=None,
+    fmt=None,
+    show=False,
+    strip_base=None,
+    ignore_redundant=True,
+):
+    """Plot module dependency graph for `src` and return a graph object.
+
+    Here `src` may be an IRI, a path the the ontology or a dict returned by
+    get_module_dependencies().
+
+    If `filename` is given, write the graph to this file.
+
+    If `fmt` is None, the output format is inferred from `filename`.
+
+    If `show` is true, the graph is displayed.
+
+    `strip_base` is passed on to get_module_dependencies() if `src` is not
+    a dict.
+
+    If `ignore_redundant` is true, redundant dependencies are not plotted.
+    """
+    if isinstance(src, dict):
+        modules = src
+    else:
+        modules = get_module_dependencies(src, strip_base=strip_base)
+
+    if ignore_redundant:
+        modules = check_module_dependencies(modules, verbose=False)
+
+    dot = graphviz.Digraph(comment="Module dependencies")
+    dot.attr(rankdir="TB")
+    dot.node_attr.update(
+        style="filled", fillcolor="lightblue", shape="box", edgecolor="blue"
+    )
+    dot.edge_attr.update(arrowtail="open", dir="back")
+
+    for iri in modules.keys():
+        iriname = iri.split(":", 1)[1]
+        dot.node(iriname, label=iri, URL=iri)
+
+    for iri, deps in modules.items():
+        for dep in deps:
+            iriname = iri.split(":", 1)[1]
+            depname = dep.split(":", 1)[1]
+            dot.edge(depname, iriname)
+
+    if filename:
+        base, ext = os.path.splitext(filename)
+        if fmt is None:
+            fmt = ext.lstrip(".")
+        dot.render(base, format=fmt, view=False, cleanup=True)
+
+    if show:
+        dot.view(cleanup=True)
+
+    return dot
+
+
+
+ +
+ + + + + + +
+ +
+ +
+ + + + + + + + + + + + + +
+
+ + + + + +
+ + + +
+ + + +
+
+
+
+ + + + + + + + + + \ No newline at end of file diff --git a/0.6.1/api_reference/ontopy/manchester/index.html b/0.6.1/api_reference/ontopy/manchester/index.html new file mode 100644 index 000000000..b936980a1 --- /dev/null +++ b/0.6.1/api_reference/ontopy/manchester/index.html @@ -0,0 +1,1745 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + manchester - EMMOntoPy + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + Skip to content + + +
+
+ +
+ + + + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + +

manchester

+ + +
+ + +
+ +

Evaluate Manchester syntax

+

This module compiles restrictions and logical constructs in Manchester +syntax into Owlready2 classes. The main function in this module is +manchester.evaluate(), see its docstring for usage example.

+

Pyparsing is used under the hood for parsing.

+ + + +
+ + + + + + + + +
+ + + +

+ +ManchesterError (EMMOntoPyException) + + + + +

+ +
+ +

Raised on invalid Manchester notation.

+ +
+ Source code in ontopy/manchester.py +
class ManchesterError(EMMOntoPyException):
+    """Raised on invalid Manchester notation."""
+
+
+ + +
+ +
+ + + + +
+ + + +

+evaluate(ontology, expr) + + +

+ +
+ +

Evaluate expression in Manchester syntax.

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
ontologyOntology

The ontology within which the expression will be evaluated.

required
exprstr

Manchester expression to be evaluated.

required
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
Construct

An Owlready2 construct that corresponds to the expression.

+

Examples:

+ +
+
+
+

from ontopy.manchester import evaluate +from ontopy import get_ontology +emmo = get_ontology().load()

+

restriction = evaluate(emmo, 'hasPart some Atom') +cls = evaluate(emmo, 'Atom') +expr = evaluate(emmo, 'Atom or Molecule')

+
+
+
+
+

Note

+

Logical expressions (with not, and and or) are supported as +well as object property restrictions. For data properterties are +only value restrictions supported so far.

+
+ +
+ Source code in ontopy/manchester.py +
def evaluate(ontology: owlready2.Ontology, expr: str) -> owlready2.Construct:
+    """Evaluate expression in Manchester syntax.
+
+    Args:
+        ontology: The ontology within which the expression will be evaluated.
+        expr: Manchester expression to be evaluated.
+
+    Returns:
+        An Owlready2 construct that corresponds to the expression.
+
+    Example:
+    >>> from ontopy.manchester import evaluate
+    >>> from ontopy import get_ontology
+    >>> emmo = get_ontology().load()
+
+    >>> restriction = evaluate(emmo, 'hasPart some Atom')
+    >>> cls = evaluate(emmo, 'Atom')
+    >>> expr = evaluate(emmo, 'Atom or Molecule')
+
+    Note:
+        Logical expressions (with `not`, `and` and `or`) are supported as
+        well as object property restrictions.  For data properterties are
+        only value restrictions supported so far.
+    """
+
+    # pylint: disable=invalid-name
+    def _parse_literal(r):
+        """Compiles literal to Owlready2 type."""
+        if r.language:
+            v = owlready2.locstr(r.string, r.language)
+        elif r.number:
+            v = r.number
+        else:
+            v = r.string
+        return v
+
+    # pylint: disable=invalid-name,no-else-return,too-many-return-statements
+    # pylint: disable=too-many-branches
+    def _eval(r):
+        """Recursively evaluate expression produced by pyparsing into an
+        Owlready2 construct."""
+
+        def fneg(x):
+            """Negates the argument if `neg` is true."""
+            return owlready2.Not(x) if neg else x
+
+        if isinstance(r, str):  # r is atomic, returns its owlready2 repr
+            return ontology[r]
+        neg = False  # whether the expression starts with "not"
+        while r[0] == "not":
+            r.pop(0)  # strip off the "not" and proceed
+            neg = not neg
+
+        if len(r) == 1:  # r is either a atomic or a parenthesised
+            # subexpression that should be further evaluated
+            if isinstance(r[0], str):
+                return fneg(ontology[r[0]])
+            else:
+                return fneg(_eval(r[0]))
+        elif r.op:  # r contains a logical operator: and/or
+            ops = {"and": owlready2.And, "or": owlready2.Or}
+            op = ops[r.op]
+            if len(r) == 3:
+                return op([fneg(_eval(r[0])), _eval(r[2])])
+            else:
+                arg1 = fneg(_eval(r[0]))
+                r.pop(0)
+                r.pop(0)
+                return op([arg1, _eval(r)])
+        elif r.objProp:  # r is a restriction
+            if r[0] == "inverse":
+                r.pop(0)
+                prop = owlready2.Inverse(ontology[r[0]])
+            else:
+                prop = ontology[r[0]]
+            rtype = r[1]
+            if rtype == "Self":
+                return fneg(prop.has_self())
+            r.pop(0)
+            r.pop(0)
+            f = getattr(prop, rtype)
+            if rtype == "value":
+                return fneg(f(_eval(r)))
+            elif rtype in ("some", "only"):
+                return fneg(f(_eval(r)))
+            elif rtype in ("min", "max", "exactly"):
+                cardinality = r.pop(0)
+                return fneg(f(cardinality, _eval(r)))
+            else:
+                raise ManchesterError(f"invalid restriction type: {rtype}")
+        elif r.dataProp:  # r is a data property restriction
+            prop = ontology[r[0]]
+            rtype = r[1]
+            r.pop(0)
+            r.pop(0)
+            f = getattr(prop, rtype)
+            if rtype == "value":
+                return f(_parse_literal(r))
+            else:
+                raise ManchesterError(
+                    f"unimplemented data property restriction: "
+                    f"{prop} {rtype} {r}"
+                )
+        else:
+            raise ManchesterError(f"invalid expression: {r}")
+
+    grammar = manchester_expression()
+    return _eval(grammar.parseString(expr, parseAll=True))
+
+
+
+ +
+ + + +
+ + + +

+manchester_expression() + + +

+ +
+ +

Returns pyparsing grammar for a Manchester expression.

+

This function is mostly for internal use.

+

See also: https://www.w3.org/TR/owl2-manchester-syntax/

+ +
+ Source code in ontopy/manchester.py +
def manchester_expression():
+    """Returns pyparsing grammar for a Manchester expression.
+
+    This function is mostly for internal use.
+
+    See also: https://www.w3.org/TR/owl2-manchester-syntax/
+    """
+    # pylint: disable=global-statement,invalid-name,too-many-locals
+    global GRAMMAR
+    if GRAMMAR:
+        return GRAMMAR
+
+    # Subset of the Manchester grammar for expressions
+    # It is based on https://www.w3.org/TR/owl2-manchester-syntax/
+    # but allows logical constructs within restrictions (like Protege)
+    ident = pp.Word(pp.alphas + "_:-", pp.alphanums + "_:-", asKeyword=True)
+    uint = pp.Word(pp.nums)
+    alphas = pp.Word(pp.alphas)
+    string = pp.Word(pp.alphanums + ":")
+    quotedString = (
+        pp.QuotedString('"""', multiline=True) | pp.QuotedString('"')
+    )("string")
+    typedLiteral = pp.Combine(quotedString + "^^" + string("datatype"))
+    stringLanguageLiteral = pp.Combine(quotedString + "@" + alphas("language"))
+    stringLiteral = quotedString
+    numberLiteral = pp.pyparsing_common.number("number")
+    literal = (
+        typedLiteral | stringLanguageLiteral | stringLiteral | numberLiteral
+    )
+    logOp = pp.one_of(["and", "or"], asKeyword=True)
+    expr = pp.Forward()
+    restriction = pp.Forward()
+    primary = pp.Keyword("not")[...] + (
+        restriction | ident("cls") | pp.nested_expr("(", ")", expr)
+    )
+    objPropExpr = (
+        pp.Literal("inverse")
+        + pp.Suppress("(")
+        + ident("objProp")
+        + pp.Suppress(")")
+        | pp.Literal("inverse") + ident("objProp")
+        | ident("objProp")
+    )
+    dataPropExpr = ident("dataProp")
+    restriction <<= (
+        objPropExpr + pp.Keyword("some") + expr
+        | objPropExpr + pp.Keyword("only") + expr
+        | objPropExpr + pp.Keyword("Self")
+        | objPropExpr + pp.Keyword("value") + ident("individual")
+        | objPropExpr + pp.Keyword("min") + uint + expr
+        | objPropExpr + pp.Keyword("max") + uint + expr
+        | objPropExpr + pp.Keyword("exactly") + uint + expr
+        | dataPropExpr + pp.Keyword("value") + literal
+    )
+    expr <<= primary + (logOp("op") + expr)[...]
+
+    GRAMMAR = expr
+    return expr
+
+
+
+ +
+ + + + + + +
+ +
+ +
+ + + + + + + + + + + + + +
+
+ + + + + +
+ + + +
+ + + +
+
+
+
+ + + + + + + + + + \ No newline at end of file diff --git a/0.6.1/api_reference/ontopy/nadict/index.html b/0.6.1/api_reference/ontopy/nadict/index.html new file mode 100644 index 000000000..c9974cb12 --- /dev/null +++ b/0.6.1/api_reference/ontopy/nadict/index.html @@ -0,0 +1,2280 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + nadict - EMMOntoPy + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + Skip to content + + +
+
+ +
+ + + + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + +

nadict

+ + +
+ + +
+ +

A nested dict with both attribute and item access.

+

NA stands for Nested and Attribute.

+ + + +
+ + + + + + + +
+ + + +

+ +NADict + + + +

+ +
+ +

A nested dict with both attribute and item access.

+

It is intended to be used with keys that are valid Python +identifiers. However, except for string keys containing a dot, +there are actually no hard limitations. If a key equals an existing +attribute name, attribute access is of cause not possible.

+

Nested items can be accessed via a dot notation, as shown in the +example below.

+

Examples

+
+
+
+

n = NADict(a=1, b=NADict(c=3, d=4)) +n['a'] +1 +n.a +1 +n['b.c'] +3 +n.b.c +3 +n['b.e'] = 5 +n.b.e +5

+
+
+
+

Attributes

+

_dict : dict + Dictionary holding the actial items.

+ +
+ Source code in ontopy/nadict.py +
class NADict:
+    """A nested dict with both attribute and item access.
+
+    It is intended to be used with keys that are valid Python
+    identifiers.  However, except for string keys containing a dot,
+    there are actually no hard limitations.  If a key equals an existing
+    attribute name, attribute access is of cause not possible.
+
+    Nested items can be accessed via a dot notation, as shown in the
+    example below.
+
+    Examples
+    --------
+    >>> n = NADict(a=1, b=NADict(c=3, d=4))
+    >>> n['a']
+    1
+    >>> n.a
+    1
+    >>> n['b.c']
+    3
+    >>> n.b.c
+    3
+    >>> n['b.e'] = 5
+    >>> n.b.e
+    5
+
+    Attributes
+    ----------
+    _dict : dict
+        Dictionary holding the actial items.
+    """
+
+    def __init__(self, *args, **kw):
+        object.__setattr__(self, "_dict", {})
+        self.update(*args, **kw)
+
+    def __getitem__(self, key):
+        if "." in key:
+            key1, key2 = key.split(".", 1)
+            return self._dict[key1][key2]
+        return self._dict[key]
+
+    def __setitem__(self, key, value):
+        if key in (
+            "clear",
+            "copy",
+            "fromkeys",
+            "get",
+            "items",
+            "keys",
+            "pop",
+            "popitem",
+            "setdefault",
+            "update",
+            "values",
+        ):
+            raise ValueError(
+                f"invalid key {key!r}: must not override supported dict method"
+                " names"
+            )
+
+        if "." in key:
+            key1, key2 = key.split(".", 1)
+            if key1 not in self._dict:
+                self._dict[key1] = NADict()
+            self._dict[key1][key2] = value
+        elif key in self._dict:
+            if isinstance(self._dict[key], NADict):
+                self._dict[key].update(value)
+            else:
+                self._dict[key] = value
+        else:
+            if isinstance(value, Mapping):
+                self._dict[key] = NADict(value)
+            else:
+                self._dict[key] = value
+
+    def __delitem__(self, key):
+        if "." in key:
+            key1, key2 = key.split(".", 1)
+            del self._dict[key1][key2]
+        else:
+            del self._dict[key]
+
+    def __getattr__(self, key):
+        if key not in self._dict:
+            raise AttributeError(f"No such key: {key}")
+        return self._dict[key]
+
+    def __setattr__(self, key, value):
+        if key in self._dict:
+            self._dict[key] = value
+        else:
+            object.__setattr__(self, key, value)
+
+    def __delattr__(self, key):
+        if key in self._dict:
+            del self._dict[key]
+        else:
+            object.__delattr__(self, key)
+
+    def __len__(self):
+        return len(self._dict)
+
+    def __contains__(self, key):
+        if "." in key:
+            key1, key2 = key.split(".", 1)
+            return key2 in self._dict[key1]
+        return key in self._dict
+
+    def __iter__(self, prefix=""):
+        for key, value in self._dict.items():
+            key = f"{prefix}.{key}" if prefix else key
+            if isinstance(value, NADict):
+                yield from value.__iter__(key)
+            else:
+                yield key
+
+    def __repr__(self):
+        return (
+            f"{self.__class__.__name__}("
+            f"{', '.join(f'{key}={value!r}' for key, value in self._dict.items())})"  # pylint: disable=line-too-long
+        )
+
+    def clear(self):
+        """Clear all keys."""
+        self._dict.clear()
+
+    def copy(self):
+        """Returns a deep copy of self."""
+        return copy.deepcopy(self)
+
+    @staticmethod
+    def fromkeys(iterable, value=None):
+        """Returns a new NADict with keys from `iterable` and values
+        set to `value`."""
+        res = NADict()
+        for key in iterable:
+            res[key] = value
+        return res
+
+    def get(self, key, default=None):
+        """Returns the value for `key` if `key` is in self, else return
+        `default`."""
+        if "." in key:
+            key1, key2 = key.split(".", 1)
+            return self._dict[key1].get(key2, default)
+        return self._dict.get(key, default)
+
+    def items(self, prefix=""):
+        """Returns an iterator over all items as (key, value) pairs."""
+        for key, value in self._dict.items():
+            key = f"{prefix}.{key}" if prefix else key
+            if isinstance(value, NADict):
+                yield from value.items(key)
+            else:
+                yield (key, value)
+
+    def keys(self, prefix=""):
+        """Returns an iterator over all keys."""
+        for key, value in self._dict.items():
+            key = f"{prefix}.{key}" if prefix else key
+            if isinstance(value, NADict):
+                yield from value.keys(key)
+            else:
+                yield key
+
+    def pop(self, key, default=None):
+        """Removed `key` and returns corresponding value.  If `key` is not
+        found, `default` is returned if given, otherwise KeyError is
+        raised."""
+        if "." in key:
+            key1, key2 = key.split(".", 1)
+            return self._dict[key1].pop(key2, default)
+        return self._dict.pop(key, default)
+
+    def popitem(self, prefix=""):
+        """Removes and returns some (key, value). Raises KeyError if empty."""
+        item = self._dict.popitem()
+        if isinstance(item, NADict):
+            key, value = item
+            item2 = item.popitem(key)
+            self._dict[key] = value
+            return item2
+        key, value = self._dict.popitem()
+        key = f"{prefix}.{key}" if prefix else key
+        return (key, value)
+
+    def setdefault(self, key, value=None):
+        """Inserts `key` and `value` pair if key is not found.
+
+        Returns the new value for `key`."""
+        if "." in key:
+            key1, key2 = key.split(".", 1)
+            return self._dict[key1].setdefault(key2, value)
+        return self._dict.setdefault(key, value)
+
+    def update(self, *args, **kwargs):
+        """Updates self with dict/iterable from `args` and keyword arguments
+        from `kw`."""
+        for arg in args:
+            if hasattr(arg, "keys"):
+                for _ in arg:
+                    self[_] = arg[_]
+            else:
+                for key, value in arg:
+                    self[key] = value
+        for key, value in kwargs.items():
+            self[key] = value
+
+    def values(self):
+        """Returns a set-like providing a view of all style values."""
+        return self._dict.values()
+
+
+ + + +
+ + + + + + + + + + +
+ + + +

+clear(self) + + +

+ +
+ +

Clear all keys.

+ +
+ Source code in ontopy/nadict.py +
def clear(self):
+    """Clear all keys."""
+    self._dict.clear()
+
+
+
+ +
+ + + +
+ + + +

+copy(self) + + +

+ +
+ +

Returns a deep copy of self.

+ +
+ Source code in ontopy/nadict.py +
def copy(self):
+    """Returns a deep copy of self."""
+    return copy.deepcopy(self)
+
+
+
+ +
+ + + +
+ + + +

+fromkeys(iterable, value=None) + + + staticmethod + + +

+ +
+ +

Returns a new NADict with keys from iterable and values +set to value.

+ +
+ Source code in ontopy/nadict.py +
@staticmethod
+def fromkeys(iterable, value=None):
+    """Returns a new NADict with keys from `iterable` and values
+    set to `value`."""
+    res = NADict()
+    for key in iterable:
+        res[key] = value
+    return res
+
+
+
+ +
+ + + +
+ + + +

+get(self, key, default=None) + + +

+ +
+ +

Returns the value for key if key is in self, else return +default.

+ +
+ Source code in ontopy/nadict.py +
def get(self, key, default=None):
+    """Returns the value for `key` if `key` is in self, else return
+    `default`."""
+    if "." in key:
+        key1, key2 = key.split(".", 1)
+        return self._dict[key1].get(key2, default)
+    return self._dict.get(key, default)
+
+
+
+ +
+ + + +
+ + + +

+items(self, prefix='') + + +

+ +
+ +

Returns an iterator over all items as (key, value) pairs.

+ +
+ Source code in ontopy/nadict.py +
def items(self, prefix=""):
+    """Returns an iterator over all items as (key, value) pairs."""
+    for key, value in self._dict.items():
+        key = f"{prefix}.{key}" if prefix else key
+        if isinstance(value, NADict):
+            yield from value.items(key)
+        else:
+            yield (key, value)
+
+
+
+ +
+ + + +
+ + + +

+keys(self, prefix='') + + +

+ +
+ +

Returns an iterator over all keys.

+ +
+ Source code in ontopy/nadict.py +
def keys(self, prefix=""):
+    """Returns an iterator over all keys."""
+    for key, value in self._dict.items():
+        key = f"{prefix}.{key}" if prefix else key
+        if isinstance(value, NADict):
+            yield from value.keys(key)
+        else:
+            yield key
+
+
+
+ +
+ + + +
+ + + +

+pop(self, key, default=None) + + +

+ +
+ +

Removed key and returns corresponding value. If key is not +found, default is returned if given, otherwise KeyError is +raised.

+ +
+ Source code in ontopy/nadict.py +
def pop(self, key, default=None):
+    """Removed `key` and returns corresponding value.  If `key` is not
+    found, `default` is returned if given, otherwise KeyError is
+    raised."""
+    if "." in key:
+        key1, key2 = key.split(".", 1)
+        return self._dict[key1].pop(key2, default)
+    return self._dict.pop(key, default)
+
+
+
+ +
+ + + +
+ + + +

+popitem(self, prefix='') + + +

+ +
+ +

Removes and returns some (key, value). Raises KeyError if empty.

+ +
+ Source code in ontopy/nadict.py +
def popitem(self, prefix=""):
+    """Removes and returns some (key, value). Raises KeyError if empty."""
+    item = self._dict.popitem()
+    if isinstance(item, NADict):
+        key, value = item
+        item2 = item.popitem(key)
+        self._dict[key] = value
+        return item2
+    key, value = self._dict.popitem()
+    key = f"{prefix}.{key}" if prefix else key
+    return (key, value)
+
+
+
+ +
+ + + +
+ + + +

+setdefault(self, key, value=None) + + +

+ +
+ +

Inserts key and value pair if key is not found.

+

Returns the new value for key.

+ +
+ Source code in ontopy/nadict.py +
def setdefault(self, key, value=None):
+    """Inserts `key` and `value` pair if key is not found.
+
+    Returns the new value for `key`."""
+    if "." in key:
+        key1, key2 = key.split(".", 1)
+        return self._dict[key1].setdefault(key2, value)
+    return self._dict.setdefault(key, value)
+
+
+
+ +
+ + + +
+ + + +

+update(self, *args, **kwargs) + + +

+ +
+ +

Updates self with dict/iterable from args and keyword arguments +from kw.

+ +
+ Source code in ontopy/nadict.py +
def update(self, *args, **kwargs):
+    """Updates self with dict/iterable from `args` and keyword arguments
+    from `kw`."""
+    for arg in args:
+        if hasattr(arg, "keys"):
+            for _ in arg:
+                self[_] = arg[_]
+        else:
+            for key, value in arg:
+                self[key] = value
+    for key, value in kwargs.items():
+        self[key] = value
+
+
+
+ +
+ + + +
+ + + +

+values(self) + + +

+ +
+ +

Returns a set-like providing a view of all style values.

+ +
+ Source code in ontopy/nadict.py +
def values(self):
+    """Returns a set-like providing a view of all style values."""
+    return self._dict.values()
+
+
+
+ +
+ + + + + +
+ +
+ +
+ + + + + + + +
+ +
+ +
+ + + + + + + + + + + + + +
+
+ + + + + +
+ + + +
+ + + +
+
+
+
+ + + + + + + + + + \ No newline at end of file diff --git a/0.6.1/api_reference/ontopy/ontodoc/index.html b/0.6.1/api_reference/ontopy/ontodoc/index.html new file mode 100644 index 000000000..f53fcee1a --- /dev/null +++ b/0.6.1/api_reference/ontopy/ontodoc/index.html @@ -0,0 +1,5464 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + ontodoc - EMMOntoPy + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + Skip to content + + +
+
+ +
+ + + + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + +

ontodoc

+ + +
+ + +
+ +

A module for documenting ontologies.

+ + + +
+ + + + + + + +
+ + + +

+ +AttributeDict (dict) + + + + +

+ +
+ +

A dict with attribute access.

+

Note that methods like key() and update() may be overridden.

+ +
+ Source code in ontopy/ontodoc.py +
class AttributeDict(dict):
+    """A dict with attribute access.
+
+    Note that methods like key() and update() may be overridden."""
+
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        self.__dict__ = self
+
+
+ + + +
+ + + + + + + + + + + + +
+ +
+ +
+ + + +
+ + + +

+ +DocPP + + + +

+ +
+ +

Documentation pre-processor.

+

It supports the following features:

+
    +
  • +

    Comment lines

    +
    %% Comment line...
    +
    +
  • +
  • +

    Insert header with given level

    +
    %HEADER label [level=1]
    +
    +
  • +
  • +

    Insert figure with optional caption and width. filepath + should be relative to basedir. If width is 0, no width will + be specified.

    +
    %FIGURE filepath [caption='' width=0px]
    +
    +
  • +
  • +

    Include other markdown files. Header levels may be up or down with + shift

    +
    %INCLUDE filepath [shift=0]
    +
    +
  • +
  • +

    Insert generated documentation for ontology entity. The header + level may be set with header_level.

    +
    %ENTITY name [header_level=3]
    +
    +
  • +
  • +

    Insert generated documentation for ontology branch name. Options:

    +
      +
    • header_level: Header level.
    • +
    • terminated: Whether to branch should be terminated at all branch + names in the final document.
    • +
    • +

      include_leaves: Whether to include leaves as end points + to the branch.

      +

      %BRANCH name [header_level=3 terminated=1 include_leaves=0 + namespaces='' ontologies='']

      +
    • +
    +
  • +
  • +

    Insert generated figure of ontology branch name. The figure + is written to path. The default path is figdir/name, + where figdir is given at class initiation. It is recommended + to exclude the file extension from path. In this case, the + default figformat will be used (and easily adjusted to the + correct format required by the backend). leaves may be a comma- + separated list of leaf node names.

    +
    %BRANCHFIG name [path='' caption='' terminated=1 include_leaves=1
    +                 strict_leaves=1, width=0px leaves='' relations=all
    +                 edgelabels=0 namespaces='' ontologies='']
    +
    +
  • +
  • +

    This is a combination of the %HEADER and %BRANCHFIG directives.

    +
    %BRANCHHEAD name [level=2  path='' caption='' terminated=1
    +                  include_leaves=1 width=0px leaves='']
    +
    +
  • +
  • +

    This is a combination of the %HEADER, %BRANCHFIG and %BRANCH + directives. It inserts documentation of branch name, with a + header followed by a figure and then documentation of each + element.

    +
    %BRANCHDOC name [level=2  path='' title='' caption='' terminated=1
    +                 strict_leaves=1 width=0px leaves='' relations='all'
    +                 rankdir='BT' legend=1 namespaces='' ontologies='']
    +
    +
  • +
  • +

    Insert generated documentation for all entities of the given type. + Valid values of type are: "classes", "individuals", + "object_properties", "data_properties", "annotations_properties"

    +
    %ALL type [header_level=3, namespaces='', ontologies='']
    +
    +
  • +
  • +

    Insert generated figure of all entities of the given type. + Valid values of type are: "classes", "object_properties" and + "data_properties".

    +
    %ALLFIG type
    +
    +
  • +
+

Parameters

+

template : str + Input template. +ontodoc : OntoDoc instance + Instance of OntoDoc +basedir : str + Base directory for including relative file paths. +figdir : str + Default directory to store generated figures. +figformat : str + Default format for generated figures. +figscale : float + Default scaling of generated figures. +maxwidth : float + Maximum figure width. Figures larger than this will be rescaled. +imported : bool + Whether to include imported entities.

+ +
+ Source code in ontopy/ontodoc.py +
class DocPP:  # pylint: disable=too-many-instance-attributes
+    """Documentation pre-processor.
+
+    It supports the following features:
+
+      * Comment lines
+
+            %% Comment line...
+
+      * Insert header with given level
+
+            %HEADER label [level=1]
+
+      * Insert figure with optional caption and width. `filepath`
+        should be relative to `basedir`.  If width is 0, no width will
+        be specified.
+
+            %FIGURE filepath [caption='' width=0px]
+
+      * Include other markdown files.  Header levels may be up or down with
+        `shift`
+
+            %INCLUDE filepath [shift=0]
+
+      * Insert generated documentation for ontology entity.  The header
+        level may be set with `header_level`.
+
+            %ENTITY name [header_level=3]
+
+      * Insert generated documentation for ontology branch `name`.  Options:
+          - header_level: Header level.
+          - terminated: Whether to branch should be terminated at all branch
+            names in the final document.
+          - include_leaves: Whether to include leaves as end points
+            to the branch.
+
+            %BRANCH name [header_level=3 terminated=1 include_leaves=0
+                          namespaces='' ontologies='']
+
+      * Insert generated figure of ontology branch `name`.  The figure
+        is written to `path`.  The default path is `figdir`/`name`,
+        where `figdir` is given at class initiation. It is recommended
+        to exclude the file extension from `path`.  In this case, the
+        default figformat will be used (and easily adjusted to the
+        correct format required by the backend). `leaves` may be a comma-
+        separated list of leaf node names.
+
+            %BRANCHFIG name [path='' caption='' terminated=1 include_leaves=1
+                             strict_leaves=1, width=0px leaves='' relations=all
+                             edgelabels=0 namespaces='' ontologies='']
+
+      * This is a combination of the %HEADER and %BRANCHFIG directives.
+
+            %BRANCHHEAD name [level=2  path='' caption='' terminated=1
+                              include_leaves=1 width=0px leaves='']
+
+      * This is a combination of the %HEADER, %BRANCHFIG and %BRANCH
+        directives. It inserts documentation of branch `name`, with a
+        header followed by a figure and then documentation of each
+        element.
+
+            %BRANCHDOC name [level=2  path='' title='' caption='' terminated=1
+                             strict_leaves=1 width=0px leaves='' relations='all'
+                             rankdir='BT' legend=1 namespaces='' ontologies='']
+
+      * Insert generated documentation for all entities of the given type.
+        Valid values of `type` are: "classes", "individuals",
+        "object_properties", "data_properties", "annotations_properties"
+
+            %ALL type [header_level=3, namespaces='', ontologies='']
+
+      * Insert generated figure of all entities of the given type.
+        Valid values of `type` are: "classes", "object_properties" and
+        "data_properties".
+
+            %ALLFIG type
+
+    Parameters
+    ----------
+    template : str
+        Input template.
+    ontodoc : OntoDoc instance
+        Instance of OntoDoc
+    basedir : str
+        Base directory for including relative file paths.
+    figdir : str
+        Default directory to store generated figures.
+    figformat : str
+        Default format for generated figures.
+    figscale : float
+        Default scaling of generated figures.
+    maxwidth : float
+        Maximum figure width.  Figures larger than this will be rescaled.
+    imported : bool
+        Whether to include imported entities.
+    """
+
+    # FIXME - this class should be refractured:
+    #   * Instead of rescan the entire document for each pre-processer
+    #     directive, we should scan the source like by line and handle
+    #     each directive as they occour.
+    #   * The current implementation has a lot of dublicated code.
+    #   * Instead of modifying the source in-place, we should copy to a
+    #     result list. This will make good error reporting much easier.
+    #   * Branch leaves are only looked up in the file witht the %BRANCH
+    #     directive, not in all included files as expedted.
+
+    def __init__(  # pylint: disable=too-many-arguments
+        self,
+        template,
+        ontodoc,
+        basedir=".",
+        figdir="genfigs",
+        figformat="png",
+        figscale=1.0,
+        maxwidth=None,
+        imported=False,
+    ):
+        self.lines = template.split("\n")
+        self.ontodoc = ontodoc
+        self.basedir = basedir
+        self.figdir = os.path.join(basedir, figdir)
+        self.figformat = figformat
+        self.figscale = figscale
+        self.maxwidth = maxwidth
+        self.imported = imported
+        self._branch_cache = None
+        self._processed = False  # Whether process() has been called
+
+    def __str__(self):
+        return self.get_buffer()
+
+    def get_buffer(self):
+        """Returns the current buffer."""
+        return "\n".join(self.lines)
+
+    def copy(self):
+        """Returns a copy of self."""
+        docpp = DocPP(
+            "",
+            self.ontodoc,
+            self.basedir,
+            figformat=self.figformat,
+            figscale=self.figscale,
+            maxwidth=self.maxwidth,
+        )
+        docpp.lines[:] = self.lines
+        docpp.figdir = self.figdir
+        return docpp
+
+    def get_branches(self):
+        """Returns a list with all branch names as specified with %BRANCH
+        (in current and all included documents).  The returned value is
+        cached for efficiency purposes and so that it is not lost after
+        processing branches."""
+        if self._branch_cache is None:
+            names = []
+            docpp = self.copy()
+            docpp.process_includes()
+            for line in docpp.lines:
+                if line.startswith("%BRANCH"):
+                    names.append(shlex.split(line)[1])
+            self._branch_cache = names
+        return self._branch_cache
+
+    def shift_header_levels(self, shift):
+        """Shift header level of all hashtag-headers in buffer.  Underline
+        headers are ignored."""
+        if not shift:
+            return
+        pat = re.compile("^#+ ")
+        for i, line in enumerate(self.lines):
+            match = pat.match(line)
+            if match:
+                if shift > 0:
+                    self.lines[i] = "#" * shift + line
+                elif shift < 0:
+                    counter = match.end()
+                    if shift > counter:
+                        self.lines[i] = line.lstrip("# ")
+                    else:
+                        self.lines[i] = line[counter:]
+
+    def process_comments(self):
+        """Strips out comment lines starting with "%%"."""
+        self.lines = [line for line in self.lines if not line.startswith("%%")]
+
+    def process_headers(self):
+        """Expand all %HEADER specifications."""
+        for i, line in reversed(list(enumerate(self.lines))):
+            if line.startswith("%HEADER "):
+                tokens = shlex.split(line)
+                name = tokens[1]
+                opts = get_options(tokens[2:], level=1)
+                del self.lines[i]
+                self.lines[i:i] = self.ontodoc.get_header(
+                    name, int(opts.level)  # pylint: disable=no-member
+                ).split("\n")
+
+    def process_figures(self):
+        """Expand all %FIGURE specifications."""
+        for i, line in reversed(list(enumerate(self.lines))):
+            if line.startswith("%FIGURE "):
+                tokens = shlex.split(line)
+                path = tokens[1]
+                opts = get_options(tokens[2:], caption="", width=0)
+                del self.lines[i]
+                self.lines[i:i] = self.ontodoc.get_figure(
+                    os.path.join(self.basedir, path),
+                    caption=opts.caption,  # pylint: disable=no-member
+                    width=opts.width,  # pylint: disable=no-member
+                ).split("\n")
+
+    def process_entities(self):
+        """Expand all %ENTITY specifications."""
+        for i, line in reversed(list(enumerate(self.lines))):
+            if line.startswith("%ENTITY "):
+                tokens = shlex.split(line)
+                name = tokens[1]
+                opts = get_options(tokens[2:], header_level=3)
+                del self.lines[i]
+                self.lines[i:i] = self.ontodoc.itemdoc(
+                    name, int(opts.header_level)  # pylint: disable=no-member
+                ).split("\n")
+
+    def process_branches(self):
+        """Expand all %BRANCH specifications."""
+        onto = self.ontodoc.onto
+
+        # Get all branch names in final document
+        names = self.get_branches()
+        for i, line in reversed(list(enumerate(self.lines))):
+            if line.startswith("%BRANCH "):
+                tokens = shlex.split(line)
+                name = tokens[1]
+                opts = get_options(
+                    tokens[2:],
+                    header_level=3,
+                    terminated=1,
+                    include_leaves=0,
+                    namespaces="",
+                    ontologies="",
+                )
+                leaves = (
+                    names if opts.terminated else ()
+                )  # pylint: disable=no-member
+
+                included_namespaces = (
+                    opts.namespaces.split(",")
+                    if opts.namespaces
+                    else ()  # pylint: disable=no-member
+                )
+                included_ontologies = (
+                    opts.ontologies.split(",")
+                    if opts.ontologies
+                    else ()  # pylint: disable=no-member
+                )
+
+                branch = filter_classes(
+                    onto.get_branch(
+                        name, leaves, opts.include_leaves
+                    ),  # pylint: disable=no-member
+                    included_namespaces=included_namespaces,
+                    included_ontologies=included_ontologies,
+                )
+
+                del self.lines[i]
+                self.lines[i:i] = self.ontodoc.itemsdoc(
+                    branch, int(opts.header_level)  # pylint: disable=no-member
+                ).split("\n")
+
+    def _make_branchfig(  # pylint: disable=too-many-arguments,too-many-locals
+        self,
+        name: str,
+        path: "Union[Path, str]",
+        terminated: bool,
+        include_leaves: bool,
+        strict_leaves: bool,
+        width: float,
+        leaves: "Union[str, list[str]]",
+        relations: str,
+        edgelabels: str,
+        rankdir: str,
+        legend: bool,
+        included_namespaces: "Iterable[str]",
+        included_ontologies: "Iterable[str]",
+    ) -> "tuple[str, list[str], float]":
+        """Help method for process_branchfig().
+
+        Args:
+            name: name of branch root
+            path: optional figure path name
+            include_leaves: whether to include leaves as end points
+                to the branch.
+            strict_leaves: whether to strictly exclude leave descendants
+            terminated: whether the graph should be terminated at leaf nodes
+            width: optional figure width
+            leaves: optional leaf node names for graph termination
+            relations: comma-separated list of relations to include
+            edgelabels: whether to include edgelabels
+            rankdir: graph direction (BT, TB, RL, LR)
+            legend: whether to add legend
+            included_namespaces: sequence of names of namespaces to be included
+            included_ontologies: sequence of names of ontologies to be included
+
+        Returns:
+            filepath: path to generated figure
+            leaves: used list of leaf node names
+            width: actual figure width
+
+        """
+        onto = self.ontodoc.onto
+        if leaves:
+            if isinstance(leaves, str):
+                leaves = leaves.split(",")
+        elif terminated:
+            leaves = set(self.get_branches())
+            leaves.discard(name)
+        else:
+            leaves = None
+        if path:
+            figdir = os.path.dirname(path)
+            formatext = os.path.splitext(path)[1]
+            if formatext:
+                fmt = formatext.lstrip(".")
+            else:
+                fmt = self.figformat
+                path += f".{fmt}"
+        else:
+            figdir = self.figdir
+            fmt = self.figformat
+            term = "T" if terminated else ""
+            path = os.path.join(figdir, name + term) + f".{fmt}"
+
+        # Create graph
+        graph = OntoGraph(onto, graph_attr={"rankdir": rankdir})
+        graph.add_branch(
+            root=name,
+            leaves=leaves,
+            include_leaves=include_leaves,
+            strict_leaves=strict_leaves,
+            relations=relations,
+            edgelabels=edgelabels,
+            included_namespaces=included_namespaces,
+            included_ontologies=included_ontologies,
+        )
+        if legend:
+            graph.add_legend()
+
+        if not width:
+            figwidth, _ = graph.get_figsize()
+            width = self.figscale * figwidth
+            if self.maxwidth and width > self.maxwidth:
+                width = self.maxwidth
+
+        filepath = os.path.join(self.basedir, path)
+        destdir = os.path.dirname(filepath)
+        if not os.path.exists(destdir):
+            os.makedirs(destdir)
+        graph.save(filepath, fmt=fmt)
+        return filepath, leaves, width
+
+    def process_branchfigs(self):
+        """Process all %BRANCHFIG directives."""
+        for i, line in reversed(list(enumerate(self.lines))):
+            if line.startswith("%BRANCHFIG "):
+                tokens = shlex.split(line)
+                name = tokens[1]
+                opts = get_options(
+                    tokens[2:],
+                    path="",
+                    caption="",
+                    terminated=1,
+                    include_leaves=1,
+                    strict_leaves=1,
+                    width=0,
+                    leaves="",
+                    relations="all",
+                    edgelabels=0,
+                    rankdir="BT",
+                    legend=1,
+                    namespaces="",
+                    ontologies="",
+                )
+
+                included_namespaces = (
+                    opts.namespaces.split(",")
+                    if opts.namespaces
+                    else ()  # pylint: disable=no-member
+                )
+                included_ontologies = (
+                    opts.ontologies.split(",")
+                    if opts.ontologies
+                    else ()  # pylint: disable=no-member
+                )
+
+                filepath, _, width = self._make_branchfig(
+                    name,
+                    opts.path,  # pylint: disable=no-member
+                    opts.terminated,  # pylint: disable=no-member
+                    opts.include_leaves,  # pylint: disable=no-member
+                    opts.strict_leaves,  # pylint: disable=no-member
+                    opts.width,  # pylint: disable=no-member
+                    opts.leaves,  # pylint: disable=no-member
+                    opts.relations,  # pylint: disable=no-member
+                    opts.edgelabels,  # pylint: disable=no-member
+                    opts.rankdir,  # pylint: disable=no-member
+                    opts.legend,  # pylint: disable=no-member
+                    included_namespaces,
+                    included_ontologies,
+                )
+
+                del self.lines[i]
+                self.lines[i:i] = self.ontodoc.get_figure(
+                    filepath,
+                    caption=opts.caption,
+                    width=width,  # pylint: disable=no-member
+                ).split("\n")
+
+    def process_branchdocs(self):  # pylint: disable=too-many-locals
+        """Process all %BRANCHDOC and  %BRANCHEAD directives."""
+        onto = self.ontodoc.onto
+        for i, line in reversed(list(enumerate(self.lines))):
+            if line.startswith("%BRANCHDOC ") or line.startswith(
+                "%BRANCHHEAD "
+            ):
+                with_branch = bool(line.startswith("%BRANCHDOC "))
+                tokens = shlex.split(line)
+                name = tokens[1]
+                title = camelsplit(name)
+                title = title[0].upper() + title[1:] + " branch"
+                opts = get_options(
+                    tokens[2:],
+                    level=2,
+                    path="",
+                    title=title,
+                    caption=title + ".",
+                    terminated=1,
+                    strict_leaves=1,
+                    width=0,
+                    leaves="",
+                    relations="all",
+                    edgelabels=0,
+                    rankdir="BT",
+                    legend=1,
+                    namespaces="",
+                    ontologies="",
+                )
+
+                included_namespaces = (
+                    opts.namespaces.split(",")
+                    if opts.namespaces
+                    else ()  # pylint: disable=no-member
+                )
+                included_ontologies = (
+                    opts.ontologies.split(",")
+                    if opts.ontologies
+                    else ()  # pylint: disable=no-member
+                )
+
+                include_leaves = 1
+                filepath, leaves, width = self._make_branchfig(
+                    name,
+                    opts.path,  # pylint: disable=no-member
+                    opts.terminated,  # pylint: disable=no-member
+                    include_leaves,
+                    opts.strict_leaves,  # pylint: disable=no-member
+                    opts.width,  # pylint: disable=no-member
+                    opts.leaves,  # pylint: disable=no-member
+                    opts.relations,  # pylint: disable=no-member
+                    opts.edgelabels,  # pylint: disable=no-member
+                    opts.rankdir,  # pylint: disable=no-member
+                    opts.legend,  # pylint: disable=no-member
+                    included_namespaces,
+                    included_ontologies,
+                )
+
+                sec = []
+                sec.append(
+                    self.ontodoc.get_header(opts.title, int(opts.level))
+                )  # pylint: disable=no-member
+                sec.append(
+                    self.ontodoc.get_figure(
+                        filepath,
+                        caption=opts.caption,
+                        width=width,  # pylint: disable=no-member
+                    )
+                )
+                if with_branch:
+                    include_leaves = 0
+                    branch = filter_classes(
+                        onto.get_branch(name, leaves, include_leaves),
+                        included_namespaces=included_namespaces,
+                        included_ontologies=included_ontologies,
+                    )
+                    sec.append(
+                        self.ontodoc.itemsdoc(
+                            branch, int(opts.level + 1)
+                        )  # pylint: disable=no-member
+                    )
+
+                del self.lines[i]
+                self.lines[i:i] = sec
+
+    def process_alls(self):
+        """Expand all %ALL specifications."""
+        onto = self.ontodoc.onto
+        for i, line in reversed(list(enumerate(self.lines))):
+            if line.startswith("%ALL "):
+                tokens = shlex.split(line)
+                token = tokens[1]
+                opts = get_options(tokens[2:], header_level=3)
+                if token == "classes":  # nosec
+                    items = onto.classes(imported=self.imported)
+                elif token in ("object_properties", "relations"):
+                    items = onto.object_properties(imported=self.imported)
+                elif token == "data_properties":  # nosec
+                    items = onto.data_properties(imported=self.imported)
+                elif token == "annotation_properties":  # nosec
+                    items = onto.annotation_properties(imported=self.imported)
+                elif token == "individuals":  # nosec
+                    items = onto.individuals(imported=self.imported)
+                else:
+                    raise InvalidTemplateError(
+                        f"Invalid argument to %%ALL: {token}"
+                    )
+                items = sorted(items, key=get_label)
+                del self.lines[i]
+                self.lines[i:i] = self.ontodoc.itemsdoc(
+                    items, int(opts.header_level)  # pylint: disable=no-member
+                ).split("\n")
+
+    def process_allfig(self):  # pylint: disable=too-many-locals
+        """Process all %ALLFIG directives."""
+        onto = self.ontodoc.onto
+        for i, line in reversed(list(enumerate(self.lines))):
+            if line.startswith("%ALLFIG "):
+                tokens = shlex.split(line)
+                token = tokens[1]
+                opts = get_options(
+                    tokens[2:],
+                    path="",
+                    level=3,
+                    terminated=0,
+                    include_leaves=1,
+                    strict_leaves=1,
+                    width=0,
+                    leaves="",
+                    relations="isA",
+                    edgelabels=0,
+                    rankdir="BT",
+                    legend=1,
+                    namespaces="",
+                    ontologies="",
+                )
+                if token == "classes":  # nosec
+                    roots = onto.get_root_classes(imported=self.imported)
+                elif token in ("object_properties", "relations"):
+                    roots = onto.get_root_object_properties(
+                        imported=self.imported
+                    )
+                elif token == "data_properties":  # nosec
+                    roots = onto.get_root_data_properties(
+                        imported=self.imported
+                    )
+                else:
+                    raise InvalidTemplateError(
+                        f"Invalid argument to %%ALLFIG: {token}"
+                    )
+
+                included_namespaces = (
+                    opts.namespaces.split(",")
+                    if opts.namespaces
+                    else ()  # pylint: disable=no-member
+                )
+                included_ontologies = (
+                    opts.ontologies.split(",")
+                    if opts.ontologies
+                    else ()  # pylint: disable=no-member
+                )
+
+                sec = []
+                for root in roots:
+                    name = asstring(root, link="{label}", ontology=onto)
+                    filepath, _, width = self._make_branchfig(
+                        name,
+                        opts.path,  # pylint: disable=no-member
+                        opts.terminated,  # pylint: disable=no-member
+                        opts.include_leaves,  # pylint: disable=no-member
+                        opts.strict_leaves,  # pylint: disable=no-member
+                        opts.width,  # pylint: disable=no-member
+                        opts.leaves,  # pylint: disable=no-member
+                        opts.relations,  # pylint: disable=no-member
+                        opts.edgelabels,  # pylint: disable=no-member
+                        opts.rankdir,  # pylint: disable=no-member
+                        opts.legend,  # pylint: disable=no-member
+                        included_namespaces,
+                        included_ontologies,
+                    )
+                    title = f"Taxonomy of {name}."
+                    sec.append(
+                        self.ontodoc.get_header(title, int(opts.level))
+                    )  # pylint: disable=no-member
+                    sec.extend(
+                        self.ontodoc.get_figure(
+                            filepath, caption=title, width=width
+                        ).split("\n")
+                    )
+
+                del self.lines[i]
+                self.lines[i:i] = sec
+
+    def process_includes(self):
+        """Process all %INCLUDE directives."""
+        for i, line in reversed(list(enumerate(self.lines))):
+            if line.startswith("%INCLUDE "):
+                tokens = shlex.split(line)
+                filepath = tokens[1]
+                opts = get_options(tokens[2:], shift=0)
+                with open(
+                    os.path.join(self.basedir, filepath), "rt", encoding="utf8"
+                ) as handle:
+                    docpp = DocPP(
+                        handle.read(),
+                        self.ontodoc,
+                        basedir=os.path.dirname(filepath),
+                        figformat=self.figformat,
+                        figscale=self.figscale,
+                        maxwidth=self.maxwidth,
+                    )
+                    docpp.figdir = self.figdir
+                if opts.shift:  # pylint: disable=no-member
+                    docpp.shift_header_levels(
+                        int(opts.shift)
+                    )  # pylint: disable=no-member
+                docpp.process()
+                del self.lines[i]
+                self.lines[i:i] = docpp.lines
+
+    def process(self):
+        """Perform all pre-processing steps."""
+        if not self._processed:
+            self.process_comments()
+            self.process_headers()
+            self.process_figures()
+            self.process_entities()
+            self.process_branches()
+            self.process_branchfigs()
+            self.process_branchdocs()
+            self.process_alls()
+            self.process_allfig()
+            self.process_includes()
+            self._processed = True
+
+    def write(  # pylint: disable=too-many-arguments
+        self,
+        outfile,
+        fmt=None,
+        pandoc_option_files=(),
+        pandoc_options=(),
+        genfile=None,
+        verbose=True,
+    ):
+        """Writes documentation to `outfile`.
+
+        Parameters
+        ----------
+        outfile : str
+            File that the documentation is written to.
+        fmt : str
+            Output format.  If it is "md" or "simple-html",
+            the built-in template generator is used.  Otherwise
+            pandoc is used.  If not given, the format is inferred
+            from the `outfile` name extension.
+        pandoc_option_files : sequence
+            Sequence with command line arguments provided to pandoc.
+        pandoc_options : sequence
+            Additional pandoc options overriding options read from
+        `pandoc_option_files`.
+        genfile : str
+            Store temporary generated markdown input file to pandoc
+            to this file (for debugging).
+        verbose : bool
+            Whether to show some messages when running pandoc.
+        """
+        self.process()
+        content = self.get_buffer()
+
+        substitutions = self.ontodoc.style.get("substitutions", [])
+        for reg, sub in substitutions:
+            content = re.sub(reg, sub, content)
+
+        fmt = get_format(outfile, default="html", fmt=fmt)
+        if fmt not in ("simple-html", "markdown", "md"):  # Run pandoc
+            if not genfile:
+                with NamedTemporaryFile(mode="w+t", suffix=".md") as temp_file:
+                    temp_file.write(content)
+                    temp_file.flush()
+                    genfile = temp_file.name
+
+                    run_pandoc(
+                        genfile,
+                        outfile,
+                        fmt,
+                        pandoc_option_files=pandoc_option_files,
+                        pandoc_options=pandoc_options,
+                        verbose=verbose,
+                    )
+            else:
+                with open(genfile, "wt") as handle:
+                    handle.write(content)
+
+                run_pandoc(
+                    genfile,
+                    outfile,
+                    fmt,
+                    pandoc_option_files=pandoc_option_files,
+                    pandoc_options=pandoc_options,
+                    verbose=verbose,
+                )
+        else:
+            if verbose:
+                print("Writing:", outfile)
+            with open(outfile, "wt") as handle:
+                handle.write(content)
+
+
+ + + +
+ + + + + + + + + + +
+ + + +

+copy(self) + + +

+ +
+ +

Returns a copy of self.

+ +
+ Source code in ontopy/ontodoc.py +
def copy(self):
+    """Returns a copy of self."""
+    docpp = DocPP(
+        "",
+        self.ontodoc,
+        self.basedir,
+        figformat=self.figformat,
+        figscale=self.figscale,
+        maxwidth=self.maxwidth,
+    )
+    docpp.lines[:] = self.lines
+    docpp.figdir = self.figdir
+    return docpp
+
+
+
+ +
+ + + +
+ + + +

+get_branches(self) + + +

+ +
+ +

Returns a list with all branch names as specified with %BRANCH +(in current and all included documents). The returned value is +cached for efficiency purposes and so that it is not lost after +processing branches.

+ +
+ Source code in ontopy/ontodoc.py +
def get_branches(self):
+    """Returns a list with all branch names as specified with %BRANCH
+    (in current and all included documents).  The returned value is
+    cached for efficiency purposes and so that it is not lost after
+    processing branches."""
+    if self._branch_cache is None:
+        names = []
+        docpp = self.copy()
+        docpp.process_includes()
+        for line in docpp.lines:
+            if line.startswith("%BRANCH"):
+                names.append(shlex.split(line)[1])
+        self._branch_cache = names
+    return self._branch_cache
+
+
+
+ +
+ + + +
+ + + +

+get_buffer(self) + + +

+ +
+ +

Returns the current buffer.

+ +
+ Source code in ontopy/ontodoc.py +
def get_buffer(self):
+    """Returns the current buffer."""
+    return "\n".join(self.lines)
+
+
+
+ +
+ + + +
+ + + +

+process(self) + + +

+ +
+ +

Perform all pre-processing steps.

+ +
+ Source code in ontopy/ontodoc.py +
def process(self):
+    """Perform all pre-processing steps."""
+    if not self._processed:
+        self.process_comments()
+        self.process_headers()
+        self.process_figures()
+        self.process_entities()
+        self.process_branches()
+        self.process_branchfigs()
+        self.process_branchdocs()
+        self.process_alls()
+        self.process_allfig()
+        self.process_includes()
+        self._processed = True
+
+
+
+ +
+ + + +
+ + + +

+process_allfig(self) + + +

+ +
+ +

Process all %ALLFIG directives.

+ +
+ Source code in ontopy/ontodoc.py +
def process_allfig(self):  # pylint: disable=too-many-locals
+    """Process all %ALLFIG directives."""
+    onto = self.ontodoc.onto
+    for i, line in reversed(list(enumerate(self.lines))):
+        if line.startswith("%ALLFIG "):
+            tokens = shlex.split(line)
+            token = tokens[1]
+            opts = get_options(
+                tokens[2:],
+                path="",
+                level=3,
+                terminated=0,
+                include_leaves=1,
+                strict_leaves=1,
+                width=0,
+                leaves="",
+                relations="isA",
+                edgelabels=0,
+                rankdir="BT",
+                legend=1,
+                namespaces="",
+                ontologies="",
+            )
+            if token == "classes":  # nosec
+                roots = onto.get_root_classes(imported=self.imported)
+            elif token in ("object_properties", "relations"):
+                roots = onto.get_root_object_properties(
+                    imported=self.imported
+                )
+            elif token == "data_properties":  # nosec
+                roots = onto.get_root_data_properties(
+                    imported=self.imported
+                )
+            else:
+                raise InvalidTemplateError(
+                    f"Invalid argument to %%ALLFIG: {token}"
+                )
+
+            included_namespaces = (
+                opts.namespaces.split(",")
+                if opts.namespaces
+                else ()  # pylint: disable=no-member
+            )
+            included_ontologies = (
+                opts.ontologies.split(",")
+                if opts.ontologies
+                else ()  # pylint: disable=no-member
+            )
+
+            sec = []
+            for root in roots:
+                name = asstring(root, link="{label}", ontology=onto)
+                filepath, _, width = self._make_branchfig(
+                    name,
+                    opts.path,  # pylint: disable=no-member
+                    opts.terminated,  # pylint: disable=no-member
+                    opts.include_leaves,  # pylint: disable=no-member
+                    opts.strict_leaves,  # pylint: disable=no-member
+                    opts.width,  # pylint: disable=no-member
+                    opts.leaves,  # pylint: disable=no-member
+                    opts.relations,  # pylint: disable=no-member
+                    opts.edgelabels,  # pylint: disable=no-member
+                    opts.rankdir,  # pylint: disable=no-member
+                    opts.legend,  # pylint: disable=no-member
+                    included_namespaces,
+                    included_ontologies,
+                )
+                title = f"Taxonomy of {name}."
+                sec.append(
+                    self.ontodoc.get_header(title, int(opts.level))
+                )  # pylint: disable=no-member
+                sec.extend(
+                    self.ontodoc.get_figure(
+                        filepath, caption=title, width=width
+                    ).split("\n")
+                )
+
+            del self.lines[i]
+            self.lines[i:i] = sec
+
+
+
+ +
+ + + +
+ + + +

+process_alls(self) + + +

+ +
+ +

Expand all %ALL specifications.

+ +
+ Source code in ontopy/ontodoc.py +
def process_alls(self):
+    """Expand all %ALL specifications."""
+    onto = self.ontodoc.onto
+    for i, line in reversed(list(enumerate(self.lines))):
+        if line.startswith("%ALL "):
+            tokens = shlex.split(line)
+            token = tokens[1]
+            opts = get_options(tokens[2:], header_level=3)
+            if token == "classes":  # nosec
+                items = onto.classes(imported=self.imported)
+            elif token in ("object_properties", "relations"):
+                items = onto.object_properties(imported=self.imported)
+            elif token == "data_properties":  # nosec
+                items = onto.data_properties(imported=self.imported)
+            elif token == "annotation_properties":  # nosec
+                items = onto.annotation_properties(imported=self.imported)
+            elif token == "individuals":  # nosec
+                items = onto.individuals(imported=self.imported)
+            else:
+                raise InvalidTemplateError(
+                    f"Invalid argument to %%ALL: {token}"
+                )
+            items = sorted(items, key=get_label)
+            del self.lines[i]
+            self.lines[i:i] = self.ontodoc.itemsdoc(
+                items, int(opts.header_level)  # pylint: disable=no-member
+            ).split("\n")
+
+
+
+ +
+ + + +
+ + + +

+process_branchdocs(self) + + +

+ +
+ +

Process all %BRANCHDOC and %BRANCHEAD directives.

+ +
+ Source code in ontopy/ontodoc.py +
def process_branchdocs(self):  # pylint: disable=too-many-locals
+    """Process all %BRANCHDOC and  %BRANCHEAD directives."""
+    onto = self.ontodoc.onto
+    for i, line in reversed(list(enumerate(self.lines))):
+        if line.startswith("%BRANCHDOC ") or line.startswith(
+            "%BRANCHHEAD "
+        ):
+            with_branch = bool(line.startswith("%BRANCHDOC "))
+            tokens = shlex.split(line)
+            name = tokens[1]
+            title = camelsplit(name)
+            title = title[0].upper() + title[1:] + " branch"
+            opts = get_options(
+                tokens[2:],
+                level=2,
+                path="",
+                title=title,
+                caption=title + ".",
+                terminated=1,
+                strict_leaves=1,
+                width=0,
+                leaves="",
+                relations="all",
+                edgelabels=0,
+                rankdir="BT",
+                legend=1,
+                namespaces="",
+                ontologies="",
+            )
+
+            included_namespaces = (
+                opts.namespaces.split(",")
+                if opts.namespaces
+                else ()  # pylint: disable=no-member
+            )
+            included_ontologies = (
+                opts.ontologies.split(",")
+                if opts.ontologies
+                else ()  # pylint: disable=no-member
+            )
+
+            include_leaves = 1
+            filepath, leaves, width = self._make_branchfig(
+                name,
+                opts.path,  # pylint: disable=no-member
+                opts.terminated,  # pylint: disable=no-member
+                include_leaves,
+                opts.strict_leaves,  # pylint: disable=no-member
+                opts.width,  # pylint: disable=no-member
+                opts.leaves,  # pylint: disable=no-member
+                opts.relations,  # pylint: disable=no-member
+                opts.edgelabels,  # pylint: disable=no-member
+                opts.rankdir,  # pylint: disable=no-member
+                opts.legend,  # pylint: disable=no-member
+                included_namespaces,
+                included_ontologies,
+            )
+
+            sec = []
+            sec.append(
+                self.ontodoc.get_header(opts.title, int(opts.level))
+            )  # pylint: disable=no-member
+            sec.append(
+                self.ontodoc.get_figure(
+                    filepath,
+                    caption=opts.caption,
+                    width=width,  # pylint: disable=no-member
+                )
+            )
+            if with_branch:
+                include_leaves = 0
+                branch = filter_classes(
+                    onto.get_branch(name, leaves, include_leaves),
+                    included_namespaces=included_namespaces,
+                    included_ontologies=included_ontologies,
+                )
+                sec.append(
+                    self.ontodoc.itemsdoc(
+                        branch, int(opts.level + 1)
+                    )  # pylint: disable=no-member
+                )
+
+            del self.lines[i]
+            self.lines[i:i] = sec
+
+
+
+ +
+ + + +
+ + + +

+process_branches(self) + + +

+ +
+ +

Expand all %BRANCH specifications.

+ +
+ Source code in ontopy/ontodoc.py +
def process_branches(self):
+    """Expand all %BRANCH specifications."""
+    onto = self.ontodoc.onto
+
+    # Get all branch names in final document
+    names = self.get_branches()
+    for i, line in reversed(list(enumerate(self.lines))):
+        if line.startswith("%BRANCH "):
+            tokens = shlex.split(line)
+            name = tokens[1]
+            opts = get_options(
+                tokens[2:],
+                header_level=3,
+                terminated=1,
+                include_leaves=0,
+                namespaces="",
+                ontologies="",
+            )
+            leaves = (
+                names if opts.terminated else ()
+            )  # pylint: disable=no-member
+
+            included_namespaces = (
+                opts.namespaces.split(",")
+                if opts.namespaces
+                else ()  # pylint: disable=no-member
+            )
+            included_ontologies = (
+                opts.ontologies.split(",")
+                if opts.ontologies
+                else ()  # pylint: disable=no-member
+            )
+
+            branch = filter_classes(
+                onto.get_branch(
+                    name, leaves, opts.include_leaves
+                ),  # pylint: disable=no-member
+                included_namespaces=included_namespaces,
+                included_ontologies=included_ontologies,
+            )
+
+            del self.lines[i]
+            self.lines[i:i] = self.ontodoc.itemsdoc(
+                branch, int(opts.header_level)  # pylint: disable=no-member
+            ).split("\n")
+
+
+
+ +
+ + + +
+ + + +

+process_branchfigs(self) + + +

+ +
+ +

Process all %BRANCHFIG directives.

+ +
+ Source code in ontopy/ontodoc.py +
def process_branchfigs(self):
+    """Process all %BRANCHFIG directives."""
+    for i, line in reversed(list(enumerate(self.lines))):
+        if line.startswith("%BRANCHFIG "):
+            tokens = shlex.split(line)
+            name = tokens[1]
+            opts = get_options(
+                tokens[2:],
+                path="",
+                caption="",
+                terminated=1,
+                include_leaves=1,
+                strict_leaves=1,
+                width=0,
+                leaves="",
+                relations="all",
+                edgelabels=0,
+                rankdir="BT",
+                legend=1,
+                namespaces="",
+                ontologies="",
+            )
+
+            included_namespaces = (
+                opts.namespaces.split(",")
+                if opts.namespaces
+                else ()  # pylint: disable=no-member
+            )
+            included_ontologies = (
+                opts.ontologies.split(",")
+                if opts.ontologies
+                else ()  # pylint: disable=no-member
+            )
+
+            filepath, _, width = self._make_branchfig(
+                name,
+                opts.path,  # pylint: disable=no-member
+                opts.terminated,  # pylint: disable=no-member
+                opts.include_leaves,  # pylint: disable=no-member
+                opts.strict_leaves,  # pylint: disable=no-member
+                opts.width,  # pylint: disable=no-member
+                opts.leaves,  # pylint: disable=no-member
+                opts.relations,  # pylint: disable=no-member
+                opts.edgelabels,  # pylint: disable=no-member
+                opts.rankdir,  # pylint: disable=no-member
+                opts.legend,  # pylint: disable=no-member
+                included_namespaces,
+                included_ontologies,
+            )
+
+            del self.lines[i]
+            self.lines[i:i] = self.ontodoc.get_figure(
+                filepath,
+                caption=opts.caption,
+                width=width,  # pylint: disable=no-member
+            ).split("\n")
+
+
+
+ +
+ + + +
+ + + +

+process_comments(self) + + +

+ +
+ +

Strips out comment lines starting with "%%".

+ +
+ Source code in ontopy/ontodoc.py +
def process_comments(self):
+    """Strips out comment lines starting with "%%"."""
+    self.lines = [line for line in self.lines if not line.startswith("%%")]
+
+
+
+ +
+ + + +
+ + + +

+process_entities(self) + + +

+ +
+ +

Expand all %ENTITY specifications.

+ +
+ Source code in ontopy/ontodoc.py +
def process_entities(self):
+    """Expand all %ENTITY specifications."""
+    for i, line in reversed(list(enumerate(self.lines))):
+        if line.startswith("%ENTITY "):
+            tokens = shlex.split(line)
+            name = tokens[1]
+            opts = get_options(tokens[2:], header_level=3)
+            del self.lines[i]
+            self.lines[i:i] = self.ontodoc.itemdoc(
+                name, int(opts.header_level)  # pylint: disable=no-member
+            ).split("\n")
+
+
+
+ +
+ + + +
+ + + +

+process_figures(self) + + +

+ +
+ +

Expand all %FIGURE specifications.

+ +
+ Source code in ontopy/ontodoc.py +
def process_figures(self):
+    """Expand all %FIGURE specifications."""
+    for i, line in reversed(list(enumerate(self.lines))):
+        if line.startswith("%FIGURE "):
+            tokens = shlex.split(line)
+            path = tokens[1]
+            opts = get_options(tokens[2:], caption="", width=0)
+            del self.lines[i]
+            self.lines[i:i] = self.ontodoc.get_figure(
+                os.path.join(self.basedir, path),
+                caption=opts.caption,  # pylint: disable=no-member
+                width=opts.width,  # pylint: disable=no-member
+            ).split("\n")
+
+
+
+ +
+ + + +
+ + + +

+process_headers(self) + + +

+ +
+ +

Expand all %HEADER specifications.

+ +
+ Source code in ontopy/ontodoc.py +
def process_headers(self):
+    """Expand all %HEADER specifications."""
+    for i, line in reversed(list(enumerate(self.lines))):
+        if line.startswith("%HEADER "):
+            tokens = shlex.split(line)
+            name = tokens[1]
+            opts = get_options(tokens[2:], level=1)
+            del self.lines[i]
+            self.lines[i:i] = self.ontodoc.get_header(
+                name, int(opts.level)  # pylint: disable=no-member
+            ).split("\n")
+
+
+
+ +
+ + + +
+ + + +

+process_includes(self) + + +

+ +
+ +

Process all %INCLUDE directives.

+ +
+ Source code in ontopy/ontodoc.py +
def process_includes(self):
+    """Process all %INCLUDE directives."""
+    for i, line in reversed(list(enumerate(self.lines))):
+        if line.startswith("%INCLUDE "):
+            tokens = shlex.split(line)
+            filepath = tokens[1]
+            opts = get_options(tokens[2:], shift=0)
+            with open(
+                os.path.join(self.basedir, filepath), "rt", encoding="utf8"
+            ) as handle:
+                docpp = DocPP(
+                    handle.read(),
+                    self.ontodoc,
+                    basedir=os.path.dirname(filepath),
+                    figformat=self.figformat,
+                    figscale=self.figscale,
+                    maxwidth=self.maxwidth,
+                )
+                docpp.figdir = self.figdir
+            if opts.shift:  # pylint: disable=no-member
+                docpp.shift_header_levels(
+                    int(opts.shift)
+                )  # pylint: disable=no-member
+            docpp.process()
+            del self.lines[i]
+            self.lines[i:i] = docpp.lines
+
+
+
+ +
+ + + +
+ + + +

+shift_header_levels(self, shift) + + +

+ +
+ +

Shift header level of all hashtag-headers in buffer. Underline +headers are ignored.

+ +
+ Source code in ontopy/ontodoc.py +
def shift_header_levels(self, shift):
+    """Shift header level of all hashtag-headers in buffer.  Underline
+    headers are ignored."""
+    if not shift:
+        return
+    pat = re.compile("^#+ ")
+    for i, line in enumerate(self.lines):
+        match = pat.match(line)
+        if match:
+            if shift > 0:
+                self.lines[i] = "#" * shift + line
+            elif shift < 0:
+                counter = match.end()
+                if shift > counter:
+                    self.lines[i] = line.lstrip("# ")
+                else:
+                    self.lines[i] = line[counter:]
+
+
+
+ +
+ + + +
+ + + +

+write(self, outfile, fmt=None, pandoc_option_files=(), pandoc_options=(), genfile=None, verbose=True) + + +

+ +
+ +

Writes documentation to outfile.

+
Parameters
+

outfile : str + File that the documentation is written to. +fmt : str + Output format. If it is "md" or "simple-html", + the built-in template generator is used. Otherwise + pandoc is used. If not given, the format is inferred + from the outfile name extension. +pandoc_option_files : sequence + Sequence with command line arguments provided to pandoc. +pandoc_options : sequence + Additional pandoc options overriding options read from +pandoc_option_files. +genfile : str + Store temporary generated markdown input file to pandoc + to this file (for debugging). +verbose : bool + Whether to show some messages when running pandoc.

+ +
+ Source code in ontopy/ontodoc.py +
def write(  # pylint: disable=too-many-arguments
+    self,
+    outfile,
+    fmt=None,
+    pandoc_option_files=(),
+    pandoc_options=(),
+    genfile=None,
+    verbose=True,
+):
+    """Writes documentation to `outfile`.
+
+    Parameters
+    ----------
+    outfile : str
+        File that the documentation is written to.
+    fmt : str
+        Output format.  If it is "md" or "simple-html",
+        the built-in template generator is used.  Otherwise
+        pandoc is used.  If not given, the format is inferred
+        from the `outfile` name extension.
+    pandoc_option_files : sequence
+        Sequence with command line arguments provided to pandoc.
+    pandoc_options : sequence
+        Additional pandoc options overriding options read from
+    `pandoc_option_files`.
+    genfile : str
+        Store temporary generated markdown input file to pandoc
+        to this file (for debugging).
+    verbose : bool
+        Whether to show some messages when running pandoc.
+    """
+    self.process()
+    content = self.get_buffer()
+
+    substitutions = self.ontodoc.style.get("substitutions", [])
+    for reg, sub in substitutions:
+        content = re.sub(reg, sub, content)
+
+    fmt = get_format(outfile, default="html", fmt=fmt)
+    if fmt not in ("simple-html", "markdown", "md"):  # Run pandoc
+        if not genfile:
+            with NamedTemporaryFile(mode="w+t", suffix=".md") as temp_file:
+                temp_file.write(content)
+                temp_file.flush()
+                genfile = temp_file.name
+
+                run_pandoc(
+                    genfile,
+                    outfile,
+                    fmt,
+                    pandoc_option_files=pandoc_option_files,
+                    pandoc_options=pandoc_options,
+                    verbose=verbose,
+                )
+        else:
+            with open(genfile, "wt") as handle:
+                handle.write(content)
+
+            run_pandoc(
+                genfile,
+                outfile,
+                fmt,
+                pandoc_option_files=pandoc_option_files,
+                pandoc_options=pandoc_options,
+                verbose=verbose,
+            )
+    else:
+        if verbose:
+            print("Writing:", outfile)
+        with open(outfile, "wt") as handle:
+            handle.write(content)
+
+
+
+ +
+ + + + + +
+ +
+ +
+ + + +
+ + + +

+ +InvalidTemplateError (NameError) + + + + +

+ +
+ +

Raised on errors in template files.

+ +
+ Source code in ontopy/ontodoc.py +
class InvalidTemplateError(NameError):
+    """Raised on errors in template files."""
+
+
+ + +
+ +
+ + + +
+ + + +

+ +OntoDoc + + + +

+ +
+ +

A class for helping documentating ontologies.

+

Parameters

+

onto : Ontology instance + The ontology that should be documented. +style : dict | "html" | "markdown" | "markdown_tex" + A dict defining the following template strings (and substitutions):

+
:header: Formats an header.
+    Substitutions: {level}, {label}
+:link: Formats a link.
+   Substitutions: {name}
+:point: Formats a point (list item).
+   Substitutions: {point}, {ontology}
+:points: Formats a list of points.  Used within annotations.
+   Substitutions: {points}, {ontology}
+:annotation: Formats an annotation.
+    Substitutions: {key}, {value}, {ontology}
+:substitutions: list of ``(regex, sub)`` pairs for substituting
+    annotation values.
+
+ +
+ Source code in ontopy/ontodoc.py +
class OntoDoc:
+    """A class for helping documentating ontologies.
+
+    Parameters
+    ----------
+    onto : Ontology instance
+        The ontology that should be documented.
+    style : dict | "html" | "markdown" | "markdown_tex"
+        A dict defining the following template strings (and substitutions):
+
+        :header: Formats an header.
+            Substitutions: {level}, {label}
+        :link: Formats a link.
+           Substitutions: {name}
+        :point: Formats a point (list item).
+           Substitutions: {point}, {ontology}
+        :points: Formats a list of points.  Used within annotations.
+           Substitutions: {points}, {ontology}
+        :annotation: Formats an annotation.
+            Substitutions: {key}, {value}, {ontology}
+        :substitutions: list of ``(regex, sub)`` pairs for substituting
+            annotation values.
+    """
+
+    _markdown_style = {
+        "sep": "\n",
+        "figwidth": "{{ width={width:.0f}px }}",
+        "figure": "![{caption}]({path}){figwidth}\n",
+        "header": "\n{:#<{level}} {label}    {{#{anchor}}}",
+        # Use ref instead of iri for local references in links
+        "link": "[{label}]({ref})",
+        "point": "  - {point}\n",
+        "points": "\n\n{points}\n",
+        "annotation": "**{key}:** {value}\n",
+        "substitutions": [],
+    }
+    # Extra style settings for markdown+tex (e.g. pdf generation with pandoc)
+    _markdown_tex_extra_style = {
+        "substitutions": [
+            # logic/math symbols
+            ("\u2200", r"$\\forall$"),
+            ("\u2203", r"$\\exists$"),
+            ("\u2206", r"$\\nabla$"),
+            ("\u2227", r"$\\land$"),
+            ("\u2228", r"$\\lor$"),
+            ("\u2207", r"$\\nabla$"),
+            ("\u2212", r"-"),
+            ("->", r"$\\rightarrow$"),
+            # uppercase greek letters
+            ("\u0391", r"$\\Upalpha$"),
+            ("\u0392", r"$\\Upbeta$"),
+            ("\u0393", r"$\\Upgamma$"),
+            ("\u0394", r"$\\Updelta$"),
+            ("\u0395", r"$\\Upepsilon$"),
+            ("\u0396", r"$\\Upzeta$"),
+            ("\u0397", r"$\\Upeta$"),
+            ("\u0398", r"$\\Uptheta$"),
+            ("\u0399", r"$\\Upiota$"),
+            ("\u039a", r"$\\Upkappa$"),
+            ("\u039b", r"$\\Uplambda$"),
+            ("\u039c", r"$\\Upmu$"),
+            ("\u039d", r"$\\Upnu$"),
+            ("\u039e", r"$\\Upxi$"),
+            ("\u039f", r"$\\Upomekron$"),
+            ("\u03a0", r"$\\Uppi$"),
+            ("\u03a1", r"$\\Uprho$"),
+            ("\u03a3", r"$\\Upsigma$"),  # no \u0302
+            ("\u03a4", r"$\\Uptau$"),
+            ("\u03a5", r"$\\Upupsilon$"),
+            ("\u03a6", r"$\\Upvarphi$"),
+            ("\u03a7", r"$\\Upchi$"),
+            ("\u03a8", r"$\\Uppsi$"),
+            ("\u03a9", r"$\\Upomega$"),
+            # lowercase greek letters
+            ("\u03b1", r"$\\upalpha$"),
+            ("\u03b2", r"$\\upbeta$"),
+            ("\u03b3", r"$\\upgamma$"),
+            ("\u03b4", r"$\\updelta$"),
+            ("\u03b5", r"$\\upepsilon$"),
+            ("\u03b6", r"$\\upzeta$"),
+            ("\u03b7", r"$\\upeta$"),
+            ("\u03b8", r"$\\uptheta$"),
+            ("\u03b9", r"$\\upiota$"),
+            ("\u03ba", r"$\\upkappa$"),
+            ("\u03bb", r"$\\uplambda$"),
+            ("\u03bc", r"$\\upmu$"),
+            ("\u03bd", r"$\\upnu$"),
+            ("\u03be", r"$\\upxi$"),
+            ("\u03bf", r"o"),  # no \upomicron
+            ("\u03c0", r"$\\uppi$"),
+            ("\u03c1", r"$\\uprho$"),
+            ("\u03c2", r"$\\upvarsigma$"),
+            ("\u03c3", r"$\\upsigma$"),
+            ("\u03c4", r"$\\uptau$"),
+            ("\u03c5", r"$\\upupsilon$"),
+            ("\u03c6", r"$\\upvarphi$"),
+            ("\u03c7", r"$\\upchi$"),
+            ("\u03c8", r"$\\uppsi$"),
+            ("\u03c9", r"$\\upomega$"),
+            # acutes, accents, etc...
+            ("\u03ae", r"$\\acute{\\upeta}$"),
+            ("\u1e17", r"$\\acute{\\bar{\\mathrm{e}}}$"),
+            ("\u03ac", r"$\\acute{\\upalpha}$"),
+            ("\u00e1", r"$\\acute{\\mathrm{a}}$"),
+            ("\u03cc", r"$\\acute{o}$"),  # no \upomicron
+            ("\u014d", r"$\\bar{\\mathrm{o}}$"),
+            ("\u1f45", r"$\\acute{o}$"),  # no \omicron
+        ],
+    }
+    _html_style = {
+        "sep": "<p>\n",
+        "figwidth": 'width="{width:.0f}"',
+        "figure": '<img src="{path}" alt="{caption}"{figwidth}>',
+        "header": '<h{level} id="{anchor}">{label}</h{level}>',
+        "link": '<a href="{ref}">{label}</a>',
+        "point": "      <li>{point}</li>\n",
+        "points": "    <ul>\n      {points}\n    </ul>\n",
+        "annotation": "  <dd><strong>{key}:</strong>\n{value}  </dd>\n",
+        "substitutions": [
+            (r"&", r"&#8210;"),
+            (r"<p>", r"<p>\n\n"),
+            (r"\u2018([^\u2019]*)\u2019", r"<q>\1</q>"),
+            (r"\u2019", r"'"),
+            (r"\u2260", r"&ne;"),
+            (r"\u2264", r"&le;"),
+            (r"\u2265", r"&ge;"),
+            (r"\u226A", r"&x226A;"),
+            (r"\u226B", r"&x226B;"),
+            (r'"Y$', r""),  # strange noice added by owlready2
+        ],
+    }
+
+    def __init__(self, onto, style="markdown"):
+        if isinstance(style, str):
+            if style == "markdown_tex":
+                style = self._markdown_style.copy()
+                style.update(self._markdown_tex_extra_style)
+            else:
+                style = getattr(self, f"_{style}_style")
+        self.onto = onto
+        self.style = style
+        self.url_regex = re.compile(r"https?:\/\/[^\s ]+")
+
+    def get_default_template(self):
+        """Returns default template."""
+        title = os.path.splitext(
+            os.path.basename(self.onto.base_iri.rstrip("/#"))
+        )[0]
+        irilink = self.style.get("link", "{name}").format(
+            iri=self.onto.base_iri,
+            name=self.onto.base_iri,
+            ref=self.onto.base_iri,
+            label=self.onto.base_iri,
+            lowerlabel=self.onto.base_iri,
+        )
+        template = dedent(
+            """\
+        %HEADER {title}
+        Documentation of {irilink}
+
+        %HEADER Relations level=2
+        %ALL object_properties
+
+        %HEADER Classes level=2
+        %ALL classes
+
+        %HEADER Individuals level=2
+        %ALL individuals
+
+        %HEADER Appendix               level=1
+        %HEADER "Relation taxonomies"  level=2
+        %ALLFIG object_properties
+
+        %HEADER "Class taxonomies"     level=2
+        %ALLFIG classes
+        """
+        ).format(ontology=self.onto, title=title, irilink=irilink)
+        return template
+
+    def get_header(self, label, header_level=1, anchor=None):
+        """Returns `label` formatted as a header of given level."""
+        header_style = self.style.get("header", "{label}\n")
+        return header_style.format(
+            "",
+            level=header_level,
+            label=label,
+            anchor=anchor if anchor else label.lower().replace(" ", "-"),
+        )
+
+    def get_figure(self, path, caption="", width=None):
+        """Returns a formatted insert-figure-directive."""
+        figwidth_style = self.style.get("figwidth", "")
+        figure_style = self.style.get("figure", "")
+        figwidth = figwidth_style.format(width=width) if width else ""
+        return figure_style.format(
+            path=path, caption=caption, figwidth=figwidth
+        )
+
+    def itemdoc(
+        self, item, header_level=3, show_disjoints=False
+    ):  # pylint: disable=too-many-locals,too-many-branches,too-many-statements
+        """Returns documentation of `item`.
+
+        Parameters
+        ----------
+        item : obj | label
+            The class, individual or relation to document.
+        header_level : int
+            Header level. Defaults to 3.
+        show_disjoints : Bool
+            Whether to show `disjoint_with` relations.
+        """
+        onto = self.onto
+        if isinstance(item, str):
+            item = self.onto.get_by_label(item)
+
+        header_style = self.style.get("header", "{label}\n")
+        link_style = self.style.get("link", "{name}")
+        point_style = self.style.get("point", "{point}")
+        points_style = self.style.get("points", "{points}")
+        annotation_style = self.style.get("annotation", "{key}: {value}\n")
+        substitutions = self.style.get("substitutions", [])
+
+        # Logical "sorting" of annotations
+        order = {
+            "definition": "00",
+            "axiom": "01",
+            "theorem": "02",
+            "elucidation": "03",
+            "domain": "04",
+            "range": "05",
+            "example": "06",
+        }
+
+        doc = []
+
+        # Header
+        label = get_label(item)
+        iriname = item.iri.partition("#")[2]
+        anchor = iriname if iriname else label.lower()
+        doc.append(
+            header_style.format(
+                "",
+                level=header_level,
+                label=label,
+                anchor=anchor,
+            )
+        )
+
+        # Add warning about missing prefLabel
+        if not hasattr(item, "prefLabel") or not item.prefLabel.first():
+            doc.append(
+                annotation_style.format(
+                    key="Warning", value="Missing prefLabel"
+                )
+            )
+
+        # Add iri
+        doc.append(
+            annotation_style.format(
+                key="IRI",
+                value=asstring(item.iri, link_style, ontology=onto),
+                ontology=onto,
+            )
+        )
+
+        # Add annotations
+        if isinstance(item, owlready2.Thing):
+            annotations = item.get_individual_annotations()
+        else:
+            annotations = item.get_annotations()
+
+        for key in sorted(
+            annotations.keys(), key=lambda key: order.get(key, key)
+        ):
+            for value in annotations[key]:
+                value = str(value)
+                if self.url_regex.match(value):
+                    doc.append(
+                        annotation_style.format(
+                            key=key,
+                            value=asstring(value, link_style, ontology=onto),
+                        )
+                    )
+                else:
+                    for reg, sub in substitutions:
+                        value = re.sub(reg, sub, value)
+                    doc.append(annotation_style.format(key=key, value=value))
+
+        # ...add relations from is_a
+        points = []
+        non_prop = (
+            owlready2.ThingClass,  # owlready2.Restriction,
+            owlready2.And,
+            owlready2.Or,
+            owlready2.Not,
+        )
+        for prop in item.is_a:
+            if isinstance(prop, non_prop) or (
+                isinstance(item, owlready2.PropertyClass)
+                and isinstance(prop, owlready2.PropertyClass)
+            ):
+                points.append(
+                    point_style.format(
+                        point="is_a "
+                        + asstring(prop, link_style, ontology=onto),
+                        ontology=onto,
+                    )
+                )
+            else:
+                points.append(
+                    point_style.format(
+                        point=asstring(prop, link_style, ontology=onto),
+                        ontology=onto,
+                    )
+                )
+
+        # ...add equivalent_to relations
+        for entity in item.equivalent_to:
+            points.append(
+                point_style.format(
+                    point="equivalent_to "
+                    + asstring(entity, link_style, ontology=onto)
+                )
+            )
+
+        # ...add disjoint_with relations
+        if show_disjoints and hasattr(item, "disjoint_with"):
+            subjects = set(item.disjoint_with(reduce=True))
+            points.append(
+                point_style.format(
+                    point="disjoint_with "
+                    + ", ".join(
+                        asstring(s, link_style, ontology=onto) for s in subjects
+                    ),
+                    ontology=onto,
+                )
+            )
+
+        # ...add disjoint_unions
+        if hasattr(item, "disjoint_unions"):
+            for unions in item.disjoint_unions:
+                string = ", ".join(
+                    asstring(u, link_style, ontology=onto) for u in unions
+                )
+                points.append(
+                    point_style.format(
+                        point=f"disjoint_union_of {string}", ontology=onto
+                    )
+                )
+
+        # ...add inverse_of relations
+        if hasattr(item, "inverse_property") and item.inverse_property:
+            points.append(
+                point_style.format(
+                    point="inverse_of "
+                    + asstring(item.inverse_property, link_style, ontology=onto)
+                )
+            )
+
+        # ...add domain restrictions
+        for domain in getattr(item, "domain", ()):
+            points.append(
+                point_style.format(
+                    point="domain "
+                    + asstring(domain, link_style, ontology=onto)
+                )
+            )
+
+        # ...add range restrictions
+        for restriction in getattr(item, "range", ()):
+            points.append(
+                point_style.format(
+                    point="range "
+                    + asstring(restriction, link_style, ontology=onto)
+                )
+            )
+
+        # Add points (from is_a)
+        if points:
+            value = points_style.format(points="".join(points), ontology=onto)
+            doc.append(
+                annotation_style.format(
+                    key="Subclass of", value=value, ontology=onto
+                )
+            )
+
+        # Instances (individuals)
+        if hasattr(item, "instances"):
+            points = []
+
+            for instance in item.instances():
+                if isinstance(instance.is_instance_of, property):
+                    warnings.warn(
+                        f'Ignoring instance "{instance}" which is both and '
+                        "indivudual and class. Ontodoc does not support "
+                        "punning at the present moment."
+                    )
+                    continue
+                if item in instance.is_instance_of:
+                    points.append(
+                        point_style.format(
+                            point=asstring(instance, link_style, ontology=onto),
+                            ontology=onto,
+                        )
+                    )
+                if points:
+                    value = points_style.format(
+                        points="".join(points), ontology=onto
+                    )
+                    doc.append(
+                        annotation_style.format(
+                            key="Individuals", value=value, ontology=onto
+                        )
+                    )
+
+        return "\n".join(doc)
+
+    def itemsdoc(self, items, header_level=3):
+        """Returns documentation of `items`."""
+        sep_style = self.style.get("sep", "\n")
+        doc = []
+        for item in items:
+            doc.append(self.itemdoc(item, header_level))
+            doc.append(sep_style.format(ontology=self.onto))
+        return "\n".join(doc)
+
+
+ + + +
+ + + + + + + + + + +
+ + + +

+get_default_template(self) + + +

+ +
+ +

Returns default template.

+ +
+ Source code in ontopy/ontodoc.py +
def get_default_template(self):
+    """Returns default template."""
+    title = os.path.splitext(
+        os.path.basename(self.onto.base_iri.rstrip("/#"))
+    )[0]
+    irilink = self.style.get("link", "{name}").format(
+        iri=self.onto.base_iri,
+        name=self.onto.base_iri,
+        ref=self.onto.base_iri,
+        label=self.onto.base_iri,
+        lowerlabel=self.onto.base_iri,
+    )
+    template = dedent(
+        """\
+    %HEADER {title}
+    Documentation of {irilink}
+
+    %HEADER Relations level=2
+    %ALL object_properties
+
+    %HEADER Classes level=2
+    %ALL classes
+
+    %HEADER Individuals level=2
+    %ALL individuals
+
+    %HEADER Appendix               level=1
+    %HEADER "Relation taxonomies"  level=2
+    %ALLFIG object_properties
+
+    %HEADER "Class taxonomies"     level=2
+    %ALLFIG classes
+    """
+    ).format(ontology=self.onto, title=title, irilink=irilink)
+    return template
+
+
+
+ +
+ + + +
+ + + +

+get_figure(self, path, caption='', width=None) + + +

+ +
+ +

Returns a formatted insert-figure-directive.

+ +
+ Source code in ontopy/ontodoc.py +
def get_figure(self, path, caption="", width=None):
+    """Returns a formatted insert-figure-directive."""
+    figwidth_style = self.style.get("figwidth", "")
+    figure_style = self.style.get("figure", "")
+    figwidth = figwidth_style.format(width=width) if width else ""
+    return figure_style.format(
+        path=path, caption=caption, figwidth=figwidth
+    )
+
+
+
+ +
+ + + +
+ + + +

+get_header(self, label, header_level=1, anchor=None) + + +

+ +
+ +

Returns label formatted as a header of given level.

+ +
+ Source code in ontopy/ontodoc.py +
def get_header(self, label, header_level=1, anchor=None):
+    """Returns `label` formatted as a header of given level."""
+    header_style = self.style.get("header", "{label}\n")
+    return header_style.format(
+        "",
+        level=header_level,
+        label=label,
+        anchor=anchor if anchor else label.lower().replace(" ", "-"),
+    )
+
+
+
+ +
+ + + +
+ + + +

+itemdoc(self, item, header_level=3, show_disjoints=False) + + +

+ +
+ +

Returns documentation of item.

+
Parameters
+

item : obj | label + The class, individual or relation to document. +header_level : int + Header level. Defaults to 3. +show_disjoints : Bool + Whether to show disjoint_with relations.

+ +
+ Source code in ontopy/ontodoc.py +
def itemdoc(
+    self, item, header_level=3, show_disjoints=False
+):  # pylint: disable=too-many-locals,too-many-branches,too-many-statements
+    """Returns documentation of `item`.
+
+    Parameters
+    ----------
+    item : obj | label
+        The class, individual or relation to document.
+    header_level : int
+        Header level. Defaults to 3.
+    show_disjoints : Bool
+        Whether to show `disjoint_with` relations.
+    """
+    onto = self.onto
+    if isinstance(item, str):
+        item = self.onto.get_by_label(item)
+
+    header_style = self.style.get("header", "{label}\n")
+    link_style = self.style.get("link", "{name}")
+    point_style = self.style.get("point", "{point}")
+    points_style = self.style.get("points", "{points}")
+    annotation_style = self.style.get("annotation", "{key}: {value}\n")
+    substitutions = self.style.get("substitutions", [])
+
+    # Logical "sorting" of annotations
+    order = {
+        "definition": "00",
+        "axiom": "01",
+        "theorem": "02",
+        "elucidation": "03",
+        "domain": "04",
+        "range": "05",
+        "example": "06",
+    }
+
+    doc = []
+
+    # Header
+    label = get_label(item)
+    iriname = item.iri.partition("#")[2]
+    anchor = iriname if iriname else label.lower()
+    doc.append(
+        header_style.format(
+            "",
+            level=header_level,
+            label=label,
+            anchor=anchor,
+        )
+    )
+
+    # Add warning about missing prefLabel
+    if not hasattr(item, "prefLabel") or not item.prefLabel.first():
+        doc.append(
+            annotation_style.format(
+                key="Warning", value="Missing prefLabel"
+            )
+        )
+
+    # Add iri
+    doc.append(
+        annotation_style.format(
+            key="IRI",
+            value=asstring(item.iri, link_style, ontology=onto),
+            ontology=onto,
+        )
+    )
+
+    # Add annotations
+    if isinstance(item, owlready2.Thing):
+        annotations = item.get_individual_annotations()
+    else:
+        annotations = item.get_annotations()
+
+    for key in sorted(
+        annotations.keys(), key=lambda key: order.get(key, key)
+    ):
+        for value in annotations[key]:
+            value = str(value)
+            if self.url_regex.match(value):
+                doc.append(
+                    annotation_style.format(
+                        key=key,
+                        value=asstring(value, link_style, ontology=onto),
+                    )
+                )
+            else:
+                for reg, sub in substitutions:
+                    value = re.sub(reg, sub, value)
+                doc.append(annotation_style.format(key=key, value=value))
+
+    # ...add relations from is_a
+    points = []
+    non_prop = (
+        owlready2.ThingClass,  # owlready2.Restriction,
+        owlready2.And,
+        owlready2.Or,
+        owlready2.Not,
+    )
+    for prop in item.is_a:
+        if isinstance(prop, non_prop) or (
+            isinstance(item, owlready2.PropertyClass)
+            and isinstance(prop, owlready2.PropertyClass)
+        ):
+            points.append(
+                point_style.format(
+                    point="is_a "
+                    + asstring(prop, link_style, ontology=onto),
+                    ontology=onto,
+                )
+            )
+        else:
+            points.append(
+                point_style.format(
+                    point=asstring(prop, link_style, ontology=onto),
+                    ontology=onto,
+                )
+            )
+
+    # ...add equivalent_to relations
+    for entity in item.equivalent_to:
+        points.append(
+            point_style.format(
+                point="equivalent_to "
+                + asstring(entity, link_style, ontology=onto)
+            )
+        )
+
+    # ...add disjoint_with relations
+    if show_disjoints and hasattr(item, "disjoint_with"):
+        subjects = set(item.disjoint_with(reduce=True))
+        points.append(
+            point_style.format(
+                point="disjoint_with "
+                + ", ".join(
+                    asstring(s, link_style, ontology=onto) for s in subjects
+                ),
+                ontology=onto,
+            )
+        )
+
+    # ...add disjoint_unions
+    if hasattr(item, "disjoint_unions"):
+        for unions in item.disjoint_unions:
+            string = ", ".join(
+                asstring(u, link_style, ontology=onto) for u in unions
+            )
+            points.append(
+                point_style.format(
+                    point=f"disjoint_union_of {string}", ontology=onto
+                )
+            )
+
+    # ...add inverse_of relations
+    if hasattr(item, "inverse_property") and item.inverse_property:
+        points.append(
+            point_style.format(
+                point="inverse_of "
+                + asstring(item.inverse_property, link_style, ontology=onto)
+            )
+        )
+
+    # ...add domain restrictions
+    for domain in getattr(item, "domain", ()):
+        points.append(
+            point_style.format(
+                point="domain "
+                + asstring(domain, link_style, ontology=onto)
+            )
+        )
+
+    # ...add range restrictions
+    for restriction in getattr(item, "range", ()):
+        points.append(
+            point_style.format(
+                point="range "
+                + asstring(restriction, link_style, ontology=onto)
+            )
+        )
+
+    # Add points (from is_a)
+    if points:
+        value = points_style.format(points="".join(points), ontology=onto)
+        doc.append(
+            annotation_style.format(
+                key="Subclass of", value=value, ontology=onto
+            )
+        )
+
+    # Instances (individuals)
+    if hasattr(item, "instances"):
+        points = []
+
+        for instance in item.instances():
+            if isinstance(instance.is_instance_of, property):
+                warnings.warn(
+                    f'Ignoring instance "{instance}" which is both and '
+                    "indivudual and class. Ontodoc does not support "
+                    "punning at the present moment."
+                )
+                continue
+            if item in instance.is_instance_of:
+                points.append(
+                    point_style.format(
+                        point=asstring(instance, link_style, ontology=onto),
+                        ontology=onto,
+                    )
+                )
+            if points:
+                value = points_style.format(
+                    points="".join(points), ontology=onto
+                )
+                doc.append(
+                    annotation_style.format(
+                        key="Individuals", value=value, ontology=onto
+                    )
+                )
+
+    return "\n".join(doc)
+
+
+
+ +
+ + + +
+ + + +

+itemsdoc(self, items, header_level=3) + + +

+ +
+ +

Returns documentation of items.

+ +
+ Source code in ontopy/ontodoc.py +
def itemsdoc(self, items, header_level=3):
+    """Returns documentation of `items`."""
+    sep_style = self.style.get("sep", "\n")
+    doc = []
+    for item in items:
+        doc.append(self.itemdoc(item, header_level))
+        doc.append(sep_style.format(ontology=self.onto))
+    return "\n".join(doc)
+
+
+
+ +
+ + + + + +
+ +
+ +
+ + + + +
+ + + +

+append_pandoc_options(options, updates) + + +

+ +
+ +

Append updates to pandoc options options.

+

Parameters

+

options : sequence + Sequence with initial Pandoc options. +updates : sequence of str + Sequence of strings of the form "--longoption=value", where + longoption is a valid pandoc long option and value is the + new value. The "=value" part is optional.

+
Strings of the form "no-longoption" will filter out "--longoption"
+from `options`.
+
+

Returns

+

new_options : list + Updated pandoc options.

+ +
+ Source code in ontopy/ontodoc.py +
def append_pandoc_options(options, updates):
+    """Append `updates` to pandoc options `options`.
+
+    Parameters
+    ----------
+    options : sequence
+        Sequence with initial Pandoc options.
+    updates : sequence of str
+        Sequence of strings of the form "--longoption=value", where
+        ``longoption`` is a valid pandoc long option and ``value`` is the
+        new value.  The "=value" part is optional.
+
+        Strings of the form "no-longoption" will filter out "--longoption"
+        from `options`.
+
+    Returns
+    -------
+    new_options : list
+        Updated pandoc options.
+    """
+    # Valid pandoc options starting with "--no-XXX"
+    no_options = set("no-highlight")
+
+    if not updates:
+        return list(options)
+
+    curated_updates = {}
+    for update in updates:
+        key, sep, value = update.partition("=")
+        curated_updates[key.lstrip("-")] = value if sep else None
+        filter_out = set(
+            _
+            for _ in curated_updates
+            if _.startswith("no-") and _ not in no_options
+        )
+        _filter_out = set(f"--{_[3:]}" for _ in filter_out)
+        new_options = [
+            opt for opt in options if opt.partition("=")[0] not in _filter_out
+        ]
+        new_options.extend(
+            [
+                f"--{key}" if value is None else f"--{key}={value}"
+                for key, value in curated_updates.items()
+                if key not in filter_out
+            ]
+        )
+    return new_options
+
+
+
+ +
+ + + +
+ + + +

+get_docpp(ontodoc, infile, figdir='genfigs', figformat='png', maxwidth=None, imported=False) + + +

+ +
+ +

Read infile and return a new docpp instance.

+ +
+ Source code in ontopy/ontodoc.py +
def get_docpp(  # pylint: disable=too-many-arguments
+    ontodoc,
+    infile,
+    figdir="genfigs",
+    figformat="png",
+    maxwidth=None,
+    imported=False,
+):
+    """Read `infile` and return a new docpp instance."""
+    if infile:
+        with open(infile, "rt") as handle:
+            template = handle.read()
+        basedir = os.path.dirname(infile)
+    else:
+        template = ontodoc.get_default_template()
+        basedir = "."
+
+    docpp = DocPP(
+        template,
+        ontodoc,
+        basedir=basedir,
+        figdir=figdir,
+        figformat=figformat,
+        maxwidth=maxwidth,
+        imported=imported,
+    )
+
+    return docpp
+
+
+
+ +
+ + + +
+ + + +

+get_figformat(fmt) + + +

+ +
+ +

Infer preferred figure format from output format.

+ +
+ Source code in ontopy/ontodoc.py +
def get_figformat(fmt):
+    """Infer preferred figure format from output format."""
+    if fmt == "pdf":
+        figformat = "pdf"  # XXX
+    elif "html" in fmt:
+        figformat = "svg"
+    else:
+        figformat = "png"
+    return figformat
+
+
+
+ +
+ + + +
+ + + +

+get_maxwidth(fmt) + + +

+ +
+ +

Infer preferred max figure width from output format.

+ +
+ Source code in ontopy/ontodoc.py +
def get_maxwidth(fmt):
+    """Infer preferred max figure width from output format."""
+    if fmt == "pdf":
+        maxwidth = 668
+    else:
+        maxwidth = 1024
+    return maxwidth
+
+
+
+ +
+ + + +
+ + + +

+get_options(opts, **kwargs) + + +

+ +
+ +

Returns a dict with options from the sequence opts with +"name=value" pairs. Valid option names and default values are +provided with the keyword arguments.

+ +
+ Source code in ontopy/ontodoc.py +
def get_options(opts, **kwargs):
+    """Returns a dict with options from the sequence `opts` with
+    "name=value" pairs. Valid option names and default values are
+    provided with the keyword arguments."""
+    res = AttributeDict(kwargs)
+    for opt in opts:
+        if "=" not in opt:
+            raise InvalidTemplateError(
+                f'Missing "=" in template option: {opt!r}'
+            )
+        name, value = opt.split("=", 1)
+        if name not in res:
+            raise InvalidTemplateError(f"Invalid template option: {name!r}")
+        res_type = type(res[name])
+        res[name] = res_type(value)
+    return res
+
+
+
+ +
+ + + +
+ + + +

+get_style(fmt) + + +

+ +
+ +

Infer style from output format.

+ +
+ Source code in ontopy/ontodoc.py +
def get_style(fmt):
+    """Infer style from output format."""
+    if fmt == "simple-html":
+        style = "html"
+    elif fmt in ("tex", "latex", "pdf"):
+        style = "markdown_tex"
+    else:
+        style = "markdown"
+    return style
+
+
+
+ +
+ + + +
+ + + +

+load_pandoc_option_file(yamlfile) + + +

+ +
+ +

Loads pandoc options from yamlfile and return a list with +corresponding pandoc command line arguments.

+ +
+ Source code in ontopy/ontodoc.py +
def load_pandoc_option_file(yamlfile):
+    """Loads pandoc options from `yamlfile` and return a list with
+    corresponding pandoc command line arguments."""
+    with open(yamlfile) as handle:
+        pandoc_options = yaml.safe_load(handle)
+    options = pandoc_options.pop("input-files", [])
+    variables = pandoc_options.pop("variables", {})
+
+    for key, value in pandoc_options.items():
+        if isinstance(value, bool):
+            if value:
+                options.append(f"--{key}")
+        else:
+            options.append(f"--{key}={value}")
+
+    for key, value in variables.items():
+        if key == "date" and value == "now":
+            value = time.strftime("%B %d, %Y")
+        options.append(f"--variable={key}:{value}")
+
+    return options
+
+
+
+ +
+ + + +
+ + + +

+run_pandoc(genfile, outfile, fmt, pandoc_option_files=(), pandoc_options=(), verbose=True) + + +

+ +
+ +

Runs pandoc.

+

Parameters

+

genfile : str + Name of markdown input file. +outfile : str + Output file name. +fmt : str + Output format. +pandoc_option_files : sequence + List of files with additional pandoc options. Default is to read + "pandoc-options.yaml" and "pandoc-FORMAT-options.yml", where + FORMAT is the output format. +pandoc_options : sequence + Additional pandoc options overriding options read from + pandoc_option_files. +verbose : bool + Whether to print the pandoc command before execution.

+

Raises

+

subprocess.CalledProcessError + If the pandoc process returns with non-zero status. The returncode + attribute will hold the exit code.

+ +
+ Source code in ontopy/ontodoc.py +
def run_pandoc(  # pylint: disable=too-many-arguments
+    genfile,
+    outfile,
+    fmt,
+    pandoc_option_files=(),
+    pandoc_options=(),
+    verbose=True,
+):
+    """Runs pandoc.
+
+    Parameters
+    ----------
+    genfile : str
+        Name of markdown input file.
+    outfile : str
+        Output file name.
+    fmt : str
+        Output format.
+    pandoc_option_files : sequence
+        List of files with additional pandoc options.  Default is to read
+        "pandoc-options.yaml" and "pandoc-FORMAT-options.yml", where
+        `FORMAT` is the output format.
+    pandoc_options : sequence
+        Additional pandoc options overriding options read from
+        `pandoc_option_files`.
+    verbose : bool
+        Whether to print the pandoc command before execution.
+
+    Raises
+    ------
+    subprocess.CalledProcessError
+        If the pandoc process returns with non-zero status.  The `returncode`
+        attribute will hold the exit code.
+    """
+    # Create pandoc argument list
+    args = [genfile]
+    files = ["pandoc-options.yaml", f"pandoc-{fmt}-options.yaml"]
+    if pandoc_option_files:
+        files = pandoc_option_files
+    for fname in files:
+        if os.path.exists(fname):
+            args.extend(load_pandoc_option_file(fname))
+        else:
+            warnings.warn(f"missing pandoc option file: {fname}")
+
+    # Update pandoc argument list
+    args = append_pandoc_options(args, pandoc_options)
+
+    # pdf output requires a special attention...
+    if fmt == "pdf":
+        pdf_engine = "pdflatex"
+        for arg in args:
+            if arg.startswith("--pdf-engine"):
+                pdf_engine = arg.split("=", 1)[1]
+                break
+        with TemporaryDirectory() as tmpdir:
+            run_pandoc_pdf(tmpdir, pdf_engine, outfile, args, verbose=verbose)
+    else:
+        args.append(f"--output={outfile}")
+        cmd = ["pandoc"] + args
+        if verbose:
+            print()
+            print("* Executing command:")
+            print(" ".join(shlex.quote(_) for _ in cmd))
+        subprocess.check_call(cmd)  # nosec
+
+
+
+ +
+ + + +
+ + + +

+run_pandoc_pdf(latex_dir, pdf_engine, outfile, args, verbose=True) + + +

+ +
+ +

Run pandoc for pdf generation.

+ +
+ Source code in ontopy/ontodoc.py +
def run_pandoc_pdf(latex_dir, pdf_engine, outfile, args, verbose=True):
+    """Run pandoc for pdf generation."""
+    basename = os.path.join(
+        latex_dir, os.path.splitext(os.path.basename(outfile))[0]
+    )
+
+    # Run pandoc
+    texfile = basename + ".tex"
+    args.append(f"--output={texfile}")
+    cmd = ["pandoc"] + args
+    if verbose:
+        print()
+        print("* Executing commands:")
+        print(" ".join(shlex.quote(s) for s in cmd))
+    subprocess.check_call(cmd)  # nosec
+
+    # Fixing tex output
+    texfile2 = basename + "2.tex"
+    with open(texfile, "rt") as handle:
+        content = handle.read().replace(r"\$\Uptheta\$", r"$\Uptheta$")
+    with open(texfile2, "wt") as handle:
+        handle.write(content)
+
+    # Run latex
+    pdffile = basename + "2.pdf"
+    cmd = [
+        pdf_engine,
+        texfile2,
+        "-halt-on-error",
+        f"-output-directory={latex_dir}",
+    ]
+    if verbose:
+        print()
+        print(" ".join(shlex.quote(s) for s in cmd))
+    output = subprocess.check_output(cmd, timeout=60)  # nosec
+    output = subprocess.check_output(cmd, timeout=60)  # nosec
+
+    # Workaround for non-working "-output-directory" latex option
+    if not os.path.exists(pdffile):
+        if os.path.exists(os.path.basename(pdffile)):
+            pdffile = os.path.basename(pdffile)
+            for ext in "aux", "out", "toc", "log":
+                filename = os.path.splitext(pdffile)[0] + "." + ext
+                if os.path.exists(filename):
+                    os.remove(filename)
+        else:
+            print()
+            print(output)
+            print()
+            raise RuntimeError("latex did not produce pdf file: " + pdffile)
+
+    # Copy pdffile
+    if not os.path.exists(outfile) or not os.path.samefile(pdffile, outfile):
+        if verbose:
+            print()
+            print(f"move {pdffile} to {outfile}")
+        shutil.move(pdffile, outfile)
+
+
+
+ +
+ + + + + + +
+ +
+ +
+ + + + + + + + + + + + + +
+
+ + + + + +
+ + + +
+ + + +
+
+
+
+ + + + + + + + + + \ No newline at end of file diff --git a/0.6.1/api_reference/ontopy/ontology/index.html b/0.6.1/api_reference/ontopy/ontology/index.html new file mode 100644 index 000000000..08b69dd65 --- /dev/null +++ b/0.6.1/api_reference/ontopy/ontology/index.html @@ -0,0 +1,8187 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + ontology - EMMOntoPy + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + Skip to content + + +
+
+ +
+ + + + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + +

ontology

+ + +
+ + +
+ +

A module adding additional functionality to owlready2.

+

If desirable some of these additions may be moved back into owlready2.

+ + + +
+ + + + + + + + +
+ + + +

+ +BlankNode + + + +

+ +
+ +

Represents a blank node.

+

A blank node is a node that is not a literal and has no IRI. +Resources represented by blank nodes are also called anonumous resources. +Only the subject or object in an RDF triple can be a blank node.

+ +
+ Source code in ontopy/ontology.py +
class BlankNode:
+    """Represents a blank node.
+
+    A blank node is a node that is not a literal and has no IRI.
+    Resources represented by blank nodes are also called anonumous resources.
+    Only the subject or object in an RDF triple can be a blank node.
+    """
+
+    def __init__(self, onto: Union[World, Ontology], storid: int):
+        """Initiate a blank node.
+
+        Args:
+            onto: Ontology or World instance.
+            storid: The storage id of the blank node.
+        """
+        if storid >= 0:
+            raise ValueError(
+                f"A BlankNode is supposed to have a negative storid: {storid}"
+            )
+        self.onto = onto
+        self.storid = storid
+
+    def __repr__(self):
+        return repr(f"_:b{-self.storid}")
+
+    def __hash__(self):
+        return hash((self.onto, self.storid))
+
+    def __eq__(self, other):
+        """For now blank nodes always compare true against each other."""
+        return isinstance(other, BlankNode)
+
+
+ + + +
+ + + + + + + + + +
+ + + +

+__init__(self, onto, storid) + + + special + + +

+ +
+ +

Initiate a blank node.

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
ontoUnion[ontopy.ontology.World, ontopy.ontology.Ontology]

Ontology or World instance.

required
storidint

The storage id of the blank node.

required
+
+ Source code in ontopy/ontology.py +
def __init__(self, onto: Union[World, Ontology], storid: int):
+    """Initiate a blank node.
+
+    Args:
+        onto: Ontology or World instance.
+        storid: The storage id of the blank node.
+    """
+    if storid >= 0:
+        raise ValueError(
+            f"A BlankNode is supposed to have a negative storid: {storid}"
+        )
+    self.onto = onto
+    self.storid = storid
+
+
+
+ +
+ + + + + +
+ +
+ +
+ + + +
+ + + +

+ +Ontology (Ontology) + + + + +

+ +
+ +

A generic class extending owlready2.Ontology.

+ +
+ Source code in ontopy/ontology.py +
class Ontology(owlready2.Ontology):  # pylint: disable=too-many-public-methods
+    """A generic class extending owlready2.Ontology."""
+
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        self.label_annotations = DEFAULT_LABEL_ANNOTATIONS[:]
+        self.prefix = None
+
+    # Name of special unlabeled entities, like Thing, Nothing, etc...
+    _special_labels = None
+
+    # Some properties for customising dir() listing - useful in
+    # interactive sessions...
+    _dir_preflabel = isinteractive()
+    _dir_label = isinteractive()
+    _dir_name = False
+    _dir_imported = isinteractive()
+    dir_preflabel = property(
+        fget=lambda self: self._dir_preflabel,
+        fset=lambda self, v: setattr(self, "_dir_preflabel", bool(v)),
+        doc="Whether to include entity prefLabel in dir() listing.",
+    )
+    dir_label = property(
+        fget=lambda self: self._dir_label,
+        fset=lambda self, v: setattr(self, "_dir_label", bool(v)),
+        doc="Whether to include entity label in dir() listing.",
+    )
+    dir_name = property(
+        fget=lambda self: self._dir_name,
+        fset=lambda self, v: setattr(self, "_dir_name", bool(v)),
+        doc="Whether to include entity name in dir() listing.",
+    )
+    dir_imported = property(
+        fget=lambda self: self._dir_imported,
+        fset=lambda self, v: setattr(self, "_dir_imported", bool(v)),
+        doc="Whether to include imported ontologies in dir() listing.",
+    )
+
+    # Other settings
+    _colon_in_label = False
+    colon_in_label = property(
+        fget=lambda self: self._colon_in_label,
+        fset=lambda self, v: setattr(self, "_colon_in_label", bool(v)),
+        doc="Whether to accept colon in name-part of IRI.  "
+        "If true, the name cannot be prefixed.",
+    )
+
+    def __dir__(self):
+        dirset = set(super().__dir__())
+        lst = list(self.get_entities(imported=self._dir_imported))
+        if self._dir_preflabel:
+            dirset.update(
+                str(dir.prefLabel.first())
+                for dir in lst
+                if hasattr(dir, "prefLabel")
+            )
+        if self._dir_label:
+            dirset.update(
+                str(dir.label.first()) for dir in lst if hasattr(dir, "label")
+            )
+        if self._dir_name:
+            dirset.update(dir.name for dir in lst if hasattr(dir, "name"))
+        dirset.difference_update({None})  # get rid of possible None
+        return sorted(dirset)
+
+    def __getitem__(self, name):
+        item = super().__getitem__(name)
+        if not item:
+            item = self.get_by_label(name)
+        return item
+
+    def __getattr__(self, name):
+        attr = super().__getattr__(name)
+        if not attr:
+            attr = self.get_by_label(name)
+        return attr
+
+    def __contains__(self, other):
+        if self.world[other]:
+            return True
+        try:
+            self.get_by_label(other)
+        except NoSuchLabelError:
+            return False
+        return True
+
+    def __objclass__(self):
+        # Play nice with inspect...
+        pass
+
+    def __hash__(self):
+        """Returns a hash based on base_iri.
+        This is done to keep Ontology hashable when defining __eq__.
+        """
+        return hash(self.base_iri)
+
+    def __eq__(self, other):
+        """Checks if this ontology is equal to `other`.
+
+        This function compares the result of
+        ``set(self.get_unabbreviated_triples(label='_:b'))``,
+        i.e. blank nodes are not distinguished, but relations to blank
+        nodes are included.
+        """
+        return set(self.get_unabbreviated_triples(blank="_:b")) == set(
+            other.get_unabbreviated_triples(blank="_:b")
+        )
+
+    def get_unabbreviated_triples(
+        self, subject=None, predicate=None, obj=None, blank=None
+    ):
+        """Returns all matching triples unabbreviated.
+
+        If `blank` is given, it will be used to represent blank nodes.
+        """
+        # pylint: disable=invalid-name
+        return _get_unabbreviated_triples(
+            self, subject=subject, predicate=predicate, obj=obj, blank=blank
+        )
+
+    def set_default_label_annotations(self):
+        """Sets the default label annotations."""
+        warnings.warn(
+            "Ontology.set_default_label_annotations() is deprecated. "
+            "Default label annotations are set by Ontology.__init__(). ",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        self.label_annotations = DEFAULT_LABEL_ANNOTATIONS[:]
+
+    def get_by_label(
+        self,
+        label: str,
+        label_annotations: str = None,
+        prefix: str = None,
+        imported: bool = True,
+        colon_in_label: bool = None,
+    ):
+        """Returns entity with label annotation `label`.
+
+        Arguments:
+           label: label so search for.
+               May be written as 'label' or 'prefix:label'.
+               get_by_label('prefix:label') ==
+               get_by_label('label', prefix='prefix').
+           label_annotations: a sequence of label annotation names to look up.
+               Defaults to the `label_annotations` property.
+           prefix: if provided, it should be the last component of
+               the base iri of an ontology (with trailing slash (/) or hash
+               (#) stripped off).  The search for a matching label will be
+               limited to this namespace.
+           imported: Whether to also look for `label` in imported ontologies.
+           colon_in_label: Whether to accept colon (:) in a label or name-part
+               of IRI.  Defaults to the `colon_in_label` property of `self`.
+               Setting this true cannot be combined with `prefix`.
+
+        If several entities have the same label, only the one which is
+        found first is returned.Use get_by_label_all() to get all matches.
+
+        Note, if different prefixes are provided in the label and via
+        the `prefix` argument a warning will be issued and the
+        `prefix` argument will take precedence.
+
+        A NoSuchLabelError is raised if `label` cannot be found.
+        """
+        # pylint: disable=too-many-arguments,too-many-branches,invalid-name
+        if not isinstance(label, str):
+            raise TypeError(
+                f"Invalid label definition, must be a string: '{label}'"
+            )
+
+        if label_annotations is None:
+            label_annotations = self.label_annotations
+
+        if colon_in_label is None:
+            colon_in_label = self._colon_in_label
+        if colon_in_label:
+            if prefix:
+                raise ValueError(
+                    "`prefix` cannot be combined with `colon_in_label`"
+                )
+        else:
+            splitlabel = label.split(":", 1)
+            if len(splitlabel) == 2 and not splitlabel[1].startswith("//"):
+                label = splitlabel[1]
+                if prefix and prefix != splitlabel[0]:
+                    warnings.warn(
+                        f"Prefix given both as argument ({prefix}) "
+                        f"and in label ({splitlabel[0]}). "
+                        "Prefix given in argument takes precedence. "
+                    )
+                if not prefix:
+                    prefix = splitlabel[0]
+
+        if prefix:
+            entityset = self.get_by_label_all(
+                label,
+                label_annotations=label_annotations,
+                prefix=prefix,
+            )
+            if len(entityset) == 1:
+                return entityset.pop()
+            if len(entityset) > 1:
+                raise AmbiguousLabelError(
+                    f"Several entities have the same label '{label}' "
+                    f"with prefix '{prefix}'."
+                )
+            raise NoSuchLabelError(
+                f"No label annotations matches for '{label}' "
+                f"with prefix '{prefix}'."
+            )
+
+        # Label is a full IRI
+        entity = self.world[label]
+        if entity:
+            return entity
+
+        get_triples = (
+            self.world._get_data_triples_spod_spod
+            if imported
+            else self._get_data_triples_spod_spod
+        )
+
+        for storid in self._to_storids(label_annotations):
+            for s, _, _, _ in get_triples(None, storid, label, None):
+                return self.world[self._unabbreviate(s)]
+
+        # Special labels
+        if self._special_labels and label in self._special_labels:
+            return self._special_labels[label]
+
+        # Check if label is a name under base_iri
+        entity = self.world[self.base_iri + label]
+        if entity:
+            return entity
+
+        # Check label is the name of an entity
+        for entity in self.get_entities(imported=imported):
+            if label == entity.name:
+                return entity
+
+        raise NoSuchLabelError(f"No label annotations matches '{label}'")
+
+    def get_by_label_all(
+        self,
+        label,
+        label_annotations=None,
+        prefix=None,
+        exact_match=False,
+    ) -> "Set[Optional[owlready2.entity.EntityClass]]":
+        """Returns set of entities with label annotation `label`.
+
+        Arguments:
+           label: label so search for.
+               May be written as 'label' or 'prefix:label'.  Wildcard matching
+               using glob pattern is also supported if `exact_match` is set to
+               false.
+           label_annotations: a sequence of label annotation names to look up.
+               Defaults to the `label_annotations` property.
+           prefix: if provided, it should be the last component of
+               the base iri of an ontology (with trailing slash (/) or hash
+               (#) stripped off).  The search for a matching label will be
+               limited to this namespace.
+           exact_match: Do not treat "*" and brackets as special characters
+               when matching.  May be useful if your ontology has labels
+               containing such labels.
+
+        Returns:
+            Set of all matching entities or an empty set if no matches
+            could be found.
+        """
+        if not isinstance(label, str):
+            raise TypeError(
+                f"Invalid label definition, " f"must be a string: {label!r}"
+            )
+        if " " in label:
+            raise ValueError(
+                f"Invalid label definition, {label!r} contains spaces."
+            )
+
+        if label_annotations is None:
+            label_annotations = self.label_annotations
+
+        entities = set()
+
+        # Check label annotations
+        if exact_match:
+            for storid in self._to_storids(label_annotations):
+                entities.update(
+                    self.world._get_by_storid(s)
+                    for s, _, _ in self.world._get_data_triples_spod_spod(
+                        None, storid, str(label), None
+                    )
+                )
+        else:
+            for storid in self._to_storids(label_annotations):
+                label_entity = self._unabbreviate(storid)
+                key = (
+                    label_entity.name
+                    if hasattr(label_entity, "name")
+                    else label_entity
+                )
+                entities.update(self.world.search(**{key: label}))
+
+        if self._special_labels and label in self._special_labels:
+            entities.update(self._special_labels[label])
+
+        # Check name-part of IRI
+        if exact_match:
+            entities.update(
+                ent for ent in self.get_entities() if ent.name == str(label)
+            )
+        else:
+            matches = fnmatch.filter(
+                (ent.name for ent in self.get_entities()), label
+            )
+            entities.update(
+                ent for ent in self.get_entities() if ent.name in matches
+            )
+
+        if prefix:
+            return set(
+                ent
+                for ent in entities
+                if ent.namespace.ontology.prefix == prefix
+            )
+        return entities
+
+    def _to_storids(self, sequence, create_if_missing=False):
+        """Return a list of storid's corresponding to the elements in the
+        sequence `sequence`.
+
+        The elements may be either be full IRIs (strings) or Owlready2
+        entities with an associated storid.
+
+        If `create_if_missing` is true, new Owlready2 entities will be
+        created for IRIs that not already are associated with an
+        entity.  Otherwise such IRIs will be skipped in the returned
+        list.
+        """
+        if not sequence:
+            return []
+        storids = []
+        for element in sequence:
+            if hasattr(element, "storid"):
+                storids.append(element.storid)
+            else:
+                storid = self.world._abbreviate(element, create_if_missing)
+                if storid:
+                    storids.append(storid)
+        return storids
+
+    def add_label_annotation(self, iri):
+        """Adds label annotation used by get_by_label()."""
+        warnings.warn(
+            "Ontology.add_label_annotations() is deprecated. "
+            "Direct modify the `label_annotations` attribute instead.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        if hasattr(iri, "iri"):
+            iri = iri.iri
+        if iri not in self.label_annotations:
+            self.label_annotations.append(iri)
+
+    def remove_label_annotation(self, iri):
+        """Removes label annotation used by get_by_label()."""
+        warnings.warn(
+            "Ontology.remove_label_annotations() is deprecated. "
+            "Direct modify the `label_annotations` attribute instead.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        if hasattr(iri, "iri"):
+            iri = iri.iri
+        try:
+            self.label_annotations.remove(iri)
+        except ValueError:
+            pass
+
+    def set_common_prefix(
+        self,
+        iri_base: str = "http://emmo.info/emmo",
+        prefix: str = "emmo",
+        visited: "Optional[Set]" = None,
+    ) -> None:
+        """Set a common prefix for all imported ontologies
+        with the same first part of the base_iri.
+
+        Args:
+            iri_base: The start of the base_iri to look for. Defaults to
+                the emmo base_iri http://emmo.info/emmo
+            prefix: the desired prefix. Defaults to emmo.
+            visited: Ontologies to skip. Only intended for internal use.
+        """
+        if visited is None:
+            visited = set()
+        if self.base_iri.startswith(iri_base):
+            self.prefix = prefix
+        for onto in self.imported_ontologies:
+            if not onto in visited:
+                visited.add(onto)
+                onto.set_common_prefix(
+                    iri_base=iri_base, prefix=prefix, visited=visited
+                )
+
+    def load(  # pylint: disable=too-many-arguments,arguments-renamed
+        self,
+        only_local=False,
+        filename=None,
+        format=None,  # pylint: disable=redefined-builtin
+        reload=None,
+        reload_if_newer=False,
+        url_from_catalog=None,
+        catalog_file="catalog-v001.xml",
+        emmo_based=True,
+        prefix=None,
+        prefix_emmo=None,
+        **kwargs,
+    ):
+        """Load the ontology.
+
+        Arguments
+        ---------
+        only_local: bool
+            Whether to only read local files.  This requires that you
+            have appended the path to the ontology to owlready2.onto_path.
+        filename: str
+            Path to file to load the ontology from.  Defaults to `base_iri`
+            provided to get_ontology().
+        format: str
+            Format of `filename`.  Default is inferred from `filename`
+            extension.
+        reload: bool
+            Whether to reload the ontology if it is already loaded.
+        reload_if_newer: bool
+            Whether to reload the ontology if the source has changed since
+            last time it was loaded.
+        url_from_catalog: bool | None
+            Whether to use catalog file to resolve the location of `base_iri`.
+            If None, the catalog file is used if it exists in the same
+            directory as `filename`.
+        catalog_file: str
+            Name of Protègè catalog file in the same folder as the
+            ontology.  This option is used together with `only_local` and
+            defaults to "catalog-v001.xml".
+        emmo_based: bool
+            Whether this is an EMMO-based ontology or not, default `True`.
+        prefix: defaults to self.get_namespace.name if
+        prefix_emmo: bool, default None. If emmo_based is True it
+            defaults to True and sets the prefix of all imported ontologies
+            with base_iri starting with 'http://emmo.info/emmo' to emmo
+        kwargs:
+            Additional keyword arguments are passed on to
+            owlready2.Ontology.load().
+        """
+        # TODO: make sure that `only_local` argument is respected...
+
+        if self.loaded:
+            return self
+        self._load(
+            only_local=only_local,
+            filename=filename,
+            format=format,
+            reload=reload,
+            reload_if_newer=reload_if_newer,
+            url_from_catalog=url_from_catalog,
+            catalog_file=catalog_file,
+            **kwargs,
+        )
+
+        # Enable optimised search by get_by_label()
+        if self._special_labels is None and emmo_based:
+            top = self.world["http://www.w3.org/2002/07/owl#topObjectProperty"]
+            self._special_labels = {
+                "Thing": owlready2.Thing,
+                "Nothing": owlready2.Nothing,
+                "topObjectProperty": top,
+                "owl:Thing": owlready2.Thing,
+                "owl:Nothing": owlready2.Nothing,
+                "owl:topObjectProperty": top,
+            }
+        # set prefix if another prefix is desired
+        # if we do this, shouldn't we make the name of all
+        # entities of the given ontology to the same?
+        if prefix:
+            self.prefix = prefix
+        else:
+            self.prefix = self.name
+
+        if emmo_based and prefix_emmo is None:
+            prefix_emmo = True
+        if prefix_emmo:
+            self.set_common_prefix()
+
+        return self
+
+    def _load(  # pylint: disable=too-many-arguments,too-many-locals,too-many-branches,too-many-statements
+        self,
+        only_local=False,
+        filename=None,
+        format=None,  # pylint: disable=redefined-builtin
+        reload=None,
+        reload_if_newer=False,
+        url_from_catalog=None,
+        catalog_file="catalog-v001.xml",
+        **kwargs,
+    ):
+        """Help function for load()."""
+        web_protocol = "http://", "https://", "ftp://"
+        url = str(filename) if filename else self.base_iri.rstrip("/#")
+        if url.startswith(web_protocol):
+            baseurl = os.path.dirname(url)
+            catalogurl = baseurl + "/" + catalog_file
+        else:
+            if url.startswith("file://"):
+                url = url[7:]
+            url = os.path.normpath(os.path.abspath(url))
+            baseurl = os.path.dirname(url)
+            catalogurl = os.path.join(baseurl, catalog_file)
+
+        def getmtime(path):
+            if os.path.exists(path):
+                return os.path.getmtime(path)
+            return 0.0
+
+        # Resolve url from catalog file
+        iris = {}
+        dirs = set()
+        if url_from_catalog or url_from_catalog is None:
+            not_reload = not reload and (
+                not reload_if_newer
+                or getmtime(catalogurl)
+                > self.world._cached_catalogs[catalogurl][0]
+            )
+            # get iris from catalog already in cached catalogs
+            if catalogurl in self.world._cached_catalogs and not_reload:
+                _, iris, dirs = self.world._cached_catalogs[catalogurl]
+            # do not update cached_catalogs if url already in _iri_mappings
+            # and reload not forced
+            elif url in self.world._iri_mappings and not_reload:
+                pass
+            # update iris from current catalogurl
+            else:
+                try:
+                    iris, dirs = read_catalog(
+                        uri=catalogurl,
+                        recursive=False,
+                        return_paths=True,
+                        catalog_file=catalog_file,
+                    )
+                except ReadCatalogError:
+                    if url_from_catalog is not None:
+                        raise
+                    self.world._cached_catalogs[catalogurl] = (0.0, {}, set())
+                else:
+                    self.world._cached_catalogs[catalogurl] = (
+                        getmtime(catalogurl),
+                        iris,
+                        dirs,
+                    )
+            self.world._iri_mappings.update(iris)
+        resolved_url = self.world._iri_mappings.get(url, url)
+        # Append paths from catalog file to onto_path
+        for path in sorted(dirs, reverse=True):
+            if path not in owlready2.onto_path:
+                owlready2.onto_path.append(path)
+
+        # Use catalog file to update IRIs of imported ontologies
+        # in internal store and try to load again...
+        if self.world._iri_mappings:
+            for abbrev_iri in self.world._get_obj_triples_sp_o(
+                self.storid, owlready2.owl_imports
+            ):
+                iri = self._unabbreviate(abbrev_iri)
+                if iri in self.world._iri_mappings:
+                    self._del_obj_triple_spo(
+                        self.storid, owlready2.owl_imports, abbrev_iri
+                    )
+                    self._add_obj_triple_spo(
+                        self.storid,
+                        owlready2.owl_imports,
+                        self._abbreviate(self.world._iri_mappings[iri]),
+                    )
+
+        # Load ontology
+        try:
+            self.loaded = False
+            fmt = format if format else guess_format(resolved_url, fmap=FMAP)
+            if fmt and fmt not in OWLREADY2_FORMATS:
+                # Convert filename to rdfxml before passing it to owlready2
+                graph = rdflib.Graph()
+                try:
+                    graph.parse(resolved_url, format=fmt)
+                except URLError as err:
+                    raise EMMOntoPyException(
+                        "URL error", err, resolved_url
+                    ) from err
+
+                with tempfile.NamedTemporaryFile() as handle:
+                    graph.serialize(destination=handle, format="xml")
+                    handle.seek(0)
+                    return super().load(
+                        only_local=True,
+                        fileobj=handle,
+                        reload=reload,
+                        reload_if_newer=reload_if_newer,
+                        format="rdfxml",
+                        **kwargs,
+                    )
+            elif resolved_url.startswith(web_protocol):
+                return super().load(
+                    only_local=only_local,
+                    reload=reload,
+                    reload_if_newer=reload_if_newer,
+                    **kwargs,
+                )
+
+            else:
+                with open(resolved_url, "rb") as handle:
+                    return super().load(
+                        only_local=only_local,
+                        fileobj=handle,
+                        reload=reload,
+                        reload_if_newer=reload_if_newer,
+                        **kwargs,
+                    )
+        except owlready2.OwlReadyOntologyParsingError:
+            # Owlready2 is not able to parse the ontology - most
+            # likely because imported ontologies must be resolved
+            # using the catalog file.
+
+            # Reraise if we don't want to read from the catalog file
+            if not url_from_catalog and url_from_catalog is not None:
+                raise
+
+            warnings.warn(
+                "Recovering from Owlready2 parsing error... might be deprecated"
+            )
+
+            # Copy the ontology into a local folder and try again
+            with tempfile.TemporaryDirectory() as handle:
+                output = os.path.join(handle, os.path.basename(resolved_url))
+                convert_imported(
+                    input_ontology=resolved_url,
+                    output_ontology=output,
+                    input_format=fmt,
+                    output_format="xml",
+                    url_from_catalog=url_from_catalog,
+                    catalog_file=catalog_file,
+                )
+
+                self.loaded = False
+                with open(output, "rb") as handle:
+                    try:
+                        return super().load(
+                            only_local=True,
+                            fileobj=handle,
+                            reload=reload,
+                            reload_if_newer=reload_if_newer,
+                            format="rdfxml",
+                            **kwargs,
+                        )
+                    except HTTPError as exc:  # Add url to HTTPError message
+                        raise HTTPError(
+                            url=exc.url,
+                            code=exc.code,
+                            msg=f"{exc.url}: {exc.msg}",
+                            hdrs=exc.hdrs,
+                            fp=exc.fp,
+                        ).with_traceback(exc.__traceback__)
+
+        except HTTPError as exc:  # Add url to HTTPError message
+            raise HTTPError(
+                url=exc.url,
+                code=exc.code,
+                msg=f"{exc.url}: {exc.msg}",
+                hdrs=exc.hdrs,
+                fp=exc.fp,
+            ).with_traceback(exc.__traceback__)
+
+    def save(
+        self,
+        filename=None,
+        format=None,
+        dir=".",
+        mkdir=False,
+        overwrite=False,
+        recursive=False,
+        squash=False,
+        write_catalog_file=False,
+        append_catalog=False,
+        catalog_file="catalog-v001.xml",
+    ):
+        """Writes the ontology to file.
+
+        Parameters
+        ----------
+        filename: None | str | Path
+            Name of file to write to.  If None, it defaults to the name
+            of the ontology with `format` as file extension.
+        format: str
+            Output format. The default is to infer it from `filename`.
+        dir: str | Path
+            If `filename` is a relative path, it is a relative path to `dir`.
+        mkdir: bool
+            Whether to create output directory if it does not exist.
+        overwrite: bool
+            If true and `filename` exists, remove the existing file before
+            saving.  The default is to append to an existing ontology.
+        recursive: bool
+            Whether to save imported ontologies recursively.  This is
+            commonly combined with `filename=None`, `dir` and `mkdir`.
+        squash: bool
+            If true, rdflib will be used to save the current ontology
+            together with all its sub-ontologies into `filename`.
+            It makes no sense to combine this with `recursive`.
+        write_catalog_file: bool
+            Whether to also write a catalog file to disk.
+        append_catalog: bool
+            Whether to append to an existing catalog file.
+        catalog_file: str | Path
+            Name of catalog file.  If not an absolute path, it is prepended
+            to `dir`.
+        """
+        # pylint: disable=redefined-builtin,too-many-arguments
+        # pylint: disable=too-many-statements,too-many-branches
+        # pylint: disable=too-many-locals,arguments-renamed
+        if not _validate_installed_version(
+            package="rdflib", min_version="6.0.0"
+        ) and format == FMAP.get("ttl", ""):
+            from rdflib import (  # pylint: disable=import-outside-toplevel
+                __version__ as __rdflib_version__,
+            )
+
+            warnings.warn(
+                IncompatibleVersion(
+                    "To correctly convert to Turtle format, rdflib must be "
+                    "version 6.0.0 or greater, however, the detected rdflib "
+                    "version used by your Python interpreter is "
+                    f"{__rdflib_version__!r}. For more information see the "
+                    "'Known issues' section of the README."
+                )
+            )
+
+        revmap = {value: key for key, value in FMAP.items()}
+        if filename is None:
+            if format:
+                fmt = revmap.get(format, format)
+                filename = f"{self.name}.{fmt}"
+            else:
+                raise TypeError("`filename` and `format` cannot both be None.")
+        filename = os.path.join(dir, filename)
+        dir = Path(filename).resolve().parent
+
+        if mkdir:
+            outdir = Path(filename).parent.resolve()
+            if not outdir.exists():
+                outdir.mkdir(parents=True)
+
+        if not format:
+            format = guess_format(filename, fmap=FMAP)
+        fmt = revmap.get(format, format)
+
+        if overwrite and filename and os.path.exists(filename):
+            os.remove(filename)
+
+        EMMO = rdflib.Namespace(  # pylint:disable=invalid-name
+            "http://emmo.info/emmo#"
+        )
+
+        if recursive:
+            if squash:
+                raise ValueError(
+                    "`recursive` and `squash` should not both be true"
+                )
+            layout = directory_layout(self)
+
+            for onto, path in layout.items():
+                fname = Path(dir) / f"{path}.{fmt}"
+                onto.save(
+                    filename=fname,
+                    format=format,
+                    dir=dir,
+                    mkdir=mkdir,
+                    overwrite=overwrite,
+                    recursive=False,
+                    squash=False,
+                    write_catalog_file=False,
+                )
+
+            if write_catalog_file:
+                catalog_files = set()
+                irimap = {}
+                for onto, path in layout.items():
+                    irimap[
+                        onto.get_version(as_iri=True)
+                    ] = f"{dir}/{path}.{fmt}"
+                    catalog_files.add(Path(path).parent / catalog_file)
+
+                for catfile in catalog_files:
+                    write_catalog(
+                        irimap.copy(),
+                        output=catfile,
+                        directory=dir,
+                        append=append_catalog,
+                    )
+
+        elif write_catalog_file:
+            write_catalog(
+                {self.get_version(as_iri=True): filename},
+                output=catalog_file,
+                directory=dir,
+                append=append_catalog,
+            )
+
+        if squash:
+            from rdflib import (  # pylint:disable=import-outside-toplevel
+                URIRef,
+                RDF,
+                OWL,
+            )
+
+            graph = self.world.as_rdflib_graph()
+            graph.namespace_manager.bind("emmo", EMMO)
+
+            # Remove anonymous namespace and imports
+            graph.remove((URIRef("http://anonymous"), RDF.type, OWL.Ontology))
+            imports = list(graph.triples((None, OWL.imports, None)))
+            for triple in imports:
+                graph.remove(triple)
+
+            graph.serialize(destination=filename, format=format)
+        elif format in OWLREADY2_FORMATS:
+            super().save(file=filename, format=fmt)
+        else:
+            # The try-finally clause is needed for cleanup and because
+            # we have to provide delete=False to NamedTemporaryFile
+            # since Windows does not allow to reopen an already open
+            # file.
+            try:
+                with tempfile.NamedTemporaryFile(
+                    suffix=".owl", delete=False
+                ) as handle:
+                    tmpfile = handle.name
+                super().save(tmpfile, format="ntriples")
+                graph = rdflib.Graph()
+                graph.parse(tmpfile, format="ntriples")
+                graph.serialize(destination=filename, format=format)
+            finally:
+                os.remove(tmpfile)
+
+    def get_imported_ontologies(self, recursive=False):
+        """Return a list with imported ontologies.
+
+        If `recursive` is `True`, ontologies imported by imported ontologies
+        are also returned.
+        """
+
+        def rec_imported(onto):
+            for ontology in onto.imported_ontologies:
+                if ontology not in imported:
+                    imported.add(ontology)
+                    rec_imported(ontology)
+
+        if recursive:
+            imported = set()
+            rec_imported(self)
+            return list(imported)
+
+        return self.imported_ontologies
+
+    def get_entities(  # pylint: disable=too-many-arguments
+        self,
+        imported=True,
+        classes=True,
+        individuals=True,
+        object_properties=True,
+        data_properties=True,
+        annotation_properties=True,
+    ):
+        """Return a generator over (optionally) all classes, individuals,
+        object_properties, data_properties and annotation_properties.
+
+        If `imported` is `True`, entities in imported ontologies will also
+        be included.
+        """
+        generator = []
+        if classes:
+            generator.append(self.classes(imported))
+        if individuals:
+            generator.append(self.individuals(imported))
+        if object_properties:
+            generator.append(self.object_properties(imported))
+        if data_properties:
+            generator.append(self.data_properties(imported))
+        if annotation_properties:
+            generator.append(self.annotation_properties(imported))
+        for entity in itertools.chain(*generator):
+            yield entity
+
+    def classes(self, imported=False):
+        """Returns an generator over all classes.
+
+        Arguments:
+            imported: if `True`, entities in imported ontologies
+                are also returned.
+        """
+        return self._entities("classes", imported=imported)
+
+    def _entities(
+        self, entity_type, imported=False
+    ):  # pylint: disable=too-many-branches
+        """Returns an generator over all entities of the desired type.
+        This is a helper function for `classes()`, `individuals()`,
+        `object_properties()`, `data_properties()` and
+        `annotation_properties()`.
+
+        Arguments:
+            entity_type: The type of entity desired given as a string.
+                Can be any of `classes`, `individuals`,
+                `object_properties`, `data_properties` and
+                `annotation_properties`.
+            imported: if `True`, entities in imported ontologies
+                are also returned.
+        """
+
+        generator = []
+        if imported:
+            ontologies = self.get_imported_ontologies(recursive=True)
+            ontologies.append(self)
+            for onto in ontologies:
+                if entity_type == "classes":
+                    for cls in list(onto.classes()):
+                        generator.append(cls)
+                elif entity_type == "individuals":
+                    for ind in list(onto.individuals()):
+                        generator.append(ind)
+                elif entity_type == "object_properties":
+                    for prop in list(onto.object_properties()):
+                        generator.append(prop)
+                elif entity_type == "data_properties":
+                    for prop in list(onto.data_properties()):
+                        generator.append(prop)
+                elif entity_type == "annotation_properties":
+                    for prop in list(onto.annotation_properties()):
+                        generator.append(prop)
+        else:
+            if entity_type == "classes":
+                generator = super().classes()
+            elif entity_type == "individuals":
+                generator = super().individuals()
+            elif entity_type == "object_properties":
+                generator = super().object_properties()
+            elif entity_type == "data_properties":
+                generator = super().data_properties()
+            elif entity_type == "annotation_properties":
+                generator = super().annotation_properties()
+
+        for entity in generator:
+            yield entity
+
+    def individuals(self, imported=False):
+        """Returns an generator over all individuals.
+
+        Arguments:
+            imported: if `True`, entities in imported ontologies
+                are also returned.
+        """
+        return self._entities("individuals", imported=imported)
+
+    def object_properties(self, imported=False):
+        """Returns an generator over all object_properties.
+
+        Arguments:
+            imported: if `True`, entities in imported ontologies
+                are also returned.
+        """
+        return self._entities("object_properties", imported=imported)
+
+    def data_properties(self, imported=False):
+        """Returns an generator over all data_properties.
+
+        Arguments:
+            imported: if `True`, entities in imported ontologies
+                are also returned.
+        """
+        return self._entities("data_properties", imported=imported)
+
+    def annotation_properties(self, imported=False):
+        """Returns an generator over all annotation_properties.
+
+        Arguments:
+            imported: if `True`, entities in imported ontologies
+                are also returned.
+
+        """
+        return self._entities("annotation_properties", imported=imported)
+
+    def get_root_classes(self, imported=False):
+        """Returns a list or root classes."""
+        return [
+            cls
+            for cls in self.classes(imported=imported)
+            if not cls.ancestors().difference(set([cls, owlready2.Thing]))
+        ]
+
+    def get_root_object_properties(self, imported=False):
+        """Returns a list of root object properties."""
+        props = set(self.object_properties(imported=imported))
+        return [p for p in props if not props.intersection(p.is_a)]
+
+    def get_root_data_properties(self, imported=False):
+        """Returns a list of root object properties."""
+        props = set(self.data_properties(imported=imported))
+        return [p for p in props if not props.intersection(p.is_a)]
+
+    def get_roots(self, imported=False):
+        """Returns all class, object_property and data_property roots."""
+        roots = self.get_root_classes(imported=imported)
+        roots.extend(self.get_root_object_properties(imported=imported))
+        roots.extend(self.get_root_data_properties(imported=imported))
+        return roots
+
+    def sync_python_names(self, annotations=("prefLabel", "label", "altLabel")):
+        """Update the `python_name` attribute of all properties.
+
+        The python_name attribute will be set to the first non-empty
+        annotation in the sequence of annotations in `annotations` for
+        the property.
+        """
+
+        def update(gen):
+            for prop in gen:
+                for annotation in annotations:
+                    if hasattr(prop, annotation) and getattr(prop, annotation):
+                        prop.python_name = getattr(prop, annotation).first()
+                        break
+
+        update(
+            self.get_entities(
+                classes=False,
+                individuals=False,
+                object_properties=False,
+                data_properties=False,
+            )
+        )
+        update(
+            self.get_entities(
+                classes=False, individuals=False, annotation_properties=False
+            )
+        )
+
+    def rename_entities(
+        self,
+        annotations=("prefLabel", "label", "altLabel"),
+    ):
+        """Set `name` of all entities to the first non-empty annotation in
+        `annotations`.
+
+        Warning, this method changes all IRIs in the ontology.  However,
+        it may be useful to make the ontology more readable and to work
+        with it together with a triple store.
+        """
+        for entity in self.get_entities():
+            for annotation in annotations:
+                if hasattr(entity, annotation):
+                    name = getattr(entity, annotation).first()
+                    if name:
+                        entity.name = name
+                        break
+
+    def sync_reasoner(
+        self, reasoner="HermiT", include_imported=False, **kwargs
+    ):
+        """Update current ontology by running the given reasoner.
+
+        Supported values for `reasoner` are 'HermiT' (default), 'Pellet'
+        and 'FaCT++'.
+
+        If `include_imported` is true, the reasoner will also reason
+        over imported ontologies.  Note that this may be **very** slow.
+
+        Keyword arguments are passed to the underlying owlready2 function.
+        """
+        if reasoner == "FaCT++":
+            sync = sync_reasoner_factpp
+        elif reasoner == "Pellet":
+            sync = owlready2.sync_reasoner_pellet
+        elif reasoner == "HermiT":
+            sync = owlready2.sync_reasoner_hermit
+        else:
+            raise ValueError(
+                f"Unknown reasoner '{reasoner}'. Supported reasoners "
+                "are 'Pellet', 'HermiT' and 'FaCT++'."
+            )
+
+        # For some reason we must visit all entities once before running
+        # the reasoner...
+        list(self.get_entities())
+
+        with self:
+            if include_imported:
+                sync(self.world, **kwargs)
+            else:
+                sync(self, **kwargs)
+
+    def sync_attributes(  # pylint: disable=too-many-branches
+        self,
+        name_policy=None,
+        name_prefix="",
+        class_docstring="comment",
+        sync_imported=False,
+    ):
+        """This method is intended to be called after you have added new
+        classes (typically via Python) to make sure that attributes like
+        `label` and `comments` are defined.
+
+        If a class, object property, data property or annotation
+        property in the current ontology has no label, the name of
+        the corresponding Python class will be assigned as label.
+
+        If a class, object property, data property or annotation
+        property has no comment, it will be assigned the docstring of
+        the corresponding Python class.
+
+        `name_policy` specifies whether and how the names in the ontology
+        should be updated.  Valid values are:
+          None          not changed
+          "uuid"        `name_prefix` followed by a global unique id (UUID).
+                        If the name is already valid according to this standard
+                        it will not be regenerated.
+          "sequential"  `name_prefix` followed a sequantial number.
+        EMMO conventions imply ``name_policy=='uuid'``.
+
+        If `sync_imported` is true, all imported ontologies are also
+        updated.
+
+        The `class_docstring` argument specifies the annotation that
+        class docstrings are mapped to.  Defaults to "comment".
+        """
+        for cls in itertools.chain(
+            self.classes(),
+            self.object_properties(),
+            self.data_properties(),
+            self.annotation_properties(),
+        ):
+            if not hasattr(cls, "prefLabel"):
+                # no prefLabel - create new annotation property..
+                with self:
+                    # pylint: disable=invalid-name,missing-class-docstring
+                    # pylint: disable=unused-variable
+                    class prefLabel(owlready2.label):
+                        pass
+
+                cls.prefLabel = [locstr(cls.__name__, lang="en")]
+            elif not cls.prefLabel:
+                cls.prefLabel.append(locstr(cls.__name__, lang="en"))
+            if class_docstring and hasattr(cls, "__doc__") and cls.__doc__:
+                getattr(cls, class_docstring).append(
+                    locstr(inspect.cleandoc(cls.__doc__), lang="en")
+                )
+
+        for ind in self.individuals():
+            if not hasattr(ind, "prefLabel"):
+                # no prefLabel - create new annotation property..
+                with self:
+                    # pylint: disable=invalid-name,missing-class-docstring
+                    # pylint: disable=function-redefined
+                    class prefLabel(owlready2.label):
+                        iri = "http://www.w3.org/2004/02/skos/core#prefLabel"
+
+                ind.prefLabel = [locstr(ind.name, lang="en")]
+            elif not ind.prefLabel:
+                ind.prefLabel.append(locstr(ind.name, lang="en"))
+
+        chain = itertools.chain(
+            self.classes(),
+            self.individuals(),
+            self.object_properties(),
+            self.data_properties(),
+            self.annotation_properties(),
+        )
+        if name_policy == "uuid":
+            for obj in chain:
+                try:
+                    # Passing the following means that the name is valid
+                    # and need not be regenerated.
+                    if not obj.name.startswith(name_prefix):
+                        raise ValueError
+                    uuid.UUID(obj.name.lstrip(name_prefix), version=5)
+                except ValueError:
+                    obj.name = name_prefix + str(
+                        uuid.uuid5(uuid.NAMESPACE_DNS, obj.name)
+                    )
+        elif name_policy == "sequential":
+            for obj in chain:
+                counter = 0
+                while f"{self.base_iri}{name_prefix}{counter}" in self:
+                    counter += 1
+                obj.name = f"{name_prefix}{counter}"
+        elif name_policy is not None:
+            raise TypeError(f"invalid name_policy: {name_policy!r}")
+
+        if sync_imported:
+            for onto in self.imported_ontologies:
+                onto.sync_attributes()
+
+    def get_relations(self):
+        """Returns a generator for all relations."""
+        warnings.warn(
+            "Ontology.get_relations() is deprecated. Use "
+            "onto.object_properties() instead.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        return self.object_properties()
+
+    def get_annotations(self, entity):
+        """Returns a dict with annotations for `entity`.  Entity may be given
+        either as a ThingClass object or as a label."""
+        warnings.warn(
+            "Ontology.get_annotations(entity) is deprecated. Use "
+            "entity.get_annotations() instead.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+
+        if isinstance(entity, str):
+            entity = self.get_by_label(entity)
+        res = {"comment": getattr(entity, "comment", "")}
+        for annotation in self.annotation_properties():
+            res[annotation.label.first()] = [
+                obj.strip('"')
+                for _, _, obj in self.get_triples(
+                    entity.storid, annotation.storid, None
+                )
+            ]
+        return res
+
+    def get_branch(  # pylint: disable=too-many-arguments
+        self,
+        root,
+        leaves=(),
+        include_leaves=True,
+        strict_leaves=False,
+        exclude=None,
+        sort=False,
+    ):
+        """Returns a set with all direct and indirect subclasses of `root`.
+        Any subclass found in the sequence `leaves` will be included in
+        the returned list, but its subclasses will not.  The elements
+        of `leaves` may be ThingClass objects or labels.
+
+        Subclasses of any subclass found in the sequence `leaves` will
+        be excluded from the returned list, where the elements of `leaves`
+        may be ThingClass objects or labels.
+
+        If `include_leaves` is true, the leaves are included in the returned
+        list, otherwise they are not.
+
+        If `strict_leaves` is true, any descendant of a leaf will be excluded
+        in the returned set.
+
+        If given, `exclude` may be a sequence of classes, including
+        their subclasses, to exclude from the output.
+
+        If `sort` is True, a list sorted according to depth and label
+        will be returned instead of a set.
+        """
+
+        def _branch(root, leaves):
+            if root not in leaves:
+                branch = {
+                    root,
+                }
+                for cls in root.subclasses():
+                    # Defining a branch is actually quite tricky.  Consider
+                    # the case:
+                    #
+                    #      L isA R
+                    #      A isA L
+                    #      A isA R
+                    #
+                    # where R is the root, L is a leaf and A is a direct
+                    # child of both.  Logically, since A is a child of the
+                    # leaf we want to skip A.  But a straightforward imple-
+                    # mentation will see that A is a child of the root and
+                    # include it.  Requiring that R be a strict
+                    # parent of A solves this.
+                    if root in cls.get_parents(strict=True):
+                        branch.update(_branch(cls, leaves))
+            else:
+                branch = (
+                    {
+                        root,
+                    }
+                    if include_leaves
+                    else set()
+                )
+            return branch
+
+        if isinstance(root, str):
+            root = self.get_by_label(root)
+
+        leaves = set(
+            self.get_by_label(leaf) if isinstance(leaf, str) else leaf
+            for leaf in leaves
+        )
+        leaves.discard(root)
+
+        if exclude:
+            exclude = set(
+                self.get_by_label(e) if isinstance(e, str) else e
+                for e in exclude
+            )
+            leaves.update(exclude)
+
+        branch = _branch(root, leaves)
+
+        # Exclude all descendants of any leaf
+        if strict_leaves:
+            descendants = root.descendants()
+            for leaf in leaves:
+                if leaf in descendants:
+                    branch.difference_update(
+                        leaf.descendants(include_self=False)
+                    )
+
+        if exclude:
+            branch.difference_update(exclude)
+
+        # Sort according to depth, then by label
+        if sort:
+            branch = sorted(
+                sorted(branch, key=asstring),
+                key=lambda x: len(x.mro()),
+            )
+
+        return branch
+
+    def is_individual(self, entity):
+        """Returns true if entity is an individual."""
+        if isinstance(entity, str):
+            entity = self.get_by_label(entity)
+        return isinstance(entity, owlready2.Thing)
+
+    # FIXME - deprecate this method as soon the ThingClass property
+    #         `defined_class` works correctly in Owlready2
+    def is_defined(self, entity):
+        """Returns true if the entity is a defined class.
+
+        Deprecated, use the `is_defined` property of the classes
+        (ThingClass subclasses) instead.
+        """
+        warnings.warn(
+            "This method is deprecated.  Use the `is_defined` property of "
+            "the classes instad.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        if isinstance(entity, str):
+            entity = self.get_by_label(entity)
+        return hasattr(entity, "equivalent_to") and bool(entity.equivalent_to)
+
+    def get_version(self, as_iri=False) -> str:
+        """Returns the version number of the ontology as inferred from the
+        owl:versionIRI tag or, if owl:versionIRI is not found, from
+        owl:versionInfo.
+
+        If `as_iri` is True, the full versionIRI is returned.
+        """
+        version_iri_storid = self.world._abbreviate(
+            "http://www.w3.org/2002/07/owl#versionIRI"
+        )
+        tokens = self.get_triples(s=self.storid, p=version_iri_storid)
+        if (not tokens) and (as_iri is True):
+            raise TypeError(
+                "No owl:versionIRI "
+                f"in Ontology {self.base_iri!r}. "
+                "Search for owl:versionInfo with as_iri=False"
+            )
+        if tokens:
+            _, _, obj = tokens[0]
+            version_iri = self.world._unabbreviate(obj)
+            if as_iri:
+                return version_iri
+            return infer_version(self.base_iri, version_iri)
+
+        version_info_storid = self.world._abbreviate(
+            "http://www.w3.org/2002/07/owl#versionInfo"
+        )
+        tokens = self.get_triples(s=self.storid, p=version_info_storid)
+        if not tokens:
+            raise TypeError(
+                "No versionIRI or versionInfo " f"in Ontology {self.base_iri!r}"
+            )
+        _, _, version_info = tokens[0]
+        return version_info.split("^^")[0].strip('"')
+
+    def set_version(self, version=None, version_iri=None):
+        """Assign version to ontology by asigning owl:versionIRI.
+
+        If `version` but not `version_iri` is provided, the version
+        IRI will be the combination of `base_iri` and `version`.
+        """
+        _version_iri = "http://www.w3.org/2002/07/owl#versionIRI"
+        version_iri_storid = self.world._abbreviate(_version_iri)
+        if self._has_obj_triple_spo(  # pylint: disable=unexpected-keyword-arg
+            # For some reason _has_obj_triples_spo exists in both
+            # owlready2.namespace.Namespace (with arguments subject/predicate)
+            # and in owlready2.triplelite._GraphManager (with arguments s/p)
+            # owlready2.Ontology inherits from Namespace directly
+            # and pylint checks that.
+            # It actually accesses the one in triplelite.
+            # subject=self.storid, predicate=version_iri_storid
+            s=self.storid,
+            p=version_iri_storid,
+        ):
+            self._del_obj_triple_spo(s=self.storid, p=version_iri_storid)
+
+        if not version_iri:
+            if not version:
+                raise TypeError(
+                    "Either `version` or `version_iri` must be provided"
+                )
+            head, tail = self.base_iri.rstrip("#/").rsplit("/", 1)
+            version_iri = "/".join([head, version, tail])
+
+        self._add_obj_triple_spo(
+            s=self.storid,
+            p=self.world._abbreviate(_version_iri),
+            o=self.world._abbreviate(version_iri),
+        )
+
+    def get_graph(self, **kwargs):
+        """Returns a new graph object.  See  emmo.graph.OntoGraph.
+
+        Note that this method requires the Python graphviz package.
+        """
+        # pylint: disable=import-outside-toplevel,cyclic-import
+        from ontopy.graph import OntoGraph
+
+        return OntoGraph(self, **kwargs)
+
+    @staticmethod
+    def common_ancestors(cls1, cls2):
+        """Return a list of common ancestors for `cls1` and `cls2`."""
+        return set(cls1.ancestors()).intersection(cls2.ancestors())
+
+    def number_of_generations(self, descendant, ancestor):
+        """Return shortest distance from ancestor to descendant"""
+        if ancestor not in descendant.ancestors():
+            raise ValueError("Descendant is not a descendant of ancestor")
+        return self._number_of_generations(descendant, ancestor, 0)
+
+    def _number_of_generations(self, descendant, ancestor, counter):
+        """Recursive help function to number_of_generations(), return
+        distance between a ancestor-descendant pair (counter+1)."""
+        if descendant.name == ancestor.name:
+            return counter
+        try:
+            return min(
+                self._number_of_generations(parent, ancestor, counter + 1)
+                for parent in descendant.get_parents()
+                if ancestor in parent.ancestors()
+            )
+        except ValueError:
+            return counter
+
+    def closest_common_ancestors(self, cls1, cls2):
+        """Returns a list with closest_common_ancestor for cls1 and cls2"""
+        distances = {}
+        for ancestor in self.common_ancestors(cls1, cls2):
+            distances[ancestor] = self.number_of_generations(
+                cls1, ancestor
+            ) + self.number_of_generations(cls2, ancestor)
+        return [
+            ancestor
+            for ancestor, distance in distances.items()
+            if distance == min(distances.values())
+        ]
+
+    @staticmethod
+    def closest_common_ancestor(*classes):
+        """Returns closest_common_ancestor for the given classes."""
+        mros = [cls.mro() for cls in classes]
+        track = defaultdict(int)
+        while mros:
+            for mro in mros:
+                cur = mro.pop(0)
+                track[cur] += 1
+                if track[cur] == len(classes):
+                    return cur
+                if len(mro) == 0:
+                    mros.remove(mro)
+        raise EMMOntoPyException(
+            "A closest common ancestor should always exist !"
+        )
+
+    def get_ancestors(
+        self,
+        classes: "Union[List, ThingClass]",
+        closest: bool = False,
+        generations: int = None,
+        strict: bool = True,
+    ) -> set:
+        """Return ancestors of all classes in `classes`.
+        Args:
+            classes: class(es) for which ancestors should be returned.
+            generations: Include this number of generations, default is all.
+            closest: If True, return all ancestors up to and including the
+                closest common ancestor. Return all if False.
+            strict: If True returns only real ancestors, i.e. `classes` are
+                are not included in the returned set.
+        Returns:
+            Set of ancestors to `classes`.
+        """
+        if not isinstance(classes, Iterable):
+            classes = [classes]
+
+        ancestors = set()
+        if not classes:
+            return ancestors
+
+        def addancestors(entity, counter, subject):
+            if counter > 0:
+                for parent in entity.get_parents(strict=True):
+                    subject.add(parent)
+                    addancestors(parent, counter - 1, subject)
+
+        if closest:
+            if generations is not None:
+                raise ValueError(
+                    "Only one of `generations` or `closest` may be specified."
+                )
+
+            closest_ancestor = self.closest_common_ancestor(*classes)
+            for cls in classes:
+                ancestors.update(
+                    anc
+                    for anc in cls.ancestors()
+                    if closest_ancestor in anc.ancestors()
+                )
+        elif isinstance(generations, int):
+            for entity in classes:
+                addancestors(entity, generations, ancestors)
+        else:
+            ancestors.update(*(cls.ancestors() for cls in classes))
+
+        if strict:
+            return ancestors.difference(classes)
+        return ancestors
+
+    def get_descendants(
+        self,
+        classes: "Union[List, ThingClass]",
+        generations: int = None,
+        common: bool = False,
+    ) -> set:
+        """Return descendants/subclasses of all classes in `classes`.
+        Args:
+            classes: class(es) for which descendants are desired.
+            common: whether to only return descendants common to all classes.
+            generations: Include this number of generations, default is all.
+        Returns:
+            A set of descendants for given number of generations.
+            If 'common'=True, the common descendants are returned
+            within the specified number of generations.
+            'generations' defaults to all.
+        """
+
+        if not isinstance(classes, Iterable):
+            classes = [classes]
+
+        descendants = {name: [] for name in classes}
+
+        def _children_recursively(num, newentity, parent, descendants):
+            """Helper function to get all children up to generation."""
+            for child in self.get_children_of(newentity):
+                descendants[parent].append(child)
+                if num < generations:
+                    _children_recursively(num + 1, child, parent, descendants)
+
+        if generations == 0:
+            return set()
+
+        if not generations:
+            for entity in classes:
+                descendants[entity] = entity.descendants()
+                # only include proper descendants
+                descendants[entity].remove(entity)
+        else:
+            for entity in classes:
+                _children_recursively(1, entity, entity, descendants)
+
+        results = descendants.values()
+        if common is True:
+            return set.intersection(*map(set, results))
+        return set(flatten(results))
+
+    def get_wu_palmer_measure(self, cls1, cls2):
+        """Return Wu-Palmer measure for semantic similarity.
+
+        Returns Wu-Palmer measure for semantic similarity between
+        two concepts.
+        Wu, Palmer; ACL 94: Proceedings of the 32nd annual meeting on
+        Association for Computational Linguistics, June 1994.
+        """
+        cca = self.closest_common_ancestor(cls1, cls2)
+        ccadepth = self.number_of_generations(cca, self.Thing)
+        generations1 = self.number_of_generations(cls1, cca)
+        generations2 = self.number_of_generations(cls2, cca)
+        return 2 * ccadepth / (generations1 + generations2 + 2 * ccadepth)
+
+    def new_entity(
+        self,
+        name: str,
+        parent: Union[
+            ThingClass,
+            ObjectPropertyClass,
+            DataPropertyClass,
+            AnnotationPropertyClass,
+            Iterable,
+        ],
+        entitytype: Optional[
+            Union[
+                str,
+                ThingClass,
+                ObjectPropertyClass,
+                DataPropertyClass,
+                AnnotationPropertyClass,
+            ]
+        ] = "class",
+        preflabel: Optional[str] = None,
+    ) -> Union[
+        ThingClass,
+        ObjectPropertyClass,
+        DataPropertyClass,
+        AnnotationPropertyClass,
+    ]:
+        """Create and return new entity
+
+        Args:
+            name: name of the entity
+            parent: parent(s) of the entity
+            entitytype: type of the entity,
+                default is 'class' (str) 'ThingClass' (owlready2 Python class).
+                Other options
+                are 'data_property', 'object_property',
+                'annotation_property' (strings) or the
+                Python classes ObjectPropertyClass,
+                DataPropertyClass and AnnotationProperty classes.
+            preflabel: if given, add this as a skos:prefLabel annotation
+                to the new entity.  If None (default), `name` will
+                be added as prefLabel if skos:prefLabel is in the ontology
+                and listed in `self.label_annotations`.  Set `preflabel` to
+                False, to avoid assigning a prefLabel.
+
+        Returns:
+            the new entity.
+
+        Throws exception if name consists of more than one word, if type is not
+        one of the allowed types, or if parent is not of the correct type.
+        By default, the parent is Thing.
+
+        """
+        # pylint: disable=invalid-name
+        if " " in name:
+            raise LabelDefinitionError(
+                f"Error in label name definition '{name}': "
+                f"Label consists of more than one word."
+            )
+        parents = tuple(parent) if isinstance(parent, Iterable) else (parent,)
+        if entitytype == "class":
+            parenttype = owlready2.ThingClass
+        elif entitytype == "data_property":
+            parenttype = owlready2.DataPropertyClass
+        elif entitytype == "object_property":
+            parenttype = owlready2.ObjectPropertyClass
+        elif entitytype == "annotation_property":
+            parenttype = owlready2.AnnotationPropertyClass
+        elif entitytype in [
+            ThingClass,
+            ObjectPropertyClass,
+            DataPropertyClass,
+            AnnotationPropertyClass,
+        ]:
+            parenttype = entitytype
+        else:
+            raise EntityClassDefinitionError(
+                f"Error in entity type definition: "
+                f"'{entitytype}' is not a valid entity type."
+            )
+        for thing in parents:
+            if not isinstance(thing, parenttype):
+                raise EntityClassDefinitionError(
+                    f"Error in parent definition: "
+                    f"'{thing}' is not an {parenttype}."
+                )
+
+        with self:
+            entity = types.new_class(name, parents)
+
+            preflabel_iri = "http://www.w3.org/2004/02/skos/core#prefLabel"
+            if preflabel:
+                if not self.world[preflabel_iri]:
+                    pref_label = self.new_annotation_property(
+                        "prefLabel",
+                        parent=[owlready2.AnnotationProperty],
+                    )
+                    pref_label.iri = preflabel_iri
+                entity.prefLabel = english(preflabel)
+            elif (
+                preflabel is None
+                and preflabel_iri in self.label_annotations
+                and self.world[preflabel_iri]
+            ):
+                entity.prefLabel = english(name)
+
+        return entity
+
+    # Method that creates new ThingClass using new_entity
+    def new_class(
+        self, name: str, parent: Union[ThingClass, Iterable]
+    ) -> ThingClass:
+        """Create and return new class.
+
+        Args:
+            name: name of the class
+            parent: parent(s) of the class
+
+        Returns:
+            the new class.
+        """
+        return self.new_entity(name, parent, "class")
+
+    # Method that creates new ObjectPropertyClass using new_entity
+    def new_object_property(
+        self, name: str, parent: Union[ObjectPropertyClass, Iterable]
+    ) -> ObjectPropertyClass:
+        """Create and return new object property.
+
+        Args:
+            name: name of the object property
+            parent: parent(s) of the object property
+
+        Returns:
+            the new object property.
+        """
+        return self.new_entity(name, parent, "object_property")
+
+    # Method that creates new DataPropertyClass using new_entity
+    def new_data_property(
+        self, name: str, parent: Union[DataPropertyClass, Iterable]
+    ) -> DataPropertyClass:
+        """Create and return new data property.
+
+        Args:
+            name: name of the data property
+            parent: parent(s) of the data property
+
+        Returns:
+            the new data property.
+        """
+        return self.new_entity(name, parent, "data_property")
+
+    # Method that creates new AnnotationPropertyClass using new_entity
+    def new_annotation_property(
+        self, name: str, parent: Union[AnnotationPropertyClass, Iterable]
+    ) -> AnnotationPropertyClass:
+        """Create and return new annotation property.
+
+        Args:
+            name: name of the annotation property
+            parent: parent(s) of the annotation property
+
+        Returns:
+            the new annotation property.
+        """
+        return self.new_entity(name, parent, "annotation_property")
+
+    def difference(self, other: owlready2.Ontology) -> set:
+        """Return a set of triples that are in this, but not in the
+        `other` ontology."""
+        # pylint: disable=invalid-name
+        s1 = set(self.get_unabbreviated_triples(blank="_:b"))
+        s2 = set(other.get_unabbreviated_triples(blank="_:b"))
+        return s1.difference(s2)
+
+
+ + + +
+ + + + + + +
+ + + +

+colon_in_label + + + property + writable + + +

+ +
+ +

Whether to accept colon in name-part of IRI. If true, the name cannot be prefixed.

+
+ +
+ + + +
+ + + +

+dir_imported + + + property + writable + + +

+ +
+ +

Whether to include imported ontologies in dir() listing.

+
+ +
+ + + +
+ + + +

+dir_label + + + property + writable + + +

+ +
+ +

Whether to include entity label in dir() listing.

+
+ +
+ + + +
+ + + +

+dir_name + + + property + writable + + +

+ +
+ +

Whether to include entity name in dir() listing.

+
+ +
+ + + +
+ + + +

+dir_preflabel + + + property + writable + + +

+ +
+ +

Whether to include entity prefLabel in dir() listing.

+
+ +
+ + + + + + + +
+ + + +

+add_label_annotation(self, iri) + + +

+ +
+ +

Adds label annotation used by get_by_label().

+ +
+ Source code in ontopy/ontology.py +
def add_label_annotation(self, iri):
+    """Adds label annotation used by get_by_label()."""
+    warnings.warn(
+        "Ontology.add_label_annotations() is deprecated. "
+        "Direct modify the `label_annotations` attribute instead.",
+        DeprecationWarning,
+        stacklevel=2,
+    )
+    if hasattr(iri, "iri"):
+        iri = iri.iri
+    if iri not in self.label_annotations:
+        self.label_annotations.append(iri)
+
+
+
+ +
+ + + +
+ + + +

+annotation_properties(self, imported=False) + + +

+ +
+ +

Returns an generator over all annotation_properties.

+ +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
imported

if True, entities in imported ontologies +are also returned.

False
+
+ Source code in ontopy/ontology.py +
def annotation_properties(self, imported=False):
+    """Returns an generator over all annotation_properties.
+
+    Arguments:
+        imported: if `True`, entities in imported ontologies
+            are also returned.
+
+    """
+    return self._entities("annotation_properties", imported=imported)
+
+
+
+ +
+ + + +
+ + + +

+classes(self, imported=False) + + +

+ +
+ +

Returns an generator over all classes.

+ +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
imported

if True, entities in imported ontologies +are also returned.

False
+
+ Source code in ontopy/ontology.py +
def classes(self, imported=False):
+    """Returns an generator over all classes.
+
+    Arguments:
+        imported: if `True`, entities in imported ontologies
+            are also returned.
+    """
+    return self._entities("classes", imported=imported)
+
+
+
+ +
+ + + +
+ + + +

+closest_common_ancestor(*classes) + + + staticmethod + + +

+ +
+ +

Returns closest_common_ancestor for the given classes.

+ +
+ Source code in ontopy/ontology.py +
@staticmethod
+def closest_common_ancestor(*classes):
+    """Returns closest_common_ancestor for the given classes."""
+    mros = [cls.mro() for cls in classes]
+    track = defaultdict(int)
+    while mros:
+        for mro in mros:
+            cur = mro.pop(0)
+            track[cur] += 1
+            if track[cur] == len(classes):
+                return cur
+            if len(mro) == 0:
+                mros.remove(mro)
+    raise EMMOntoPyException(
+        "A closest common ancestor should always exist !"
+    )
+
+
+
+ +
+ + + +
+ + + +

+closest_common_ancestors(self, cls1, cls2) + + +

+ +
+ +

Returns a list with closest_common_ancestor for cls1 and cls2

+ +
+ Source code in ontopy/ontology.py +
def closest_common_ancestors(self, cls1, cls2):
+    """Returns a list with closest_common_ancestor for cls1 and cls2"""
+    distances = {}
+    for ancestor in self.common_ancestors(cls1, cls2):
+        distances[ancestor] = self.number_of_generations(
+            cls1, ancestor
+        ) + self.number_of_generations(cls2, ancestor)
+    return [
+        ancestor
+        for ancestor, distance in distances.items()
+        if distance == min(distances.values())
+    ]
+
+
+
+ +
+ + + +
+ + + +

+common_ancestors(cls1, cls2) + + + staticmethod + + +

+ +
+ +

Return a list of common ancestors for cls1 and cls2.

+ +
+ Source code in ontopy/ontology.py +
@staticmethod
+def common_ancestors(cls1, cls2):
+    """Return a list of common ancestors for `cls1` and `cls2`."""
+    return set(cls1.ancestors()).intersection(cls2.ancestors())
+
+
+
+ +
+ + + +
+ + + +

+data_properties(self, imported=False) + + +

+ +
+ +

Returns an generator over all data_properties.

+ +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
imported

if True, entities in imported ontologies +are also returned.

False
+
+ Source code in ontopy/ontology.py +
def data_properties(self, imported=False):
+    """Returns an generator over all data_properties.
+
+    Arguments:
+        imported: if `True`, entities in imported ontologies
+            are also returned.
+    """
+    return self._entities("data_properties", imported=imported)
+
+
+
+ +
+ + + +
+ + + +

+difference(self, other) + + +

+ +
+ +

Return a set of triples that are in this, but not in the +other ontology.

+ +
+ Source code in ontopy/ontology.py +
def difference(self, other: owlready2.Ontology) -> set:
+    """Return a set of triples that are in this, but not in the
+    `other` ontology."""
+    # pylint: disable=invalid-name
+    s1 = set(self.get_unabbreviated_triples(blank="_:b"))
+    s2 = set(other.get_unabbreviated_triples(blank="_:b"))
+    return s1.difference(s2)
+
+
+
+ +
+ + + +
+ + + +

+get_ancestors(self, classes, closest=False, generations=None, strict=True) + + +

+ +
+ +

Return ancestors of all classes in classes.

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
classesUnion[List, ThingClass]

class(es) for which ancestors should be returned.

required
generationsint

Include this number of generations, default is all.

None
closestbool

If True, return all ancestors up to and including the +closest common ancestor. Return all if False.

False
strictbool

If True returns only real ancestors, i.e. classes are +are not included in the returned set.

True
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
set

Set of ancestors to classes.

+
+ Source code in ontopy/ontology.py +
def get_ancestors(
+    self,
+    classes: "Union[List, ThingClass]",
+    closest: bool = False,
+    generations: int = None,
+    strict: bool = True,
+) -> set:
+    """Return ancestors of all classes in `classes`.
+    Args:
+        classes: class(es) for which ancestors should be returned.
+        generations: Include this number of generations, default is all.
+        closest: If True, return all ancestors up to and including the
+            closest common ancestor. Return all if False.
+        strict: If True returns only real ancestors, i.e. `classes` are
+            are not included in the returned set.
+    Returns:
+        Set of ancestors to `classes`.
+    """
+    if not isinstance(classes, Iterable):
+        classes = [classes]
+
+    ancestors = set()
+    if not classes:
+        return ancestors
+
+    def addancestors(entity, counter, subject):
+        if counter > 0:
+            for parent in entity.get_parents(strict=True):
+                subject.add(parent)
+                addancestors(parent, counter - 1, subject)
+
+    if closest:
+        if generations is not None:
+            raise ValueError(
+                "Only one of `generations` or `closest` may be specified."
+            )
+
+        closest_ancestor = self.closest_common_ancestor(*classes)
+        for cls in classes:
+            ancestors.update(
+                anc
+                for anc in cls.ancestors()
+                if closest_ancestor in anc.ancestors()
+            )
+    elif isinstance(generations, int):
+        for entity in classes:
+            addancestors(entity, generations, ancestors)
+    else:
+        ancestors.update(*(cls.ancestors() for cls in classes))
+
+    if strict:
+        return ancestors.difference(classes)
+    return ancestors
+
+
+
+ +
+ + + +
+ + + +

+get_annotations(self, entity) + + +

+ +
+ +

Returns a dict with annotations for entity. Entity may be given +either as a ThingClass object or as a label.

+ +
+ Source code in ontopy/ontology.py +
def get_annotations(self, entity):
+    """Returns a dict with annotations for `entity`.  Entity may be given
+    either as a ThingClass object or as a label."""
+    warnings.warn(
+        "Ontology.get_annotations(entity) is deprecated. Use "
+        "entity.get_annotations() instead.",
+        DeprecationWarning,
+        stacklevel=2,
+    )
+
+    if isinstance(entity, str):
+        entity = self.get_by_label(entity)
+    res = {"comment": getattr(entity, "comment", "")}
+    for annotation in self.annotation_properties():
+        res[annotation.label.first()] = [
+            obj.strip('"')
+            for _, _, obj in self.get_triples(
+                entity.storid, annotation.storid, None
+            )
+        ]
+    return res
+
+
+
+ +
+ + + +
+ + + +

+get_branch(self, root, leaves=(), include_leaves=True, strict_leaves=False, exclude=None, sort=False) + + +

+ +
+ +

Returns a set with all direct and indirect subclasses of root. +Any subclass found in the sequence leaves will be included in +the returned list, but its subclasses will not. The elements +of leaves may be ThingClass objects or labels.

+

Subclasses of any subclass found in the sequence leaves will +be excluded from the returned list, where the elements of leaves +may be ThingClass objects or labels.

+

If include_leaves is true, the leaves are included in the returned +list, otherwise they are not.

+

If strict_leaves is true, any descendant of a leaf will be excluded +in the returned set.

+

If given, exclude may be a sequence of classes, including +their subclasses, to exclude from the output.

+

If sort is True, a list sorted according to depth and label +will be returned instead of a set.

+ +
+ Source code in ontopy/ontology.py +
def get_branch(  # pylint: disable=too-many-arguments
+    self,
+    root,
+    leaves=(),
+    include_leaves=True,
+    strict_leaves=False,
+    exclude=None,
+    sort=False,
+):
+    """Returns a set with all direct and indirect subclasses of `root`.
+    Any subclass found in the sequence `leaves` will be included in
+    the returned list, but its subclasses will not.  The elements
+    of `leaves` may be ThingClass objects or labels.
+
+    Subclasses of any subclass found in the sequence `leaves` will
+    be excluded from the returned list, where the elements of `leaves`
+    may be ThingClass objects or labels.
+
+    If `include_leaves` is true, the leaves are included in the returned
+    list, otherwise they are not.
+
+    If `strict_leaves` is true, any descendant of a leaf will be excluded
+    in the returned set.
+
+    If given, `exclude` may be a sequence of classes, including
+    their subclasses, to exclude from the output.
+
+    If `sort` is True, a list sorted according to depth and label
+    will be returned instead of a set.
+    """
+
+    def _branch(root, leaves):
+        if root not in leaves:
+            branch = {
+                root,
+            }
+            for cls in root.subclasses():
+                # Defining a branch is actually quite tricky.  Consider
+                # the case:
+                #
+                #      L isA R
+                #      A isA L
+                #      A isA R
+                #
+                # where R is the root, L is a leaf and A is a direct
+                # child of both.  Logically, since A is a child of the
+                # leaf we want to skip A.  But a strait forward imple-
+                # mentation will see that A is a child of the root and
+                # include it.  Requireing that the R should be a strict
+                # parent of A solves this.
+                if root in cls.get_parents(strict=True):
+                    branch.update(_branch(cls, leaves))
+        else:
+            branch = (
+                {
+                    root,
+                }
+                if include_leaves
+                else set()
+            )
+        return branch
+
+    if isinstance(root, str):
+        root = self.get_by_label(root)
+
+    leaves = set(
+        self.get_by_label(leaf) if isinstance(leaf, str) else leaf
+        for leaf in leaves
+    )
+    leaves.discard(root)
+
+    if exclude:
+        exclude = set(
+            self.get_by_label(e) if isinstance(e, str) else e
+            for e in exclude
+        )
+        leaves.update(exclude)
+
+    branch = _branch(root, leaves)
+
+    # Exclude all descendants of any leaf
+    if strict_leaves:
+        descendants = root.descendants()
+        for leaf in leaves:
+            if leaf in descendants:
+                branch.difference_update(
+                    leaf.descendants(include_self=False)
+                )
+
+    if exclude:
+        branch.difference_update(exclude)
+
+    # Sort according to depth, then by label
+    if sort:
+        branch = sorted(
+            sorted(branch, key=asstring),
+            key=lambda x: len(x.mro()),
+        )
+
+    return branch
+
+
+
+ +
+ + + +
+ + + +

+get_by_label(self, label, label_annotations=None, prefix=None, imported=True, colon_in_label=None) + + +

+ +
+ +

Returns entity with label annotation label.

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
labelstr

label so search for. + May be written as 'label' or 'prefix:label'. + get_by_label('prefix:label') == + get_by_label('label', prefix='prefix').

required
label_annotationsstr

a sequence of label annotation names to look up. + Defaults to the label_annotations property.

None
prefixstr

if provided, it should be the last component of + the base iri of an ontology (with trailing slash (/) or hash + (#) stripped off). The search for a matching label will be + limited to this namespace.

None
importedbool

Whether to also look for label in imported ontologies.

True
colon_in_labelbool

Whether to accept colon (:) in a label or name-part + of IRI. Defaults to the colon_in_label property of self. + Setting this true cannot be combined with prefix.

None

If several entities have the same label, only the one which is +found first is returned.Use get_by_label_all() to get all matches.

+

Note, if different prefixes are provided in the label and via +the prefix argument a warning will be issued and the +prefix argument will take precedence.

+

A NoSuchLabelError is raised if label cannot be found.

+ +
+ Source code in ontopy/ontology.py +
def get_by_label(
+    self,
+    label: str,
+    label_annotations: str = None,
+    prefix: str = None,
+    imported: bool = True,
+    colon_in_label: bool = None,
+):
+    """Returns entity with label annotation `label`.
+
+    Arguments:
+       label: label so search for.
+           May be written as 'label' or 'prefix:label'.
+           get_by_label('prefix:label') ==
+           get_by_label('label', prefix='prefix').
+       label_annotations: a sequence of label annotation names to look up.
+           Defaults to the `label_annotations` property.
+       prefix: if provided, it should be the last component of
+           the base iri of an ontology (with trailing slash (/) or hash
+           (#) stripped off).  The search for a matching label will be
+           limited to this namespace.
+       imported: Whether to also look for `label` in imported ontologies.
+       colon_in_label: Whether to accept colon (:) in a label or name-part
+           of IRI.  Defaults to the `colon_in_label` property of `self`.
+           Setting this true cannot be combined with `prefix`.
+
+    If several entities have the same label, only the one which is
+    found first is returned.Use get_by_label_all() to get all matches.
+
+    Note, if different prefixes are provided in the label and via
+    the `prefix` argument a warning will be issued and the
+    `prefix` argument will take precedence.
+
+    A NoSuchLabelError is raised if `label` cannot be found.
+    """
+    # pylint: disable=too-many-arguments,too-many-branches,invalid-name
+    if not isinstance(label, str):
+        raise TypeError(
+            f"Invalid label definition, must be a string: '{label}'"
+        )
+
+    if label_annotations is None:
+        label_annotations = self.label_annotations
+
+    if colon_in_label is None:
+        colon_in_label = self._colon_in_label
+    if colon_in_label:
+        if prefix:
+            raise ValueError(
+                "`prefix` cannot be combined with `colon_in_label`"
+            )
+    else:
+        splitlabel = label.split(":", 1)
+        if len(splitlabel) == 2 and not splitlabel[1].startswith("//"):
+            label = splitlabel[1]
+            if prefix and prefix != splitlabel[0]:
+                warnings.warn(
+                    f"Prefix given both as argument ({prefix}) "
+                    f"and in label ({splitlabel[0]}). "
+                    "Prefix given in argument takes precedence. "
+                )
+            if not prefix:
+                prefix = splitlabel[0]
+
+    if prefix:
+        entityset = self.get_by_label_all(
+            label,
+            label_annotations=label_annotations,
+            prefix=prefix,
+        )
+        if len(entityset) == 1:
+            return entityset.pop()
+        if len(entityset) > 1:
+            raise AmbiguousLabelError(
+                f"Several entities have the same label '{label}' "
+                f"with prefix '{prefix}'."
+            )
+        raise NoSuchLabelError(
+            f"No label annotations matches for '{label}' "
+            f"with prefix '{prefix}'."
+        )
+
+    # Label is a full IRI
+    entity = self.world[label]
+    if entity:
+        return entity
+
+    get_triples = (
+        self.world._get_data_triples_spod_spod
+        if imported
+        else self._get_data_triples_spod_spod
+    )
+
+    for storid in self._to_storids(label_annotations):
+        for s, _, _, _ in get_triples(None, storid, label, None):
+            return self.world[self._unabbreviate(s)]
+
+    # Special labels
+    if self._special_labels and label in self._special_labels:
+        return self._special_labels[label]
+
+    # Check if label is a name under base_iri
+    entity = self.world[self.base_iri + label]
+    if entity:
+        return entity
+
+    # Check label is the name of an entity
+    for entity in self.get_entities(imported=imported):
+        if label == entity.name:
+            return entity
+
+    raise NoSuchLabelError(f"No label annotations matches '{label}'")
+
+
+
+ +
+ + + +
+ + + +

+get_by_label_all(self, label, label_annotations=None, prefix=None, exact_match=False) + + +

+ +
+ +

Returns set of entities with label annotation label.

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
label

label so search for. + May be written as 'label' or 'prefix:label'. Wildcard matching + using glob pattern is also supported if exact_match is set to + false.

required
label_annotations

a sequence of label annotation names to look up. + Defaults to the label_annotations property.

None
prefix

if provided, it should be the last component of + the base iri of an ontology (with trailing slash (/) or hash + (#) stripped off). The search for a matching label will be + limited to this namespace.

None
exact_match

Do not treat "*" and brackets as special characters + when matching. May be useful if your ontology has labels + containing such labels.

False
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
Set[Optional[owlready2.entity.EntityClass]]

Set of all matching entities or an empty set if no matches +could be found.

+
+ Source code in ontopy/ontology.py +
def get_by_label_all(
+    self,
+    label,
+    label_annotations=None,
+    prefix=None,
+    exact_match=False,
+) -> "Set[Optional[owlready2.entity.EntityClass]]":
+    """Returns set of entities with label annotation `label`.
+
+    Arguments:
+       label: label so search for.
+           May be written as 'label' or 'prefix:label'.  Wildcard matching
+           using glob pattern is also supported if `exact_match` is set to
+           false.
+       label_annotations: a sequence of label annotation names to look up.
+           Defaults to the `label_annotations` property.
+       prefix: if provided, it should be the last component of
+           the base iri of an ontology (with trailing slash (/) or hash
+           (#) stripped off).  The search for a matching label will be
+           limited to this namespace.
+       exact_match: Do not treat "*" and brackets as special characters
+           when matching.  May be useful if your ontology has labels
+           containing such labels.
+
+    Returns:
+        Set of all matching entities or an empty set if no matches
+        could be found.
+    """
+    if not isinstance(label, str):
+        raise TypeError(
+            f"Invalid label definition, " f"must be a string: {label!r}"
+        )
+    if " " in label:
+        raise ValueError(
+            f"Invalid label definition, {label!r} contains spaces."
+        )
+
+    if label_annotations is None:
+        label_annotations = self.label_annotations
+
+    entities = set()
+
+    # Check label annotations
+    if exact_match:
+        for storid in self._to_storids(label_annotations):
+            entities.update(
+                self.world._get_by_storid(s)
+                for s, _, _ in self.world._get_data_triples_spod_spod(
+                    None, storid, str(label), None
+                )
+            )
+    else:
+        for storid in self._to_storids(label_annotations):
+            label_entity = self._unabbreviate(storid)
+            key = (
+                label_entity.name
+                if hasattr(label_entity, "name")
+                else label_entity
+            )
+            entities.update(self.world.search(**{key: label}))
+
+    if self._special_labels and label in self._special_labels:
+        entities.update(self._special_labels[label])
+
+    # Check name-part of IRI
+    if exact_match:
+        entities.update(
+            ent for ent in self.get_entities() if ent.name == str(label)
+        )
+    else:
+        matches = fnmatch.filter(
+            (ent.name for ent in self.get_entities()), label
+        )
+        entities.update(
+            ent for ent in self.get_entities() if ent.name in matches
+        )
+
+    if prefix:
+        return set(
+            ent
+            for ent in entities
+            if ent.namespace.ontology.prefix == prefix
+        )
+    return entities
+
+
+
+ +
+ + + +
+ + + +

+get_descendants(self, classes, generations=None, common=False) + + +

+ +
+ +

Return descendants/subclasses of all classes in classes.

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
classesUnion[List, ThingClass]

class(es) for which descendants are desired.

required
commonbool

whether to only return descendants common to all classes.

False
generationsint

Include this number of generations, default is all.

None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
set

A set of descendants for given number of generations. +If 'common'=True, the common descendants are returned +within the specified number of generations. +'generations' defaults to all.

+
+ Source code in ontopy/ontology.py +
def get_descendants(
+    self,
+    classes: "Union[List, ThingClass]",
+    generations: int = None,
+    common: bool = False,
+) -> set:
+    """Return descendants/subclasses of all classes in `classes`.
+    Args:
+        classes: class(es) for which descendants are desired.
+        common: whether to only return descendants common to all classes.
+        generations: Include this number of generations, default is all.
+    Returns:
+        A set of descendants for given number of generations.
+        If 'common'=True, the common descendants are returned
+        within the specified number of generations.
+        'generations' defaults to all.
+    """
+
+    if not isinstance(classes, Iterable):
+        classes = [classes]
+
+    descendants = {name: [] for name in classes}
+
+    def _children_recursively(num, newentity, parent, descendants):
+        """Helper function to get all children up to generation."""
+        for child in self.get_children_of(newentity):
+            descendants[parent].append(child)
+            if num < generations:
+                _children_recursively(num + 1, child, parent, descendants)
+
+    if generations == 0:
+        return set()
+
+    if not generations:
+        for entity in classes:
+            descendants[entity] = entity.descendants()
+            # only include proper descendants
+            descendants[entity].remove(entity)
+    else:
+        for entity in classes:
+            _children_recursively(1, entity, entity, descendants)
+
+    results = descendants.values()
+    if common is True:
+        return set.intersection(*map(set, results))
+    return set(flatten(results))
+
+
+
+ +
+ + + +
+ + + +

+get_entities(self, imported=True, classes=True, individuals=True, object_properties=True, data_properties=True, annotation_properties=True) + + +

+ +
+ +

Return a generator over (optionally) all classes, individuals, +object_properties, data_properties and annotation_properties.

+

If imported is True, entities in imported ontologies will also +be included.

+ +
+ Source code in ontopy/ontology.py +
def get_entities(  # pylint: disable=too-many-arguments
+    self,
+    imported=True,
+    classes=True,
+    individuals=True,
+    object_properties=True,
+    data_properties=True,
+    annotation_properties=True,
+):
+    """Return a generator over (optionally) all classes, individuals,
+    object_properties, data_properties and annotation_properties.
+
+    If `imported` is `True`, entities in imported ontologies will also
+    be included.
+    """
+    generator = []
+    if classes:
+        generator.append(self.classes(imported))
+    if individuals:
+        generator.append(self.individuals(imported))
+    if object_properties:
+        generator.append(self.object_properties(imported))
+    if data_properties:
+        generator.append(self.data_properties(imported))
+    if annotation_properties:
+        generator.append(self.annotation_properties(imported))
+    for entity in itertools.chain(*generator):
+        yield entity
+
+
+
+ +
+ + + +
+ + + +

+get_graph(self, **kwargs) + + +

+ +
+ +

Returns a new graph object. See emmo.graph.OntoGraph.

+

Note that this method requires the Python graphviz package.

+ +
+ Source code in ontopy/ontology.py +
def get_graph(self, **kwargs):
+    """Returns a new graph object.  See  emmo.graph.OntoGraph.
+
+    Note that this method requires the Python graphviz package.
+    """
+    # pylint: disable=import-outside-toplevel,cyclic-import
+    from ontopy.graph import OntoGraph
+
+    return OntoGraph(self, **kwargs)
+
+
+
+ +
+ + + +
+ + + +

+get_imported_ontologies(self, recursive=False) + + +

+ +
+ +

Return a list with imported ontologies.

+

If recursive is True, ontologies imported by imported ontologies +are also returned.

+ +
+ Source code in ontopy/ontology.py +
def get_imported_ontologies(self, recursive=False):
+    """Return a list with imported ontologies.
+
+    If `recursive` is `True`, ontologies imported by imported ontologies
+    are also returned.
+    """
+
+    def rec_imported(onto):
+        for ontology in onto.imported_ontologies:
+            if ontology not in imported:
+                imported.add(ontology)
+                rec_imported(ontology)
+
+    if recursive:
+        imported = set()
+        rec_imported(self)
+        return list(imported)
+
+    return self.imported_ontologies
+
+
+
+ +
+ + + +
+ + + +

+get_relations(self) + + +

+ +
+ +

Returns a generator for all relations.

+ +
+ Source code in ontopy/ontology.py +
def get_relations(self):
+    """Returns a generator for all relations."""
+    warnings.warn(
+        "Ontology.get_relations() is deprecated. Use "
+        "onto.object_properties() instead.",
+        DeprecationWarning,
+        stacklevel=2,
+    )
+    return self.object_properties()
+
+
+
+ +
+ + + +
+ + + +

+get_root_classes(self, imported=False) + + +

+ +
+ +

Returns a list of root classes.

+ +
+ Source code in ontopy/ontology.py +
def get_root_classes(self, imported=False):
+    """Returns a list or root classes."""
+    return [
+        cls
+        for cls in self.classes(imported=imported)
+        if not cls.ancestors().difference(set([cls, owlready2.Thing]))
+    ]
+
+
+
+ +
+ + + +
+ + + +

+get_root_data_properties(self, imported=False) + + +

+ +
+ +

Returns a list of root data properties.

+ +
+ Source code in ontopy/ontology.py +
def get_root_data_properties(self, imported=False):
+    """Returns a list of root object properties."""
+    props = set(self.data_properties(imported=imported))
+    return [p for p in props if not props.intersection(p.is_a)]
+
+
+
+ +
+ + + +
+ + + +

+get_root_object_properties(self, imported=False) + + +

+ +
+ +

Returns a list of root object properties.

+ +
+ Source code in ontopy/ontology.py +
def get_root_object_properties(self, imported=False):
+    """Returns a list of root object properties."""
+    props = set(self.object_properties(imported=imported))
+    return [p for p in props if not props.intersection(p.is_a)]
+
+
+
+ +
+ + + +
+ + + +

+get_roots(self, imported=False) + + +

+ +
+ +

Returns all class, object_property and data_property roots.

+ +
+ Source code in ontopy/ontology.py +
def get_roots(self, imported=False):
+    """Returns all class, object_property and data_property roots."""
+    roots = self.get_root_classes(imported=imported)
+    roots.extend(self.get_root_object_properties(imported=imported))
+    roots.extend(self.get_root_data_properties(imported=imported))
+    return roots
+
+
+
+ +
+ + + +
+ + + +

+get_unabbreviated_triples(self, subject=None, predicate=None, obj=None, blank=None) + + +

+ +
+ +

Returns all matching triples unabbreviated.

+

If blank is given, it will be used to represent blank nodes.

+ +
+ Source code in ontopy/ontology.py +
def get_unabbreviated_triples(
+    self, subject=None, predicate=None, obj=None, blank=None
+):
+    """Returns all matching triples unabbreviated.
+
+    If `blank` is given, it will be used to represent blank nodes.
+    """
+    # pylint: disable=invalid-name
+    return _get_unabbreviated_triples(
+        self, subject=subject, predicate=predicate, obj=obj, blank=blank
+    )
+
+
+
+ +
+ + + +
+ + + +

+get_version(self, as_iri=False) + + +

+ +
+ +

Returns the version number of the ontology as inferred from the +owl:versionIRI tag or, if owl:versionIRI is not found, from +owl:versionInfo.

+

If as_iri is True, the full versionIRI is returned.

+ +
+ Source code in ontopy/ontology.py +
def get_version(self, as_iri=False) -> str:
+    """Returns the version number of the ontology as inferred from the
+    owl:versionIRI tag or, if owl:versionIRI is not found, from
+    owl:versionINFO.
+
+    If `as_iri` is True, the full versionIRI is returned.
+    """
+    version_iri_storid = self.world._abbreviate(
+        "http://www.w3.org/2002/07/owl#versionIRI"
+    )
+    tokens = self.get_triples(s=self.storid, p=version_iri_storid)
+    if (not tokens) and (as_iri is True):
+        raise TypeError(
+            "No owl:versionIRI "
+            f"in Ontology {self.base_iri!r}. "
+            "Search for owl:versionInfo with as_iri=False"
+        )
+    if tokens:
+        _, _, obj = tokens[0]
+        version_iri = self.world._unabbreviate(obj)
+        if as_iri:
+            return version_iri
+        return infer_version(self.base_iri, version_iri)
+
+    version_info_storid = self.world._abbreviate(
+        "http://www.w3.org/2002/07/owl#versionInfo"
+    )
+    tokens = self.get_triples(s=self.storid, p=version_info_storid)
+    if not tokens:
+        raise TypeError(
+            "No versionIRI or versionInfo " f"in Ontology {self.base_iri!r}"
+        )
+    _, _, version_info = tokens[0]
+    return version_info.split("^^")[0].strip('"')
+
+
+
+ +
+ + + +
+ + + +

+get_wu_palmer_measure(self, cls1, cls2) + + +

+ +
+ +

Return Wu-Palmer measure for semantic similarity.

+

Returns Wu-Palmer measure for semantic similarity between +two concepts. +Wu, Palmer; ACL 94: Proceedings of the 32nd annual meeting on +Association for Computational Linguistics, June 1994.

+ +
+ Source code in ontopy/ontology.py +
def get_wu_palmer_measure(self, cls1, cls2):
+    """Return Wu-Palmer measure for semantic similarity.
+
+    Returns Wu-Palmer measure for semantic similarity between
+    two concepts.
+    Wu, Palmer; ACL 94: Proceedings of the 32nd annual meeting on
+    Association for Computational Linguistics, June 1994.
+    """
+    cca = self.closest_common_ancestor(cls1, cls2)
+    ccadepth = self.number_of_generations(cca, self.Thing)
+    generations1 = self.number_of_generations(cls1, cca)
+    generations2 = self.number_of_generations(cls2, cca)
+    return 2 * ccadepth / (generations1 + generations2 + 2 * ccadepth)
+
+
+
+ +
+ + + +
+ + + +

+individuals(self, imported=False) + + +

+ +
+ +

Returns a generator over all individuals.

+ +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
imported

if True, entities in imported ontologies +are also returned.

False
+
+ Source code in ontopy/ontology.py +
def individuals(self, imported=False):
+    """Returns an generator over all individuals.
+
+    Arguments:
+        imported: if `True`, entities in imported ontologies
+            are also returned.
+    """
+    return self._entities("individuals", imported=imported)
+
+
+
+ +
+ + + +
+ + + +

+is_defined(self, entity) + + +

+ +
+ +

Returns true if the entity is a defined class.

+

Deprecated, use the is_defined property of the classes +(ThingClass subclasses) instead.

+ +
+ Source code in ontopy/ontology.py +
def is_defined(self, entity):
+    """Returns true if the entity is a defined class.
+
+    Deprecated, use the `is_defined` property of the classes
+    (ThingClass subclasses) instead.
+    """
+    warnings.warn(
+        "This method is deprecated.  Use the `is_defined` property of "
+        "the classes instad.",
+        DeprecationWarning,
+        stacklevel=2,
+    )
+    if isinstance(entity, str):
+        entity = self.get_by_label(entity)
+    return hasattr(entity, "equivalent_to") and bool(entity.equivalent_to)
+
+
+
+ +
+ + + +
+ + + +

+is_individual(self, entity) + + +

+ +
+ +

Returns true if entity is an individual.

+ +
+ Source code in ontopy/ontology.py +
def is_individual(self, entity):
+    """Returns true if entity is an individual."""
+    if isinstance(entity, str):
+        entity = self.get_by_label(entity)
+    return isinstance(entity, owlready2.Thing)
+
+
+
+ +
+ + + +
+ + + +

+load(self, only_local=False, filename=None, format=None, reload=None, reload_if_newer=False, url_from_catalog=None, catalog_file='catalog-v001.xml', emmo_based=True, prefix=None, prefix_emmo=None, **kwargs) + + +

+ +
+ +

Load the ontology.

+
Arguments
+
+

bool

+

Whether to only read local files. This requires that you +have appended the path to the ontology to owlready2.onto_path.

+
+
+

str

+

Path to file to load the ontology from. Defaults to base_iri +provided to get_ontology().

+
+
+

str

+

Format of filename. Default is inferred from filename +extension.

+
+
+

bool

+

Whether to reload the ontology if it is already loaded.

+
+
+

bool

+

Whether to reload the ontology if the source has changed since +last time it was loaded.

+
+
+

bool | None

+

Whether to use catalog file to resolve the location of base_iri. +If None, the catalog file is used if it exists in the same +directory as filename.

+
+
+

str

+

Name of Protègè catalog file in the same folder as the +ontology. This option is used together with only_local and +defaults to "catalog-v001.xml".

+
+
+

bool

+

Whether this is an EMMO-based ontology or not, default True.

+
+

prefix: defaults to self.get_namespace.name if not given.

+
+

bool, default None. If emmo_based is True it

+

defaults to True and sets the prefix of all imported ontologies +with base_iri starting with 'http://emmo.info/emmo' to emmo

+
+
+

Kwargs

+

Additional keyword arguments are passed on to +owlready2.Ontology.load().

+
+ +
+ Source code in ontopy/ontology.py +
def load(  # pylint: disable=too-many-arguments,arguments-renamed
+    self,
+    only_local=False,
+    filename=None,
+    format=None,  # pylint: disable=redefined-builtin
+    reload=None,
+    reload_if_newer=False,
+    url_from_catalog=None,
+    catalog_file="catalog-v001.xml",
+    emmo_based=True,
+    prefix=None,
+    prefix_emmo=None,
+    **kwargs,
+):
+    """Load the ontology.
+
+    Arguments
+    ---------
+    only_local: bool
+        Whether to only read local files.  This requires that you
+        have appended the path to the ontology to owlready2.onto_path.
+    filename: str
+        Path to file to load the ontology from.  Defaults to `base_iri`
+        provided to get_ontology().
+    format: str
+        Format of `filename`.  Default is inferred from `filename`
+        extension.
+    reload: bool
+        Whether to reload the ontology if it is already loaded.
+    reload_if_newer: bool
+        Whether to reload the ontology if the source has changed since
+        last time it was loaded.
+    url_from_catalog: bool | None
+        Whether to use catalog file to resolve the location of `base_iri`.
+        If None, the catalog file is used if it exists in the same
+        directory as `filename`.
+    catalog_file: str
+        Name of Protègè catalog file in the same folder as the
+        ontology.  This option is used together with `only_local` and
+        defaults to "catalog-v001.xml".
+    emmo_based: bool
+        Whether this is an EMMO-based ontology or not, default `True`.
+    prefix: defaults to self.get_namespace.name if
+    prefix_emmo: bool, default None. If emmo_based is True it
+        defaults to True and sets the prefix of all imported ontologies
+        with base_iri starting with 'http://emmo.info/emmo' to emmo
+    kwargs:
+        Additional keyword arguments are passed on to
+        owlready2.Ontology.load().
+    """
+    # TODO: make sure that `only_local` argument is respected...
+
+    if self.loaded:
+        return self
+    self._load(
+        only_local=only_local,
+        filename=filename,
+        format=format,
+        reload=reload,
+        reload_if_newer=reload_if_newer,
+        url_from_catalog=url_from_catalog,
+        catalog_file=catalog_file,
+        **kwargs,
+    )
+
+    # Enable optimised search by get_by_label()
+    if self._special_labels is None and emmo_based:
+        top = self.world["http://www.w3.org/2002/07/owl#topObjectProperty"]
+        self._special_labels = {
+            "Thing": owlready2.Thing,
+            "Nothing": owlready2.Nothing,
+            "topObjectProperty": top,
+            "owl:Thing": owlready2.Thing,
+            "owl:Nothing": owlready2.Nothing,
+            "owl:topObjectProperty": top,
+        }
+    # set prefix if another prefix is desired
+    # if we do this, shouldn't we make the name of all
+    # entities of the given ontology to the same?
+    if prefix:
+        self.prefix = prefix
+    else:
+        self.prefix = self.name
+
+    if emmo_based and prefix_emmo is None:
+        prefix_emmo = True
+    if prefix_emmo:
+        self.set_common_prefix()
+
+    return self
+
+
+
+ +
+ + + +
+ + + +

+new_annotation_property(self, name, parent) + + +

+ +
+ +

Create and return new annotation property.

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
namestr

name of the annotation property

required
parentUnion[owlready2.annotation.AnnotationPropertyClass, collections.abc.Iterable]

parent(s) of the annotation property

required
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
AnnotationPropertyClass

the new annotation property.

+
+ Source code in ontopy/ontology.py +
def new_annotation_property(
+    self, name: str, parent: Union[AnnotationPropertyClass, Iterable]
+) -> AnnotationPropertyClass:
+    """Create and return new annotation property.
+
+    Args:
+        name: name of the annotation property
+        parent: parent(s) of the annotation property
+
+    Returns:
+        the new annotation property.
+    """
+    return self.new_entity(name, parent, "annotation_property")
+
+
+
+ +
+ + + +
+ + + +

+new_class(self, name, parent) + + +

+ +
+ +

Create and return new class.

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
namestr

name of the class

required
parentUnion[owlready2.entity.ThingClass, collections.abc.Iterable]

parent(s) of the class

required
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
ThingClass

the new class.

+
+ Source code in ontopy/ontology.py +
def new_class(
+    self, name: str, parent: Union[ThingClass, Iterable]
+) -> ThingClass:
+    """Create and return new class.
+
+    Args:
+        name: name of the class
+        parent: parent(s) of the class
+
+    Returns:
+        the new class.
+    """
+    return self.new_entity(name, parent, "class")
+
+
+
+ +
+ + + +
+ + + +

+new_data_property(self, name, parent) + + +

+ +
+ +

Create and return new data property.

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
namestr

name of the data property

required
parentUnion[owlready2.prop.DataPropertyClass, collections.abc.Iterable]

parent(s) of the data property

required
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
DataPropertyClass

the new data property.

+
+ Source code in ontopy/ontology.py +
def new_data_property(
+    self, name: str, parent: Union[DataPropertyClass, Iterable]
+) -> DataPropertyClass:
+    """Create and return new data property.
+
+    Args:
+        name: name of the data property
+        parent: parent(s) of the data property
+
+    Returns:
+        the new data property.
+    """
+    return self.new_entity(name, parent, "data_property")
+
+
+
+ +
+ + + +
+ + + +

+new_entity(self, name, parent, entitytype='class', preflabel=None) + + +

+ +
+ +

Create and return new entity

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
namestr

name of the entity

required
parentUnion[owlready2.entity.ThingClass, owlready2.prop.ObjectPropertyClass, owlready2.prop.DataPropertyClass, owlready2.annotation.AnnotationPropertyClass, collections.abc.Iterable]

parent(s) of the entity

required
entitytypeUnion[str, owlready2.entity.ThingClass, owlready2.prop.ObjectPropertyClass, owlready2.prop.DataPropertyClass, owlready2.annotation.AnnotationPropertyClass]

type of the entity, +default is 'class' (str) 'ThingClass' (owlready2 Python class). +Other options +are 'data_property', 'object_property', +'annotation_property' (strings) or the +Python classes ObjectPropertyClass, +DataPropertyClass and AnnotationProperty classes.

'class'
preflabelOptional[str]

if given, add this as a skos:prefLabel annotation +to the new entity. If None (default), name will +be added as prefLabel if skos:prefLabel is in the ontology +and listed in self.label_annotations. Set preflabel to +False, to avoid assigning a prefLabel.

None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
Union[owlready2.entity.ThingClass, owlready2.prop.ObjectPropertyClass, owlready2.prop.DataPropertyClass, owlready2.annotation.AnnotationPropertyClass]

the new entity.

Throws exception if name consists of more than one word, if type is not +one of the allowed types, or if parent is not of the correct type. +By default, the parent is Thing.

+ +
+ Source code in ontopy/ontology.py +
def new_entity(
+    self,
+    name: str,
+    parent: Union[
+        ThingClass,
+        ObjectPropertyClass,
+        DataPropertyClass,
+        AnnotationPropertyClass,
+        Iterable,
+    ],
+    entitytype: Optional[
+        Union[
+            str,
+            ThingClass,
+            ObjectPropertyClass,
+            DataPropertyClass,
+            AnnotationPropertyClass,
+        ]
+    ] = "class",
+    preflabel: Optional[str] = None,
+) -> Union[
+    ThingClass,
+    ObjectPropertyClass,
+    DataPropertyClass,
+    AnnotationPropertyClass,
+]:
+    """Create and return new entity
+
+    Args:
+        name: name of the entity
+        parent: parent(s) of the entity
+        entitytype: type of the entity,
+            default is 'class' (str) 'ThingClass' (owlready2 Python class).
+            Other options
+            are 'data_property', 'object_property',
+            'annotation_property' (strings) or the
+            Python classes ObjectPropertyClass,
+            DataPropertyClass and AnnotationProperty classes.
+        preflabel: if given, add this as a skos:prefLabel annotation
+            to the new entity.  If None (default), `name` will
+            be added as prefLabel if skos:prefLabel is in the ontology
+            and listed in `self.label_annotations`.  Set `preflabel` to
+            False, to avoid assigning a prefLabel.
+
+    Returns:
+        the new entity.
+
+    Throws exception if name consists of more than one word, if type is not
+    one of the allowed types, or if parent is not of the correct type.
+    By default, the parent is Thing.
+
+    """
+    # pylint: disable=invalid-name
+    if " " in name:
+        raise LabelDefinitionError(
+            f"Error in label name definition '{name}': "
+            f"Label consists of more than one word."
+        )
+    parents = tuple(parent) if isinstance(parent, Iterable) else (parent,)
+    if entitytype == "class":
+        parenttype = owlready2.ThingClass
+    elif entitytype == "data_property":
+        parenttype = owlready2.DataPropertyClass
+    elif entitytype == "object_property":
+        parenttype = owlready2.ObjectPropertyClass
+    elif entitytype == "annotation_property":
+        parenttype = owlready2.AnnotationPropertyClass
+    elif entitytype in [
+        ThingClass,
+        ObjectPropertyClass,
+        DataPropertyClass,
+        AnnotationPropertyClass,
+    ]:
+        parenttype = entitytype
+    else:
+        raise EntityClassDefinitionError(
+            f"Error in entity type definition: "
+            f"'{entitytype}' is not a valid entity type."
+        )
+    for thing in parents:
+        if not isinstance(thing, parenttype):
+            raise EntityClassDefinitionError(
+                f"Error in parent definition: "
+                f"'{thing}' is not an {parenttype}."
+            )
+
+    with self:
+        entity = types.new_class(name, parents)
+
+        preflabel_iri = "http://www.w3.org/2004/02/skos/core#prefLabel"
+        if preflabel:
+            if not self.world[preflabel_iri]:
+                pref_label = self.new_annotation_property(
+                    "prefLabel",
+                    parent=[owlready2.AnnotationProperty],
+                )
+                pref_label.iri = preflabel_iri
+            entity.prefLabel = english(preflabel)
+        elif (
+            preflabel is None
+            and preflabel_iri in self.label_annotations
+            and self.world[preflabel_iri]
+        ):
+            entity.prefLabel = english(name)
+
+    return entity
+
+
+
+ +
+ + + +
+ + + +

+new_object_property(self, name, parent) + + +

+ +
+ +

Create and return new object property.

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
namestr

name of the object property

required
parentUnion[owlready2.prop.ObjectPropertyClass, collections.abc.Iterable]

parent(s) of the object property

required
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
ObjectPropertyClass

the new object property.

+
+ Source code in ontopy/ontology.py +
def new_object_property(
+    self, name: str, parent: Union[ObjectPropertyClass, Iterable]
+) -> ObjectPropertyClass:
+    """Create and return new object property.
+
+    Args:
+        name: name of the object property
+        parent: parent(s) of the object property
+
+    Returns:
+        the new object property.
+    """
+    return self.new_entity(name, parent, "object_property")
+
+
+
+ +
+ + + +
+ + + +

+number_of_generations(self, descendant, ancestor) + + +

+ +
+ +

Return shortest distance from ancestor to descendant

+ +
+ Source code in ontopy/ontology.py +
def number_of_generations(self, descendant, ancestor):
+    """Return shortest distance from ancestor to descendant"""
+    if ancestor not in descendant.ancestors():
+        raise ValueError("Descendant is not a descendant of ancestor")
+    return self._number_of_generations(descendant, ancestor, 0)
+
+
+
+ +
+ + + +
+ + + +

+object_properties(self, imported=False) + + +

+ +
+ +

Returns a generator over all object_properties.

+ +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
imported

if True, entities in imported ontologies +are also returned.

False
+
+ Source code in ontopy/ontology.py +
def object_properties(self, imported=False):
+    """Returns an generator over all object_properties.
+
+    Arguments:
+        imported: if `True`, entities in imported ontologies
+            are also returned.
+    """
+    return self._entities("object_properties", imported=imported)
+
+
+
+ +
+ + + +
+ + + +

+remove_label_annotation(self, iri) + + +

+ +
+ +

Removes label annotation used by get_by_label().

+ +
+ Source code in ontopy/ontology.py +
def remove_label_annotation(self, iri):
+    """Removes label annotation used by get_by_label()."""
+    warnings.warn(
+        "Ontology.remove_label_annotations() is deprecated. "
+        "Direct modify the `label_annotations` attribute instead.",
+        DeprecationWarning,
+        stacklevel=2,
+    )
+    if hasattr(iri, "iri"):
+        iri = iri.iri
+    try:
+        self.label_annotations.remove(iri)
+    except ValueError:
+        pass
+
+
+
+ +
+ + + +
+ + + +

+rename_entities(self, annotations=('prefLabel', 'label', 'altLabel')) + + +

+ +
+ +

Set name of all entities to the first non-empty annotation in +annotations.

+

Warning, this method changes all IRIs in the ontology. However, +it may be useful to make the ontology more readable and to work +with it together with a triple store.

+ +
+ Source code in ontopy/ontology.py +
def rename_entities(
+    self,
+    annotations=("prefLabel", "label", "altLabel"),
+):
+    """Set `name` of all entities to the first non-empty annotation in
+    `annotations`.
+
+    Warning, this method changes all IRIs in the ontology.  However,
+    it may be useful to make the ontology more readable and to work
+    with it together with a triple store.
+    """
+    for entity in self.get_entities():
+        for annotation in annotations:
+            if hasattr(entity, annotation):
+                name = getattr(entity, annotation).first()
+                if name:
+                    entity.name = name
+                    break
+
+
+
+ +
+ + + +
+ + + +

+save(self, filename=None, format=None, dir='.', mkdir=False, overwrite=False, recursive=False, squash=False, write_catalog_file=False, append_catalog=False, catalog_file='catalog-v001.xml') + + +

+ +
+ +

Writes the ontology to file.

+
Parameters
+
+

None | str | Path

+

Name of file to write to. If None, it defaults to the name +of the ontology with format as file extension.

+
+
+

str

+

Output format. The default is to infer it from filename.

+
+
+

str | Path

+

If filename is a relative path, it is a relative path to dir.

+
+
+

bool

+

Whether to create output directory if it does not exists.

+
+
+

bool

+

If true and filename exists, remove the existing file before +saving. The default is to append to an existing ontology.

+
+
+

bool

+

Whether to save imported ontologies recursively. This is +commonly combined with filename=None, dir and mkdir.

+
+
+

bool

+

If true, rdflib will be used to save the current ontology +together with all its sub-ontologies into filename. +It make no sense to combine this with recursive.

+
+
+

bool

+

Whether to also write a catalog file to disk.

+
+
+

bool

+

Whether to append to an existing catalog file.

+
+
+

str | Path

+

Name of catalog file. If not an absolute path, it is prepended +to dir.

+
+ +
+ Source code in ontopy/ontology.py +
def save(
+    self,
+    filename=None,
+    format=None,
+    dir=".",
+    mkdir=False,
+    overwrite=False,
+    recursive=False,
+    squash=False,
+    write_catalog_file=False,
+    append_catalog=False,
+    catalog_file="catalog-v001.xml",
+):
+    """Writes the ontology to file.
+
+    Parameters
+    ----------
+    filename: None | str | Path
+        Name of file to write to.  If None, it defaults to the name
+        of the ontology with `format` as file extension.
+    format: str
+        Output format. The default is to infer it from `filename`.
+    dir: str | Path
+        If `filename` is a relative path, it is a relative path to `dir`.
+    mkdir: bool
+        Whether to create output directory if it does not exists.
+    owerwrite: bool
+        If true and `filename` exists, remove the existing file before
+        saving.  The default is to append to an existing ontology.
+    recursive: bool
+        Whether to save imported ontologies recursively.  This is
+        commonly combined with `filename=None`, `dir` and `mkdir`.
+    squash: bool
+        If true, rdflib will be used to save the current ontology
+        together with all its sub-ontologies into `filename`.
+        It make no sense to combine this with `recursive`.
+    write_catalog_file: bool
+        Whether to also write a catalog file to disk.
+    append_catalog: bool
+        Whether to append to an existing catalog file.
+    catalog_file: str | Path
+        Name of catalog file.  If not an absolute path, it is prepended
+        to `dir`.
+    """
+    # pylint: disable=redefined-builtin,too-many-arguments
+    # pylint: disable=too-many-statements,too-many-branches
+    # pylint: disable=too-many-locals,arguments-renamed
+    if not _validate_installed_version(
+        package="rdflib", min_version="6.0.0"
+    ) and format == FMAP.get("ttl", ""):
+        from rdflib import (  # pylint: disable=import-outside-toplevel
+            __version__ as __rdflib_version__,
+        )
+
+        warnings.warn(
+            IncompatibleVersion(
+                "To correctly convert to Turtle format, rdflib must be "
+                "version 6.0.0 or greater, however, the detected rdflib "
+                "version used by your Python interpreter is "
+                f"{__rdflib_version__!r}. For more information see the "
+                "'Known issues' section of the README."
+            )
+        )
+
+    revmap = {value: key for key, value in FMAP.items()}
+    if filename is None:
+        if format:
+            fmt = revmap.get(format, format)
+            filename = f"{self.name}.{fmt}"
+        else:
+            raise TypeError("`filename` and `format` cannot both be None.")
+    filename = os.path.join(dir, filename)
+    dir = Path(filename).resolve().parent
+
+    if mkdir:
+        outdir = Path(filename).parent.resolve()
+        if not outdir.exists():
+            outdir.mkdir(parents=True)
+
+    if not format:
+        format = guess_format(filename, fmap=FMAP)
+    fmt = revmap.get(format, format)
+
+    if overwrite and filename and os.path.exists(filename):
+        os.remove(filename)
+
+    EMMO = rdflib.Namespace(  # pylint:disable=invalid-name
+        "http://emmo.info/emmo#"
+    )
+
+    if recursive:
+        if squash:
+            raise ValueError(
+                "`recursive` and `squash` should not both be true"
+            )
+        layout = directory_layout(self)
+
+        for onto, path in layout.items():
+            fname = Path(dir) / f"{path}.{fmt}"
+            onto.save(
+                filename=fname,
+                format=format,
+                dir=dir,
+                mkdir=mkdir,
+                overwrite=overwrite,
+                recursive=False,
+                squash=False,
+                write_catalog_file=False,
+            )
+
+        if write_catalog_file:
+            catalog_files = set()
+            irimap = {}
+            for onto, path in layout.items():
+                irimap[
+                    onto.get_version(as_iri=True)
+                ] = f"{dir}/{path}.{fmt}"
+                catalog_files.add(Path(path).parent / catalog_file)
+
+            for catfile in catalog_files:
+                write_catalog(
+                    irimap.copy(),
+                    output=catfile,
+                    directory=dir,
+                    append=append_catalog,
+                )
+
+    elif write_catalog_file:
+        write_catalog(
+            {self.get_version(as_iri=True): filename},
+            output=catalog_file,
+            directory=dir,
+            append=append_catalog,
+        )
+
+    if squash:
+        from rdflib import (  # pylint:disable=import-outside-toplevel
+            URIRef,
+            RDF,
+            OWL,
+        )
+
+        graph = self.world.as_rdflib_graph()
+        graph.namespace_manager.bind("emmo", EMMO)
+
+        # Remove anonymous namespace and imports
+        graph.remove((URIRef("http://anonymous"), RDF.type, OWL.Ontology))
+        imports = list(graph.triples((None, OWL.imports, None)))
+        for triple in imports:
+            graph.remove(triple)
+
+        graph.serialize(destination=filename, format=format)
+    elif format in OWLREADY2_FORMATS:
+        super().save(file=filename, format=fmt)
+    else:
+        # The try-finally clause is needed for cleanup and because
+        # we have to provide delete=False to NamedTemporaryFile
+        # since Windows does not allow to reopen an already open
+        # file.
+        try:
+            with tempfile.NamedTemporaryFile(
+                suffix=".owl", delete=False
+            ) as handle:
+                tmpfile = handle.name
+            super().save(tmpfile, format="ntriples")
+            graph = rdflib.Graph()
+            graph.parse(tmpfile, format="ntriples")
+            graph.serialize(destination=filename, format=format)
+        finally:
+            os.remove(tmpfile)
+
+
+
+ +
+ + + +
+ + + +

+set_common_prefix(self, iri_base='http://emmo.info/emmo', prefix='emmo', visited=None) + + +

+ +
+ +

Set a common prefix for all imported ontologies +with the same first part of the base_iri.

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
iri_basestr

The start of the base_iri to look for. Defaults to +the emmo base_iri http://emmo.info/emmo

'http://emmo.info/emmo'
prefixstr

the desired prefix. Defaults to emmo.

'emmo'
visitedOptional[Set]

Ontologies to skip. Only intended for internal use.

None
+
+ Source code in ontopy/ontology.py +
def set_common_prefix(
+    self,
+    iri_base: str = "http://emmo.info/emmo",
+    prefix: str = "emmo",
+    visited: "Optional[Set]" = None,
+) -> None:
+    """Set a common prefix for all imported ontologies
+    with the same first part of the base_iri.
+
+    Args:
+        iri_base: The start of the base_iri to look for. Defaults to
+            the emmo base_iri http://emmo.info/emmo
+        prefix: the desired prefix. Defaults to emmo.
+        visited: Ontologies to skip. Only intended for internal use.
+    """
+    if visited is None:
+        visited = set()
+    if self.base_iri.startswith(iri_base):
+        self.prefix = prefix
+    for onto in self.imported_ontologies:
+        if not onto in visited:
+            visited.add(onto)
+            onto.set_common_prefix(
+                iri_base=iri_base, prefix=prefix, visited=visited
+            )
+
+
+
+ +
+ + + +
+ + + +

+set_default_label_annotations(self) + + +

+ +
+ +

Sets the default label annotations.

+ +
+ Source code in ontopy/ontology.py +
def set_default_label_annotations(self):
+    """Sets the default label annotations."""
+    warnings.warn(
+        "Ontology.set_default_label_annotations() is deprecated. "
+        "Default label annotations are set by Ontology.__init__(). ",
+        DeprecationWarning,
+        stacklevel=2,
+    )
+    self.label_annotations = DEFAULT_LABEL_ANNOTATIONS[:]
+
+
+
+ +
+ + + +
+ + + +

+set_version(self, version=None, version_iri=None) + + +

+ +
+ +

Assign version to ontology by asigning owl:versionIRI.

+

If version but not version_iri is provided, the version +IRI will be the combination of base_iri and version.

+ +
+ Source code in ontopy/ontology.py +
def set_version(self, version=None, version_iri=None):
+    """Assign version to ontology by asigning owl:versionIRI.
+
+    If `version` but not `version_iri` is provided, the version
+    IRI will be the combination of `base_iri` and `version`.
+    """
+    _version_iri = "http://www.w3.org/2002/07/owl#versionIRI"
+    version_iri_storid = self.world._abbreviate(_version_iri)
+    if self._has_obj_triple_spo(  # pylint: disable=unexpected-keyword-arg
+        # For some reason _has_obj_triples_spo exists in both
+        # owlready2.namespace.Namespace (with arguments subject/predicate)
+        # and in owlready2.triplelite._GraphManager (with arguments s/p)
+        # owlready2.Ontology inherits from Namespace directly
+        # and pylint checks that.
+        # It actually accesses the one in triplelite.
+        # subject=self.storid, predicate=version_iri_storid
+        s=self.storid,
+        p=version_iri_storid,
+    ):
+        self._del_obj_triple_spo(s=self.storid, p=version_iri_storid)
+
+    if not version_iri:
+        if not version:
+            raise TypeError(
+                "Either `version` or `version_iri` must be provided"
+            )
+        head, tail = self.base_iri.rstrip("#/").rsplit("/", 1)
+        version_iri = "/".join([head, version, tail])
+
+    self._add_obj_triple_spo(
+        s=self.storid,
+        p=self.world._abbreviate(_version_iri),
+        o=self.world._abbreviate(version_iri),
+    )
+
+
+
+ +
+ + + +
+ + + +

+sync_attributes(self, name_policy=None, name_prefix='', class_docstring='comment', sync_imported=False) + + +

+ +
+ +

This method is intended to be called after you have added new +classes (typically via Python) to make sure that attributes like +label and comments are defined.

+

If a class, object property, data property or annotation +property in the current ontology has no label, the name of +the corresponding Python class will be assigned as label.

+

If a class, object property, data property or annotation +property has no comment, it will be assigned the docstring of +the corresponding Python class.

+

name_policy specify wether and how the names in the ontology +should be updated. Valid values are: + None not changed + "uuid" name_prefix followed by a global unique id (UUID). + If the name is already valid accoridng to this standard + it will not be regenerated. + "sequential" name_prefix followed a sequantial number. +EMMO conventions imply name_policy=='uuid'.

+

If sync_imported is true, all imported ontologies are also +updated.

+

The class_docstring argument specifies the annotation that +class docstrings are mapped to. Defaults to "comment".

+ +
+ Source code in ontopy/ontology.py +
def sync_attributes(  # pylint: disable=too-many-branches
+    self,
+    name_policy=None,
+    name_prefix="",
+    class_docstring="comment",
+    sync_imported=False,
+):
+    """This method is intended to be called after you have added new
+    classes (typically via Python) to make sure that attributes like
+    `label` and `comments` are defined.
+
+    If a class, object property, data property or annotation
+    property in the current ontology has no label, the name of
+    the corresponding Python class will be assigned as label.
+
+    If a class, object property, data property or annotation
+    property has no comment, it will be assigned the docstring of
+    the corresponding Python class.
+
+    `name_policy` specify wether and how the names in the ontology
+    should be updated.  Valid values are:
+      None          not changed
+      "uuid"        `name_prefix` followed by a global unique id (UUID).
+                    If the name is already valid accoridng to this standard
+                    it will not be regenerated.
+      "sequential"  `name_prefix` followed a sequantial number.
+    EMMO conventions imply ``name_policy=='uuid'``.
+
+    If `sync_imported` is true, all imported ontologies are also
+    updated.
+
+    The `class_docstring` argument specifies the annotation that
+    class docstrings are mapped to.  Defaults to "comment".
+    """
+    for cls in itertools.chain(
+        self.classes(),
+        self.object_properties(),
+        self.data_properties(),
+        self.annotation_properties(),
+    ):
+        if not hasattr(cls, "prefLabel"):
+            # no prefLabel - create new annotation property..
+            with self:
+                # pylint: disable=invalid-name,missing-class-docstring
+                # pylint: disable=unused-variable
+                class prefLabel(owlready2.label):
+                    pass
+
+            cls.prefLabel = [locstr(cls.__name__, lang="en")]
+        elif not cls.prefLabel:
+            cls.prefLabel.append(locstr(cls.__name__, lang="en"))
+        if class_docstring and hasattr(cls, "__doc__") and cls.__doc__:
+            getattr(cls, class_docstring).append(
+                locstr(inspect.cleandoc(cls.__doc__), lang="en")
+            )
+
+    for ind in self.individuals():
+        if not hasattr(ind, "prefLabel"):
+            # no prefLabel - create new annotation property..
+            with self:
+                # pylint: disable=invalid-name,missing-class-docstring
+                # pylint: disable=function-redefined
+                class prefLabel(owlready2.label):
+                    iri = "http://www.w3.org/2004/02/skos/core#prefLabel"
+
+            ind.prefLabel = [locstr(ind.name, lang="en")]
+        elif not ind.prefLabel:
+            ind.prefLabel.append(locstr(ind.name, lang="en"))
+
+    chain = itertools.chain(
+        self.classes(),
+        self.individuals(),
+        self.object_properties(),
+        self.data_properties(),
+        self.annotation_properties(),
+    )
+    if name_policy == "uuid":
+        for obj in chain:
+            try:
+                # Passing the following means that the name is valid
+                # and need not be regenerated.
+                if not obj.name.startswith(name_prefix):
+                    raise ValueError
+                uuid.UUID(obj.name.lstrip(name_prefix), version=5)
+            except ValueError:
+                obj.name = name_prefix + str(
+                    uuid.uuid5(uuid.NAMESPACE_DNS, obj.name)
+                )
+    elif name_policy == "sequential":
+        for obj in chain:
+            counter = 0
+            while f"{self.base_iri}{name_prefix}{counter}" in self:
+                counter += 1
+            obj.name = f"{name_prefix}{counter}"
+    elif name_policy is not None:
+        raise TypeError(f"invalid name_policy: {name_policy!r}")
+
+    if sync_imported:
+        for onto in self.imported_ontologies:
+            onto.sync_attributes()
+
+
+
+ +
+ + + +
+ + + +

+sync_python_names(self, annotations=('prefLabel', 'label', 'altLabel')) + + +

+ +
+ +

Update the python_name attribute of all properties.

+

The python_name attribute will be set to the first non-empty +annotation in the sequence of annotations in annotations for +the property.

+ +
+ Source code in ontopy/ontology.py +
def sync_python_names(self, annotations=("prefLabel", "label", "altLabel")):
+    """Update the `python_name` attribute of all properties.
+
+    The python_name attribute will be set to the first non-empty
+    annotation in the sequence of annotations in `annotations` for
+    the property.
+    """
+
+    def update(gen):
+        for prop in gen:
+            for annotation in annotations:
+                if hasattr(prop, annotation) and getattr(prop, annotation):
+                    prop.python_name = getattr(prop, annotation).first()
+                    break
+
+    update(
+        self.get_entities(
+            classes=False,
+            individuals=False,
+            object_properties=False,
+            data_properties=False,
+        )
+    )
+    update(
+        self.get_entities(
+            classes=False, individuals=False, annotation_properties=False
+        )
+    )
+
+
+
+ +
+ + + +
+ + + +

+sync_reasoner(self, reasoner='HermiT', include_imported=False, **kwargs) + + +

+ +
+ +

Update current ontology by running the given reasoner.

+

Supported values for reasoner are 'HermiT' (default), Pellet +and 'FaCT++'.

+

If include_imported is true, the reasoner will also reason +over imported ontologies. Note that this may be very slow.

+

Keyword arguments are passed to the underlying owlready2 function.

+ +
+ Source code in ontopy/ontology.py +
def sync_reasoner(
+    self, reasoner="HermiT", include_imported=False, **kwargs
+):
+    """Update current ontology by running the given reasoner.
+
+    Supported values for `reasoner` are 'HermiT' (default), Pellet
+    and 'FaCT++'.
+
+    If `include_imported` is true, the reasoner will also reason
+    over imported ontologies.  Note that this may be **very** slow.
+
+    Keyword arguments are passed to the underlying owlready2 function.
+    """
+    if reasoner == "FaCT++":
+        sync = sync_reasoner_factpp
+    elif reasoner == "Pellet":
+        sync = owlready2.sync_reasoner_pellet
+    elif reasoner == "HermiT":
+        sync = owlready2.sync_reasoner_hermit
+    else:
+        raise ValueError(
+            f"Unknown reasoner '{reasoner}'. Supported reasoners "
+            "are 'Pellet', 'HermiT' and 'FaCT++'."
+        )
+
+    # For some reason we must visit all entities once before running
+    # the reasoner...
+    list(self.get_entities())
+
+    with self:
+        if include_imported:
+            sync(self.world, **kwargs)
+        else:
+            sync(self, **kwargs)
+
+
+
+ +
+ + + + + +
+ +
+ +
+ + + +
+ + + +

+ +World (World) + + + + +

+ +
+ +

A subclass of owlready2.World.

+ +
+ Source code in ontopy/ontology.py +
class World(owlready2.World):
+    """A subclass of owlready2.World."""
+
+    def __init__(self, *args, **kwargs):
+        # Caches stored in the world
+        self._cached_catalogs = {}  # maps url to (mtime, iris, dirs)
+        self._iri_mappings = {}  # all iri mappings loaded so far
+        super().__init__(*args, **kwargs)
+
+    def get_ontology(
+        self,
+        base_iri: str = "emmo-inferred",
+        OntologyClass: "owlready2.Ontology" = None,
+        label_annotations: "Sequence" = None,
+    ) -> "Ontology":
+        # pylint: disable=too-many-branches
+        """Returns a new Ontology from `base_iri`.
+
+        Arguments:
+            base_iri: The base IRI of the ontology. May be one of:
+                - valid URL (possible excluding final .owl or .ttl)
+                - file name (possible excluding final .owl or .ttl)
+                - "emmo": load latest version of asserted EMMO
+                - "emmo-inferred": load latest version of inferred EMMO
+                  (default)
+                - "emmo-development": load latest inferred development
+                  version of EMMO. Until first stable release
+                  emmo-inferred and emmo-development will be the same.
+            OntologyClass: If given and `base_iri` doesn't correspond
+                to an existing ontology, a new ontology is created of
+                this Ontology subclass.  Defaults to `ontopy.Ontology`.
+            label_annotations: Sequence of label IRIs used for accessing
+                entities in the ontology given that they are in the ontology.
+                Label IRIs not in the ontology will need to be added to
+                ontologies in order to be accessible.
+                Defaults to DEFAULT_LABEL_ANNOTATIONS if set to None.
+        """
+        base_iri = base_iri.as_uri() if isinstance(base_iri, Path) else base_iri
+
+        if base_iri == "emmo":
+            base_iri = (
+                "http://emmo-repo.github.io/versions/1.0.0-beta4/emmo.ttl"
+            )
+        elif base_iri == "emmo-inferred":
+            base_iri = (
+                "https://emmo-repo.github.io/versions/1.0.0-beta4/"
+                "emmo-inferred.ttl"
+            )
+        elif base_iri == "emmo-development":
+            base_iri = (
+                "https://emmo-repo.github.io/versions/1.0.0-beta5/"
+                "emmo-inferred.ttl"
+            )
+
+        if base_iri in self.ontologies:
+            onto = self.ontologies[base_iri]
+        elif base_iri + "#" in self.ontologies:
+            onto = self.ontologies[base_iri + "#"]
+        elif base_iri + "/" in self.ontologies:
+            onto = self.ontologies[base_iri + "/"]
+        else:
+            if os.path.exists(base_iri):
+                iri = os.path.abspath(base_iri)
+            elif os.path.exists(base_iri + ".ttl"):
+                iri = os.path.abspath(base_iri + ".ttl")
+            elif os.path.exists(base_iri + ".owl"):
+                iri = os.path.abspath(base_iri + ".owl")
+            else:
+                iri = base_iri
+
+            if iri[-1] not in "/#":
+                iri += "#"
+
+            if OntologyClass is None:
+                OntologyClass = Ontology
+
+            onto = OntologyClass(self, iri)
+
+        if label_annotations:
+            onto.label_annotations = list(label_annotations)
+
+        return onto
+
+    def get_unabbreviated_triples(
+        self, subject=None, predicate=None, obj=None, blank=None
+    ):
+        # pylint: disable=invalid-name
+        """Returns all triples unabbreviated.
+
+        If any of the `subject`, `predicate` or `obj` arguments are given,
+        only matching triples will be returned.
+
+        If `blank` is given, it will be used to represent blank nodes.
+        """
+        return _get_unabbreviated_triples(
+            self, subject=subject, predicate=predicate, obj=obj, blank=blank
+        )
+
+
+ + + +
+ + + + + + + + + + +
+ + + +

+get_ontology(self, base_iri='emmo-inferred', OntologyClass=None, label_annotations=None) + + +

+ +
+ +

Returns a new Ontology from base_iri.

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
base_iristr

The base IRI of the ontology. May be one of: +- valid URL (possible excluding final .owl or .ttl) +- file name (possible excluding final .owl or .ttl) +- "emmo": load latest version of asserted EMMO +- "emmo-inferred": load latest version of inferred EMMO + (default) +- "emmo-development": load latest inferred development + version of EMMO. Until first stable release + emmo-inferred and emmo-development will be the same.

'emmo-inferred'
OntologyClassowlready2.Ontology

If given and base_iri doesn't correspond +to an existing ontology, a new ontology is created of +this Ontology subclass. Defaults to ontopy.Ontology.

None
label_annotationsSequence

Sequence of label IRIs used for accessing +entities in the ontology given that they are in the ontology. +Label IRIs not in the ontology will need to be added to +ontologies in order to be accessible. +Defaults to DEFAULT_LABEL_ANNOTATIONS if set to None.

None
+
+ Source code in ontopy/ontology.py +
def get_ontology(
+    self,
+    base_iri: str = "emmo-inferred",
+    OntologyClass: "owlready2.Ontology" = None,
+    label_annotations: "Sequence" = None,
+) -> "Ontology":
+    # pylint: disable=too-many-branches
+    """Returns a new Ontology from `base_iri`.
+
+    Arguments:
+        base_iri: The base IRI of the ontology. May be one of:
+            - valid URL (possible excluding final .owl or .ttl)
+            - file name (possible excluding final .owl or .ttl)
+            - "emmo": load latest version of asserted EMMO
+            - "emmo-inferred": load latest version of inferred EMMO
+              (default)
+            - "emmo-development": load latest inferred development
+              version of EMMO. Until first stable release
+              emmo-inferred and emmo-development will be the same.
+        OntologyClass: If given and `base_iri` doesn't correspond
+            to an existing ontology, a new ontology is created of
+            this Ontology subclass.  Defaults to `ontopy.Ontology`.
+        label_annotations: Sequence of label IRIs used for accessing
+            entities in the ontology given that they are in the ontology.
+            Label IRIs not in the ontology will need to be added to
+            ontologies in order to be accessible.
+            Defaults to DEFAULT_LABEL_ANNOTATIONS if set to None.
+    """
+    base_iri = base_iri.as_uri() if isinstance(base_iri, Path) else base_iri
+
+    if base_iri == "emmo":
+        base_iri = (
+            "http://emmo-repo.github.io/versions/1.0.0-beta4/emmo.ttl"
+        )
+    elif base_iri == "emmo-inferred":
+        base_iri = (
+            "https://emmo-repo.github.io/versions/1.0.0-beta4/"
+            "emmo-inferred.ttl"
+        )
+    elif base_iri == "emmo-development":
+        base_iri = (
+            "https://emmo-repo.github.io/versions/1.0.0-beta5/"
+            "emmo-inferred.ttl"
+        )
+
+    if base_iri in self.ontologies:
+        onto = self.ontologies[base_iri]
+    elif base_iri + "#" in self.ontologies:
+        onto = self.ontologies[base_iri + "#"]
+    elif base_iri + "/" in self.ontologies:
+        onto = self.ontologies[base_iri + "/"]
+    else:
+        if os.path.exists(base_iri):
+            iri = os.path.abspath(base_iri)
+        elif os.path.exists(base_iri + ".ttl"):
+            iri = os.path.abspath(base_iri + ".ttl")
+        elif os.path.exists(base_iri + ".owl"):
+            iri = os.path.abspath(base_iri + ".owl")
+        else:
+            iri = base_iri
+
+        if iri[-1] not in "/#":
+            iri += "#"
+
+        if OntologyClass is None:
+            OntologyClass = Ontology
+
+        onto = OntologyClass(self, iri)
+
+    if label_annotations:
+        onto.label_annotations = list(label_annotations)
+
+    return onto
+
+
+
+ +
+ + + +
+ + + +

+get_unabbreviated_triples(self, subject=None, predicate=None, obj=None, blank=None) + + +

+ +
+ +

Returns all triples unabbreviated.

+

If any of the subject, predicate or obj arguments are given, +only matching triples will be returned.

+

If blank is given, it will be used to represent blank nodes.

+ +
+ Source code in ontopy/ontology.py +
def get_unabbreviated_triples(
+    self, subject=None, predicate=None, obj=None, blank=None
+):
+    # pylint: disable=invalid-name
+    """Returns all triples unabbreviated.
+
+    If any of the `subject`, `predicate` or `obj` arguments are given,
+    only matching triples will be returned.
+
+    If `blank` is given, it will be used to represent blank nodes.
+    """
+    return _get_unabbreviated_triples(
+        self, subject=subject, predicate=predicate, obj=obj, blank=blank
+    )
+
+
+
+ +
+ + + + + +
+ +
+ +
+ + + + +
+ + + +

+flatten(items) + + +

+ +
+ +

Yield items from any nested iterable.

+ +
+ Source code in ontopy/ontology.py +
def flatten(items):
+    """Yield items from any nested iterable."""
+    for item in items:
+        if isinstance(item, Iterable) and not isinstance(item, (str, bytes)):
+            for sub_item in flatten(item):
+                yield sub_item
+        else:
+            yield item
+
+
+
+ +
+ + + +
+ + + +

+get_ontology(*args, **kwargs) + + +

+ +
+ +

Returns a new Ontology from base_iri.

+

This is a convenient function for calling World.get_ontology().

+ +
+ Source code in ontopy/ontology.py +
def get_ontology(*args, **kwargs):
+    """Returns a new Ontology from `base_iri`.
+
+    This is a convenient function for calling World.get_ontology()."""
+    return World().get_ontology(*args, **kwargs)
+
+
+
+ +
+ + + + + + +
+ +
+ +
+ + + + + + + + + + + + + +
+
+ + + + + +
+ + + +
+ + + +
+
+
+
+ + + + + + + + + + \ No newline at end of file diff --git a/0.6.1/api_reference/ontopy/patch/index.html b/0.6.1/api_reference/ontopy/patch/index.html new file mode 100644 index 000000000..422628321 --- /dev/null +++ b/0.6.1/api_reference/ontopy/patch/index.html @@ -0,0 +1,1994 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + patch - EMMOntoPy + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + Skip to content + + +
+
+ +
+ + + + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + + + + + +
+
+ + + + +

patch

+ + +
+ + +
+ +

This module injects some additional methods into owlready2 classes.

+ + + +
+ + + + + + + + + + + + +
+ + + +

+disjoint_with(self, reduce=False) + + +

+ +
+ +

Returns a generator with all classes that are disjoint with self.

+

If reduce is True, all classes that are a descendant of another class +will be excluded.

+ +
+ Source code in ontopy/patch.py +
def disjoint_with(self, reduce=False):
+    """Returns a generator with all classes that are disjoint with `self`.
+
+    If `reduce` is `True`, all classes that are a descendant of another class
+    will be excluded.
+    """
+    if reduce:
+        disjoint_set = set(self.disjoint_with())
+        for entity in disjoint_set.copy():
+            disjoint_set.difference_update(
+                entity.descendants(include_self=False)
+            )
+        for entity in disjoint_set:
+            yield entity
+    else:
+        for disjoint in self.disjoints():
+            for entity in disjoint.entities:
+                if entity is not self:
+                    yield entity
+
+
+
+ +
+ + + +
+ + + +

+get_annotations(self, all=False, imported=True) + + +

+ +
+ +

Returns a dict with non-empty annotations.

+

If all is True, also annotations with no value are included.

+

If imported is True, also include annotations defined in imported +ontologies.

+ +
+ Source code in ontopy/patch.py +
def get_annotations(
+    self, all=False, imported=True
+):  # pylint: disable=redefined-builtin
+    """Returns a dict with non-empty annotations.
+
+    If `all` is `True`, also annotations with no value are included.
+
+    If `imported` is `True`, also include annotations defined in imported
+    ontologies.
+    """
+    onto = self.namespace.ontology
+
+    annotations = {
+        str(get_preferred_label(_)): _._get_values_for_class(self)
+        for _ in onto.annotation_properties(imported=imported)
+    }
+    if all:
+        return annotations
+    return {key: value for key, value in annotations.items() if value}
+
+
+
+ +
+ + + +
+ + + +

+get_indirect_is_a(self, skip_classes=True) + + +

+ +
+ +

Returns the set of all isSubclassOf relations of self and its ancestors.

+

If skip_classes is True, indirect classes are not included in the +returned set.

+ +
+ Source code in ontopy/patch.py +
def get_indirect_is_a(self, skip_classes=True):
+    """Returns the set of all isSubclassOf relations of self and its ancestors.
+
+    If `skip_classes` is `True`, indirect classes are not included in the
+    returned set.
+    """
+    subclass_relations = set()
+    for entity in reversed(self.mro()):
+        for attr in "is_a", "equivalent_to":
+            if hasattr(entity, attr):
+                lst = getattr(entity, attr)
+                if skip_classes:
+                    subclass_relations.update(
+                        r
+                        for r in lst
+                        if not isinstance(r, owlready2.ThingClass)
+                    )
+                else:
+                    subclass_relations.update(lst)
+
+    subclass_relations.update(self.is_a)
+    return subclass_relations
+
+
+
+ +
+ + + +
+ + + +

+get_parents(self, strict=False) + + +

+ +
+ +

Returns a list of all parents.

+

If strict is True, parents that are parents of other parents are +excluded.

+ +
+ Source code in ontopy/patch.py +
def get_parents(self, strict=False):
+    """Returns a list of all parents.
+
+    If `strict` is `True`, parents that are parents of other parents are
+    excluded.
+    """
+    if strict:
+        parents = self.get_parents()
+        for entity in parents.copy():
+            parents.difference_update(entity.ancestors(include_self=False))
+        return parents
+    if isinstance(self, ThingClass):
+        return {cls for cls in self.is_a if isinstance(cls, ThingClass)}
+    if isinstance(self, owlready2.ObjectPropertyClass):
+        return {
+            cls
+            for cls in self.is_a
+            if isinstance(cls, owlready2.ObjectPropertyClass)
+        }
+    raise EMMOntoPyException(
+        "self has no parents - this should not be possible!"
+    )
+
+
+
+ +
+ + + +
+ + + +

+get_preferred_label(self) + + +

+ +
+ +

Returns the preferred label as a string (not list).

+

The following heuristics is used: + - if prefLabel annotation property exists, returns the first prefLabel + - if label annotation property exists, returns the first label + - otherwise return the name

+ +
+ Source code in ontopy/patch.py +
def get_preferred_label(self):
+    """Returns the preferred label as a string (not list).
+
+    The following heuristics is used:
+      - if prefLabel annotation property exists, returns the first prefLabel
+      - if label annotation property exists, returns the first label
+      - otherwise return the name
+    """
+    if hasattr(self, "prefLabel") and self.prefLabel:
+        return self.prefLabel[0]
+    if hasattr(self, "label") and self.label:
+        return self.label.first()
+    return self.name
+
+
+
+ +
+ + + +
+ + + +

+get_typename(self) + + +

+ +
+ +

Get restriction type label/name.

+ +
+ Source code in ontopy/patch.py +
def get_typename(self):
+    """Get restriction type label/name."""
+    return owlready2.class_construct._restriction_type_2_label[self.type]
+
+
+
+ +
+ + + +
+ + + +

+has(self, name) + + +

+ +
+ +

Returns true if name

+ +
+ Source code in ontopy/patch.py +
def has(self, name):
+    """Returns true if `name`"""
+    return name in set(self.keys())
+
+
+
+ +
+ + + +
+ + + +

+items(self) + + +

+ +
+ +

Return a generator over annotation property (name, value_list) +pairs associates with this ontology.

+ +
+ Source code in ontopy/patch.py +
def items(self):
+    """Return a generator over annotation property (name, value_list)
+    pairs associates with this ontology."""
+    namespace = self.namespace
+    for annotation in namespace.annotation_properties():
+        if namespace._has_data_triple_spod(
+            s=namespace.storid, p=annotation.storid
+        ):
+            yield annotation, getattr(self, annotation.name)
+
+
+
+ +
+ + + +
+ + + +

+keys(self) + + +

+ +
+ +

Return a generator over annotation property names associated +with this ontology.

+ +
+ Source code in ontopy/patch.py +
def keys(self):
+    """Return a generator over annotation property names associated
+    with this ontology."""
+    namespace = self.namespace
+    for annotation in namespace.annotation_properties():
+        if namespace._has_data_triple_spod(
+            s=namespace.storid, p=annotation.storid
+        ):
+            yield annotation
+
+
+
+ +
+ + + +
+ + + +

+namespace_init(self, world_or_ontology, base_iri, name=None) + + +

+ +
+ +

init function for the Namespace class.

+ +
+ Source code in ontopy/patch.py +
def namespace_init(self, world_or_ontology, base_iri, name=None):
+    """__init__ function for the `Namespace` class."""
+    orig_namespace_init(self, world_or_ontology, base_iri, name)
+    if self.name.endswith(".ttl"):
+        self.name = self.name[:-4]
+
+
+
+ +
+ + + +
+ + + +

+render_func(entity) + + +

+ +
+ +

Improve default rendering of entities.

+ +
+ Source code in ontopy/patch.py +
def render_func(entity):
+    """Improve default rendering of entities."""
+    if hasattr(entity, "prefLabel") and entity.prefLabel:
+        name = entity.prefLabel[0]
+    elif hasattr(entity, "label") and entity.label:
+        name = entity.label[0]
+    elif hasattr(entity, "altLabel") and entity.altLabel:
+        name = entity.altLabel[0]
+    else:
+        name = entity.name
+    return f"{entity.namespace.name}.{name}"
+
+
+
+ +
+ + + + + + +
+ +
+ +
+ + + + + + + + + + + + + +
+
+ + + + + +
+ + + +
+ + + +
+
+
+
+ + + + + + + + + + \ No newline at end of file diff --git a/0.6.1/api_reference/ontopy/utils/index.html b/0.6.1/api_reference/ontopy/utils/index.html new file mode 100644 index 000000000..4de12640a --- /dev/null +++ b/0.6.1/api_reference/ontopy/utils/index.html @@ -0,0 +1,3422 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + utils - EMMOntoPy + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + Skip to content + + +
+
+ +
+ + + + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + + + + + +
+
+ + + + +

utils

+ + +
+ + +
+ +

Some generic utility functions.

+ + + +
+ + + + + + + + + +
+ + + +

+ +AmbiguousLabelError (LookupError, AttributeError, EMMOntoPyException) + + + + +

+ +
+ +

Error raised when a label is ambiguous.

+ +
+ Source code in ontopy/utils.py +
class AmbiguousLabelError(LookupError, AttributeError, EMMOntoPyException):
+    """Error raised when a label is ambiguous."""
+
+
+ + +
+ +
+ + + +
+ + + +

+ +EMMOntoPyException (Exception) + + + + +

+ +
+ +

A BaseException class for EMMOntoPy

+ +
+ Source code in ontopy/utils.py +
class EMMOntoPyException(Exception):
+    """A BaseException class for EMMOntoPy"""
+
+
+ + +
+ +
+ + + +
+ + + +

+ +EMMOntoPyWarning (Warning) + + + + +

+ +
+ +

A BaseWarning class for EMMOntoPy

+ +
+ Source code in ontopy/utils.py +
class EMMOntoPyWarning(Warning):
+    """A BaseWarning class for EMMOntoPy"""
+
+
+ + +
+ +
+ + + +
+ + + +

+ +EntityClassDefinitionError (EMMOntoPyException) + + + + +

+ +
+ +

Error in ThingClass definition.

+ +
+ Source code in ontopy/utils.py +
class EntityClassDefinitionError(EMMOntoPyException):
+    """Error in ThingClass definition."""
+
+
+ + +
+ +
+ + + +
+ + + +

+ +IncompatibleVersion (EMMOntoPyWarning) + + + + +

+ +
+ +

An installed dependency version may be incompatible with a functionality +of this package - or rather an outcome of a functionality. +This is not critical, hence this is only a warning.

+ +
+ Source code in ontopy/utils.py +
class IncompatibleVersion(EMMOntoPyWarning):
+    """An installed dependency version may be incompatible with a functionality
+    of this package - or rather an outcome of a functionality.
+    This is not critical, hence this is only a warning."""
+
+
+ + +
+ +
+ + + +
+ + + +

+ +IndividualWarning (EMMOntoPyWarning) + + + + +

+ +
+ +

A warning related to an individual, e.g. punning.

+ +
+ Source code in ontopy/utils.py +
class IndividualWarning(EMMOntoPyWarning):
+    """A warning related to an individual, e.g. punning."""
+
+
+ + +
+ +
+ + + +
+ + + +

+ +LabelDefinitionError (EMMOntoPyException) + + + + +

+ +
+ +

Error in label definition.

+ +
+ Source code in ontopy/utils.py +
class LabelDefinitionError(EMMOntoPyException):
+    """Error in label definition."""
+
+
+ + +
+ +
+ + + +
+ + + +

+ +NoSuchLabelError (LookupError, AttributeError, EMMOntoPyException) + + + + +

+ +
+ +

Error raised when a label cannot be found.

+ +
+ Source code in ontopy/utils.py +
class NoSuchLabelError(LookupError, AttributeError, EMMOntoPyException):
+    """Error raised when a label cannot be found."""
+
+
+ + +
+ +
+ + + +
+ + + +

+ +ReadCatalogError (OSError) + + + + +

+ +
+ +

Error reading catalog file.

+ +
+ Source code in ontopy/utils.py +
class ReadCatalogError(IOError):
+    """Error reading catalog file."""
+
+
+ + +
+ +
+ + + +
+ + + +

+ +UnknownVersion (EMMOntoPyException) + + + + +

+ +
+ +

Cannot retrieve version from a package.

+ +
+ Source code in ontopy/utils.py +
class UnknownVersion(EMMOntoPyException):
+    """Cannot retrieve version from a package."""
+
+
+ + +
+ +
+ + + + +
+ + + +

+annotate_source(onto, imported=True) + + +

+ +
+ +

Annotate all entities with the base IRI of the ontology using +rdfs:isDefinedBy annotations.

+

If imported is true, all entities in imported sub-ontologies will +also be annotated.

+

This is contextual information that is otherwise lost when the ontology +is squashed and/or inferred.

+ +
+ Source code in ontopy/utils.py +
def annotate_source(onto, imported=True):
+    """Annotate all entities with the base IRI of the ontology using
+    `rdfs:isDefinedBy` annotations.
+
+    If `imported` is true, all entities in imported sub-ontologies will
+    also be annotated.
+
+    This is contextual information that is otherwise lost when the ontology
+    is squashed and/or inferred.
+    """
+    source = onto._abbreviate(
+        "http://www.w3.org/2000/01/rdf-schema#isDefinedBy"
+    )
+    for entity in onto.get_entities(imported=imported):
+        triple = (
+            entity.storid,
+            source,
+            onto._abbreviate(entity.namespace.ontology.base_iri),
+        )
+        if not onto._has_obj_triple_spo(*triple):
+            onto._add_obj_triple_spo(*triple)
+
+
+
+ +
+ + + +
+ + + +

+asstring(expr, link='{label}', recursion_depth=0, exclude_object=False, ontology=None) + + +

+ +
+ +

Returns a string representation of expr.

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
expr

The entity, restriction or a logical expression or these +to represent.

required
link

A template for links. May contain the following variables: +- {iri}: The full IRI of the concept. +- {name}: Name-part of IRI. +- {ref}: "#{name}" if the base iri of hte ontology has the same + root as {iri}, otherwise "{iri}". +- {label}: The label of the concept. +- {lowerlabel}: The label of the concept in lower case and with + spaces replaced with hyphens.

'{label}'
recursion_depth

Recursion depth. Only intended for internal use.

0
exclude_object

If true, the object will be excluded in restrictions.

False
ontology

Ontology object.

None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
str

String representation of expr.

+
+ Source code in ontopy/utils.py +
def asstring(  # pylint: disable=too-many-return-statements,too-many-branches,too-many-statements
+    expr,
+    link="{label}",
+    recursion_depth=0,
+    exclude_object=False,
+    ontology=None,
+) -> str:
+    """Returns a string representation of `expr`.
+
+    Arguments:
+        expr: The entity, restriction or a logical expression or these
+            to represent.
+        link: A template for links.  May contain the following variables:
+            - {iri}: The full IRI of the concept.
+            - {name}: Name-part of IRI.
+            - {ref}: "#{name}" if the base iri of hte ontology has the same
+              root as {iri}, otherwise "{iri}".
+            - {label}: The label of the concept.
+            - {lowerlabel}: The label of the concept in lower case and with
+              spaces replaced with hyphens.
+        recursion_depth: Recursion depth. Only intended for internal use.
+        exclude_object: If true, the object will be excluded in restrictions.
+        ontology: Ontology object.
+
+    Returns:
+        String representation of `expr`.
+    """
+    if ontology is None:
+        ontology = expr.ontology
+
+    def fmt(entity):
+        """Returns the formatted label of an entity."""
+        if isinstance(entity, str):
+            if ontology and ontology.world[entity]:
+                iri = ontology.world[entity].iri
+            elif (
+                ontology
+                and re.match("^[a-zA-Z0-9_+-]+$", entity)
+                and entity in ontology
+            ):
+                iri = ontology[entity].iri
+            else:
+                # This may not be a valid IRI, but the best we can do
+                iri = entity
+            label = entity
+        else:
+            iri = entity.iri
+            label = get_label(entity)
+        name = getiriname(iri)
+        start = iri.split("#", 1)[0] if "#" in iri else iri.rsplit("/", 1)[0]
+        ref = f"#{name}" if ontology.base_iri.startswith(start) else iri
+        return link.format(
+            entity=entity,
+            name=name,
+            ref=ref,
+            iri=iri,
+            label=label,
+            lowerlabel=label.lower().replace(" ", "-"),
+        )
+
+    if isinstance(expr, str):
+        # return link.format(name=expr)
+        return fmt(expr)
+    if isinstance(expr, owlready2.Restriction):
+        rlabel = owlready2.class_construct._restriction_type_2_label[expr.type]
+
+        if isinstance(
+            expr.property,
+            (owlready2.ObjectPropertyClass, owlready2.DataPropertyClass),
+        ):
+            res = fmt(expr.property)
+        elif isinstance(expr.property, owlready2.Inverse):
+            string = asstring(
+                expr.property.property,
+                link,
+                recursion_depth + 1,
+                ontology=ontology,
+            )
+            res = f"Inverse({string})"
+        else:
+            print(
+                f"*** WARNING: unknown restriction property: {expr.property!r}"
+            )
+            res = fmt(expr.property)
+
+        if not rlabel:
+            pass
+        elif expr.type in (owlready2.MIN, owlready2.MAX, owlready2.EXACTLY):
+            res += f" {rlabel} {expr.cardinality}"
+        elif expr.type in (
+            owlready2.SOME,
+            owlready2.ONLY,
+            owlready2.VALUE,
+            owlready2.HAS_SELF,
+        ):
+            res += f" {rlabel}"
+        else:
+            print("*** WARNING: unknown relation", expr, rlabel)
+            res += f" {rlabel}"
+
+        if not exclude_object:
+            string = asstring(
+                expr.value, link, recursion_depth + 1, ontology=ontology
+            )
+            res += (
+                f" {string!r}" if isinstance(expr.value, str) else f" {string}"
+            )
+        return res
+    if isinstance(expr, owlready2.Or):
+        res = " or ".join(
+            [
+                asstring(c, link, recursion_depth + 1, ontology=ontology)
+                for c in expr.Classes
+            ]
+        )
+        return res if recursion_depth == 0 else f"({res})"
+    if isinstance(expr, owlready2.And):
+        res = " and ".join(
+            [
+                asstring(c, link, recursion_depth + 1, ontology=ontology)
+                for c in expr.Classes
+            ]
+        )
+        return res if recursion_depth == 0 else f"({res})"
+    if isinstance(expr, owlready2.Not):
+        string = asstring(
+            expr.Class, link, recursion_depth + 1, ontology=ontology
+        )
+        return f"not {string}"
+    if isinstance(expr, owlready2.ThingClass):
+        return fmt(expr)
+    if isinstance(expr, owlready2.PropertyClass):
+        return fmt(expr)
+    if isinstance(expr, owlready2.Thing):  # instance (individual)
+        return fmt(expr)
+    if isinstance(expr, owlready2.class_construct.Inverse):
+        return f"inverse({fmt(expr.property)})"
+    if isinstance(expr, owlready2.disjoint.AllDisjoint):
+        return fmt(expr)
+
+    if isinstance(expr, (bool, int, float)):
+        return repr(expr)
+    # Check for subclasses
+    if inspect.isclass(expr):
+        if issubclass(expr, (bool, int, float, str)):
+            return fmt(expr.__class__.__name__)
+        if issubclass(expr, datetime.date):
+            return "date"
+        if issubclass(expr, datetime.time):
+            return "datetime"
+        if issubclass(expr, datetime.datetime):
+            return "datetime"
+
+    raise RuntimeError(f"Unknown expression: {expr!r} (type: {type(expr)!r})")
+
+
+
+ +
+ + + +
+ + + +

+camelsplit(string) + + +

+ +
+ +

Splits CamelCase string before upper case letters (except +if there is a sequence of upper case letters).

+ +
+ Source code in ontopy/utils.py +
def camelsplit(string):
+    """Splits CamelCase string before upper case letters (except
+    if there is a sequence of upper case letters)."""
+    if len(string) < 2:
+        return string
+    result = []
+    prev_lower = False
+    prev_isspace = True
+    char = string[0]
+    for next_char in string[1:]:
+        if (not prev_isspace and char.isupper() and next_char.islower()) or (
+            prev_lower and char.isupper()
+        ):
+            result.append(" ")
+        result.append(char)
+        prev_lower = char.islower()
+        prev_isspace = char.isspace()
+        char = next_char
+    result.append(char)
+    return "".join(result)
+
+
+
+ +
+ + + +
+ + + +

+convert_imported(input_ontology, output_ontology, input_format=None, output_format='xml', url_from_catalog=None, catalog_file='catalog-v001.xml') + + +

+ +
+ +

Convert imported ontologies.

+

Store the output in a directory structure matching the source +files. This require catalog file(s) to be present.

+
+

Warning

+

To convert to Turtle (.ttl) format, you must have installed +rdflib>=6.0.0. See Known issues for +more information.

+
+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
input_ontologyUnion[Path, str]

input ontology file name

required
output_ontologyUnion[Path, str]

output ontology file path. The directory part of +output will be the root of the generated directory structure

required
input_formatOptional[str]

input format. The default is to infer from +input_ontology

None
output_formatstr

output format. The default is to infer from +output_ontology

'xml'
url_from_catalogOptional[bool]

Whether to read urls form catalog file. +If False, the catalog file will be used if it exists.

None
catalog_filestr

name of catalog file, that maps ontology IRIs to +local file names

'catalog-v001.xml'
+
+ Source code in ontopy/utils.py +
def convert_imported(  # pylint: disable=too-many-arguments,too-many-locals
+    input_ontology: "Union[Path, str]",
+    output_ontology: "Union[Path, str]",
+    input_format: "Optional[str]" = None,
+    output_format: str = "xml",
+    url_from_catalog: "Optional[bool]" = None,
+    catalog_file: str = "catalog-v001.xml",
+):
+    """Convert imported ontologies.
+
+    Store the output in a directory structure matching the source
+    files.  This require catalog file(s) to be present.
+
+    Warning:
+        To convert to Turtle (`.ttl`) format, you must have installed
+        `rdflib>=6.0.0`. See [Known issues](../../../#known-issues) for
+        more information.
+
+    Args:
+        input_ontology: input ontology file name
+        output_ontology: output ontology file path. The directory part of
+            `output` will be the root of the generated directory structure
+        input_format: input format. The default is to infer from
+            `input_ontology`
+        output_format: output format. The default is to infer from
+            `output_ontology`
+        url_from_catalog: Whether to read urls form catalog file.
+            If False, the catalog file will be used if it exists.
+        catalog_file: name of catalog file, that maps ontology IRIs to
+            local file names
+    """
+    inroot = os.path.dirname(os.path.abspath(input_ontology))
+    outroot = os.path.dirname(os.path.abspath(output_ontology))
+    outext = os.path.splitext(output_ontology)[1]
+
+    if url_from_catalog is None:
+        url_from_catalog = os.path.exists(os.path.join(inroot, catalog_file))
+
+    if url_from_catalog:
+        iris, dirs = read_catalog(
+            inroot, catalog_file=catalog_file, recursive=True, return_paths=True
+        )
+
+        # Create output dirs and copy catalog files
+        for indir in dirs:
+            outdir = os.path.normpath(
+                os.path.join(outroot, os.path.relpath(indir, inroot))
+            )
+            if not os.path.exists(outdir):
+                os.makedirs(outdir)
+            with open(
+                os.path.join(indir, catalog_file), mode="rt", encoding="utf8"
+            ) as handle:
+                content = handle.read()
+            for path in iris.values():
+                newpath = os.path.splitext(path)[0] + outext
+                content = content.replace(
+                    os.path.basename(path), os.path.basename(newpath)
+                )
+            with open(
+                os.path.join(outdir, catalog_file), mode="wt", encoding="utf8"
+            ) as handle:
+                handle.write(content)
+    else:
+        iris = {}
+
+    outpaths = set()
+
+    def recur(graph, outext):
+        for imported in graph.objects(
+            predicate=URIRef("http://www.w3.org/2002/07/owl#imports")
+        ):
+            inpath = iris.get(str(imported), str(imported))
+            if inpath.startswith(("http://", "https://", "ftp://")):
+                outpath = os.path.join(outroot, inpath.split("/")[-1])
+            else:
+                outpath = os.path.join(outroot, os.path.relpath(inpath, inroot))
+            outpath = os.path.splitext(os.path.normpath(outpath))[0] + outext
+            if outpath not in outpaths:
+                outpaths.add(outpath)
+                fmt = (
+                    input_format
+                    if input_format
+                    else guess_format(inpath, fmap=FMAP)
+                )
+                new_graph = Graph()
+                new_graph.parse(iris.get(inpath, inpath), format=fmt)
+                new_graph.serialize(destination=outpath, format=output_format)
+                recur(new_graph, outext)
+
+    # Write output files
+    fmt = (
+        input_format
+        if input_format
+        else guess_format(input_ontology, fmap=FMAP)
+    )
+
+    if not _validate_installed_version(
+        package="rdflib", min_version="6.0.0"
+    ) and (output_format == FMAP.get("ttl", "") or outext == "ttl"):
+        from rdflib import (  # pylint: disable=import-outside-toplevel
+            __version__ as __rdflib_version__,
+        )
+
+        warnings.warn(
+            IncompatibleVersion(
+                "To correctly convert to Turtle format, rdflib must be "
+                "version 6.0.0 or greater, however, the detected rdflib "
+                "version used by your Python interpreter is "
+                f"{__rdflib_version__!r}. For more information see the "
+                "'Known issues' section of the README."
+            )
+        )
+
+    graph = Graph()
+    try:
+        graph.parse(input_ontology, format=fmt)
+    except PluginException as exc:  # Add input_ontology to exception msg
+        raise PluginException(
+            f'Cannot load "{input_ontology}": {exc.msg}'
+        ).with_traceback(exc.__traceback__)
+    graph.serialize(destination=output_ontology, format=output_format)
+    recur(graph, outext)
+
+
+
+ +
+ + + +
+ + + +

+directory_layout(onto) + + +

+ +
+ +

Analyse IRIs of imported ontologies and suggested a directory +layout for saving recursively.

+ +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
onto

Ontology to analyse.

required
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
layout

A dict mapping ontology objects to relative path names + derived from the ontology IRIs. No file name extension are + added.

+

Examples:

+

Assume that our ontology onto has IRI ex:onto. If it directly +or indirectly imports ontologies with IRIs ex:A/ontoA, ex:B/ontoB +and ex:A/C/ontoC, this function will return the following dict:

+
{
+    onto: "onto",
+    ontoA: "A/ontoA",
+    ontoB: "B/ontoB",
+    ontoC: "A/C/ontoC",
+}
+
+

where ontoA, ontoB and ontoC are imported Ontology objects.

+ +
+ Source code in ontopy/utils.py +
def directory_layout(onto):
+    """Analyse IRIs of imported ontologies and suggested a directory
+    layout for saving recursively.
+
+    Arguments:
+        onto: Ontology to analyse.
+
+    Returns:
+        layout: A dict mapping ontology objects to relative path names
+            derived from the ontology IRIs. No file name extension are
+            added.
+
+    Example:
+        Assume that our ontology `onto` has IRI `ex:onto`. If it directly
+        or indirectly imports ontologies with IRIs `ex:A/ontoA`, `ex:B/ontoB`
+        and `ex:A/C/ontoC`, this function will return the following dict:
+
+            {
+                onto: "onto",
+                ontoA: "A/ontoA",
+                ontoB: "B/ontoB",
+                ontoC: "A/C/ontoC",
+            }
+
+        where `ontoA`, `ontoB` and `ontoC` are imported Ontology objects.
+    """
+    layout = {}
+
+    def recur(o):
+        for imported in o.imported_ontologies:
+            if imported not in layout:
+                recur(imported)
+        baseiri = o.base_iri.rstrip("/#")
+
+        # Some heuristics here to reproduce the EMMO layout.
+        # It might not apply to all ontologies, so maybe it should be
+        # made optional?  Alternatively, change EMMO ontology IRIs to
+        # match the directory layout.
+        emmolayout = (
+            any(
+                oo.base_iri.startswith(baseiri + "/")
+                for oo in o.imported_ontologies
+            )
+            or o.base_iri == "http://emmo.info/emmo/mereocausality#"
+        )
+
+        layout[o] = (
+            baseiri + "/" + os.path.basename(baseiri) if emmolayout else baseiri
+        )
+
+    recur(onto)
+
+    # Strip off initial common prefix from all paths
+    prefix = os.path.commonprefix(list(layout.values()))
+    for o, path in layout.items():
+        layout[o] = path[len(prefix) :].lstrip("/")
+
+    return layout
+
+
+
+ +
+ + + +
+ + + +

+english(string) + + +

+ +
+ +

Returns string as an English location string.

+ +
+ Source code in ontopy/utils.py +
def english(string):
+    """Returns `string` as an English location string."""
+    return owlready2.locstr(string, lang="en")
+
+
+
+ +
+ + + +
+ + + +

+get_format(outfile, default, fmt=None) + + +

+ +
+ +

Infer format from outfile and format.

+ +
+ Source code in ontopy/utils.py +
def get_format(outfile: str, default: str, fmt: str = None):
+    """Infer format from outfile and format."""
+    if fmt is None:
+        fmt = os.path.splitext(outfile)[1]
+    if not fmt:
+        fmt = default
+    return fmt.lstrip(".")
+
+
+
+ +
+ + + +
+ + + +

+get_label(entity) + + +

+ +
+ +

Returns the label of an entity.

+ +
+ Source code in ontopy/utils.py +
def get_label(entity):
+    """Returns the label of an entity."""
+    if hasattr(entity, "prefLabel") and entity.prefLabel:
+        return entity.prefLabel.first()
+    if hasattr(entity, "label") and entity.label:
+        return entity.label.first()
+    if hasattr(entity, "__name__"):
+        return entity.__name__
+    if hasattr(entity, "name"):
+        return str(entity.name)
+    if isinstance(entity, str):
+        return entity
+    return repr(entity)
+
+
+
+ +
+ + + +
+ + + +

+getiriname(iri) + + +

+ +
+ +

Return name part of an IRI.

+

The name part is what follows after the last slash or hash.

+ +
+ Source code in ontopy/utils.py +
def getiriname(iri):
+    """Return name part of an IRI.
+
+    The name part is what follows after the last slash or hash.
+    """
+    res = urllib.parse.urlparse(iri)
+    return res.fragment if res.fragment else res.path.rsplit("/", 1)[-1]
+
+
+
+ +
+ + + +
+ + + +

+infer_version(iri, version_iri) + + +

+ +
+ +

Infer version from IRI and versionIRI.

+ +
+ Source code in ontopy/utils.py +
def infer_version(iri, version_iri):
+    """Infer version from IRI and versionIRI."""
+    if str(version_iri[: len(iri)]) == str(iri):
+        version = version_iri[len(iri) :].lstrip("/")
+    else:
+        j = 0
+        version_parts = []
+        for i, char in enumerate(iri):
+            while i + j < len(version_iri) and char != version_iri[i + j]:
+                version_parts.append(version_iri[i + j])
+                j += 1
+        version = "".join(version_parts).lstrip("/").rstrip("/#")
+
+    if "/" in version:
+        raise ValueError(
+            f"version IRI {version_iri!r} is not consistent with base IRI "
+            f"{iri!r}"
+        )
+    return version
+
+
+
+ +
+ + + +
+ + + +

+isinteractive() + + +

+ +
+ +

Returns true if we are running from an interactive interpreater, +false otherwise.

+ +
+ Source code in ontopy/utils.py +
def isinteractive():
+    """Returns true if we are running from an interactive interpreater,
+    false otherwise."""
+    return bool(
+        hasattr(__builtins__, "__IPYTHON__")
+        or sys.flags.interactive
+        or hasattr(sys, "ps1")
+    )
+
+
+
+ +
+ + + +
+ + + +

+normalise_url(url) + + +

+ +
+ +

Returns url in a normalised form.

+ +
+ Source code in ontopy/utils.py +
def normalise_url(url):
+    """Returns `url` in a normalised form."""
+    splitted = urllib.parse.urlsplit(url)
+    components = list(splitted)
+    components[2] = os.path.normpath(splitted.path)
+    return urllib.parse.urlunsplit(components)
+
+
+
+ +
+ + + +
+ + + +

+read_catalog(uri, catalog_file='catalog-v001.xml', baseuri=None, recursive=False, relative_to=None, return_paths=False, visited_iris=None, visited_paths=None) + + +

+ +
+ +

Reads a Protègè catalog file and returns as a dict.

+

The returned dict maps the ontology IRI (name) to its actual +location (URI). The location can be either an absolute file path +or a HTTP, HTTPS or FTP web location.

+

uri is a string locating the catalog file. It may be a http or +https web location or a file path.

+

The catalog_file argument spesifies the catalog file name and is +used if path is used when recursive is true or when path is a +directory.

+

If baseuri is not None, it will be used as the base URI for the +mapped locations. Otherwise it defaults to uri with its final +component omitted.

+

If recursive is true, catalog files in sub-folders are also read.

+

if relative_to is given, the paths in the returned dict will be +relative to this path.

+

If return_paths is true, a set of directory paths to source +files is returned in addition to the default dict.

+

The visited_uris and visited_paths arguments are only intended for +internal use to avoid infinite recursions.

+

A ReadCatalogError is raised if the catalog file cannot be found.

+ +
+ Source code in ontopy/utils.py +
def read_catalog(  # pylint: disable=too-many-locals,too-many-statements,too-many-arguments
+    uri,
+    catalog_file="catalog-v001.xml",
+    baseuri=None,
+    recursive=False,
+    relative_to=None,
+    return_paths=False,
+    visited_iris=None,
+    visited_paths=None,
+):
+    """Reads a Protègè catalog file and returns as a dict.
+
+    The returned dict maps the ontology IRI (name) to its actual
+    location (URI).  The location can be either an absolute file path
+    or a HTTP, HTTPS or FTP web location.
+
+    `uri` is a string locating the catalog file. It may be a http or
+    https web location or a file path.
+
+    The `catalog_file` argument spesifies the catalog file name and is
+    used if `path` is used when `recursive` is true or when `path` is a
+    directory.
+
+    If `baseuri` is not None, it will be used as the base URI for the
+    mapped locations.  Otherwise it defaults to `uri` with its final
+    component omitted.
+
+    If `recursive` is true, catalog files in sub-folders are also read.
+
+    if `relative_to` is given, the paths in the returned dict will be
+    relative to this path.
+
+    If `return_paths` is true, a set of directory paths to source
+    files is returned in addition to the default dict.
+
+    The `visited_uris` and `visited_paths` arguments are only intended for
+    internal use to avoid infinite recursions.
+
+    A ReadCatalogError is raised if the catalog file cannot be found.
+    """
+    # pylint: disable=too-many-branches
+
+    # Protocols supported by urllib.request
+    web_protocols = "http://", "https://", "ftp://"
+    uri = str(uri)  # in case uri is a pathlib.Path object
+    iris = visited_iris if visited_iris else {}
+    dirs = visited_paths if visited_paths else set()
+    if uri in iris:
+        return (iris, dirs) if return_paths else iris
+
+    if uri.startswith(web_protocols):
+        # Call read_catalog() recursively to ensure that the temporary
+        # file is properly cleaned up
+        with tempfile.TemporaryDirectory() as tmpdir:
+            destfile = os.path.join(tmpdir, catalog_file)
+            uris = {  # maps uri to base
+                uri: (baseuri if baseuri else os.path.dirname(uri)),
+                f'{uri.rstrip("/")}/{catalog_file}': (
+                    baseuri if baseuri else uri.rstrip("/")
+                ),
+                f"{os.path.dirname(uri)}/{catalog_file}": (
+                    os.path.dirname(uri)
+                ),
+            }
+            for url, base in uris.items():
+                try:
+                    # The URL can only contain the schemes from `web_protocols`.
+                    _, msg = urllib.request.urlretrieve(url, destfile)  # nosec
+                except urllib.request.URLError:
+                    continue
+                else:
+                    if "Content-Length" not in msg:
+                        continue
+
+                    return read_catalog(
+                        destfile,
+                        catalog_file=catalog_file,
+                        baseuri=baseuri if baseuri else base,
+                        recursive=recursive,
+                        return_paths=return_paths,
+                        visited_iris=iris,
+                        visited_paths=dirs,
+                    )
+            raise ReadCatalogError(
+                "Cannot download catalog from URLs: " + ", ".join(uris)
+            )
+    elif uri.startswith("file://"):
+        path = uri[7:]
+    else:
+        path = uri
+
+    if os.path.isdir(path):
+        dirname = os.path.abspath(path)
+        filepath = os.path.join(dirname, catalog_file)
+    else:
+        catalog_file = os.path.basename(path)
+        filepath = os.path.abspath(path)
+        dirname = os.path.dirname(filepath)
+
+    def gettag(entity):
+        return entity.tag.rsplit("}", 1)[-1]
+
+    def load_catalog(filepath):
+        if not os.path.exists(filepath):
+            raise ReadCatalogError("No such catalog file: " + filepath)
+        dirname = os.path.normpath(os.path.dirname(filepath))
+        dirs.add(baseuri if baseuri else dirname)
+        xml = ET.parse(filepath)
+        root = xml.getroot()
+        if gettag(root) != "catalog":
+            raise ReadCatalogError(
+                f"expected root tag of catalog file {filepath!r} to be "
+                '"catalog"'
+            )
+        for child in root:
+            if gettag(child) == "uri":
+                load_uri(child, dirname)
+            elif gettag(child) == "group":
+                for uri in child:
+                    load_uri(uri, dirname)
+
+    def load_uri(uri, dirname):
+        if gettag(uri) != "uri":
+            raise ValueError(f"{gettag(uri)!r} should be 'uri'.")
+        uri_as_str = uri.attrib["uri"]
+        if uri_as_str.startswith(web_protocols):
+            url = uri_as_str
+        else:
+            uri_as_str = os.path.normpath(uri_as_str)
+            if baseuri and baseuri.startswith(web_protocols):
+                url = f"{baseuri}/{uri_as_str}"
+            else:
+                url = os.path.join(baseuri if baseuri else dirname, uri_as_str)
+
+        iris.setdefault(uri.attrib["name"], url)
+        if recursive:
+            directory = os.path.dirname(url)
+            if directory not in dirs:
+                catalog = os.path.join(directory, catalog_file)
+                if catalog.startswith(web_protocols):
+                    iris_, dirs_ = read_catalog(
+                        catalog,
+                        catalog_file=catalog_file,
+                        baseuri=None,
+                        recursive=recursive,
+                        return_paths=True,
+                        visited_iris=iris,
+                        visited_paths=dirs,
+                    )
+                    iris.update(iris_)
+                    dirs.update(dirs_)
+                else:
+                    load_catalog(catalog)
+
+    load_catalog(filepath)
+
+    if relative_to:
+        for iri, path in iris.items():
+            iris[iri] = os.path.relpath(path, relative_to)
+
+    if return_paths:
+        return iris, dirs
+    return iris
+
+
+
+ +
+ + + +
+ + + +

+rename_iris(onto, annotation='prefLabel') + + +

+ +
+ +

For IRIs with the given annotation, change the name of the entity +to the value of the annotation. Also add an skos:exactMatch +annotation referring to the old IRI.

+ +
+ Source code in ontopy/utils.py +
def rename_iris(onto, annotation="prefLabel"):
+    """For IRIs with the given annotation, change the name of the entity
+    to the value of the annotation.  Also add an `skos:exactMatch`
+    annotation referring to the old IRI.
+    """
+    exactMatch = onto._abbreviate(  # pylint:disable=invalid-name
+        "http://www.w3.org/2004/02/skos/core#exactMatch"
+    )
+    for entity in onto.get_entities():
+        if hasattr(entity, annotation) and getattr(entity, annotation):
+            onto._add_data_triple_spod(
+                entity.storid, exactMatch, entity.iri, ""
+            )
+            entity.name = getattr(entity, annotation).first()
+
+
+
+ +
+ + + +
+ + + +

+write_catalog(irimap, output='catalog-v001.xml', directory='.', relative_paths=True, append=False) + + +

+ +
+ +

Write catalog file do disk.

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
irimapdict

dict mapping ontology IRIs (name) to actual locations +(URIs). It has the same format as the dict returned by +read_catalog().

required
outputUnion[str, Path]

name of catalog file.

'catalog-v001.xml'
directoryUnion[str, Path]

directory path to the catalog file. Only used if output +is a relative path.

'.'
relative_pathsbool

whether to write file paths inside the catalog as +relative paths (instead of absolute paths).

True
appendbool

whether to append to a possible existing catalog file. +If false, an existing file will be overwritten.

False
+
+ Source code in ontopy/utils.py +
def write_catalog(
+    irimap: dict,
+    output: "Union[str, Path]" = "catalog-v001.xml",
+    directory: "Union[str, Path]" = ".",
+    relative_paths: bool = True,
+    append: bool = False,
+):  # pylint: disable=redefined-builtin
+    """Write catalog file do disk.
+
+    Args:
+        irimap: dict mapping ontology IRIs (name) to actual locations
+            (URIs).  It has the same format as the dict returned by
+            read_catalog().
+        output: name of catalog file.
+        directory: directory path to the catalog file.  Only used if `output`
+            is a relative path.
+        relative_paths: whether to write file paths inside the catalog as
+            relative paths (instead of  absolute paths).
+        append: whether to append to a possible existing catalog file.
+            If false, an existing file will be overwritten.
+    """
+    filename = Path(directory) / output
+
+    if relative_paths:
+        irimap = irimap.copy()  # don't modify provided irimap
+        for iri, path in irimap.items():
+            if os.path.isabs(path):
+                irimap[iri] = os.path.relpath(path, filename.parent)
+
+    if filename.exists() and append:
+        iris = read_catalog(filename)
+        iris.update(irimap)
+        irimap = iris
+
+    res = [
+        '<?xml version="1.0" encoding="UTF-8" standalone="no"?>',
+        '<catalog prefer="public" '
+        'xmlns="urn:oasis:names:tc:entity:xmlns:xml:catalog">',
+        '    <group id="Folder Repository, directory=, recursive=true, '
+        'Auto-Update=false, version=2" prefer="public" xml:base="">',
+    ]
+    for iri, path in irimap.items():
+        res.append(f'        <uri name="{iri}" uri="{path}"/>')
+    res.append("    </group>")
+    res.append("</catalog>")
+    with open(filename, "wt") as handle:
+        handle.write("\n".join(res) + "\n")
+
+
+
+ +
+ + + + + + +
+ +
+ +
+ + + + + + + + + + + + + +
+
+ + + + + +
+ + + +
+ + + +
+
+
+
+ + + + + + + + + + \ No newline at end of file diff --git a/0.6.1/assets/_mkdocstrings.css b/0.6.1/assets/_mkdocstrings.css new file mode 100644 index 000000000..b2cceef22 --- /dev/null +++ b/0.6.1/assets/_mkdocstrings.css @@ -0,0 +1,16 @@ + +/* Don't capitalize names. */ +h5.doc-heading { + text-transform: none !important; +} + +/* Avoid breaking parameters name, etc. in table cells. */ +.doc-contents td code { + word-break: normal !important; +} + +/* For pieces of Markdown rendered in table cells. */ +.doc-contents td p { + margin-top: 0 !important; + margin-bottom: 0 !important; +} diff --git a/0.6.1/assets/images/favicon.png b/0.6.1/assets/images/favicon.png new file mode 100644 index 000000000..1cf13b9f9 Binary files /dev/null and b/0.6.1/assets/images/favicon.png differ diff --git a/0.6.1/assets/javascripts/bundle.7389ff0e.min.js b/0.6.1/assets/javascripts/bundle.7389ff0e.min.js new file mode 100644 index 000000000..c7df7197e --- /dev/null +++ b/0.6.1/assets/javascripts/bundle.7389ff0e.min.js @@ -0,0 +1,29 @@ +"use strict";(()=>{var Mi=Object.create;var gr=Object.defineProperty;var Li=Object.getOwnPropertyDescriptor;var _i=Object.getOwnPropertyNames,Ft=Object.getOwnPropertySymbols,Ai=Object.getPrototypeOf,xr=Object.prototype.hasOwnProperty,ro=Object.prototype.propertyIsEnumerable;var to=(e,t,r)=>t in e?gr(e,t,{enumerable:!0,configurable:!0,writable:!0,value:r}):e[t]=r,P=(e,t)=>{for(var r in t||(t={}))xr.call(t,r)&&to(e,r,t[r]);if(Ft)for(var r of Ft(t))ro.call(t,r)&&to(e,r,t[r]);return e};var oo=(e,t)=>{var r={};for(var o in e)xr.call(e,o)&&t.indexOf(o)<0&&(r[o]=e[o]);if(e!=null&&Ft)for(var o of Ft(e))t.indexOf(o)<0&&ro.call(e,o)&&(r[o]=e[o]);return r};var yr=(e,t)=>()=>(t||e((t={exports:{}}).exports,t),t.exports);var Ci=(e,t,r,o)=>{if(t&&typeof t=="object"||typeof t=="function")for(let n of _i(t))!xr.call(e,n)&&n!==r&&gr(e,n,{get:()=>t[n],enumerable:!(o=Li(t,n))||o.enumerable});return e};var 
jt=(e,t,r)=>(r=e!=null?Mi(Ai(e)):{},Ci(t||!e||!e.__esModule?gr(r,"default",{value:e,enumerable:!0}):r,e));var no=(e,t,r)=>new Promise((o,n)=>{var i=c=>{try{a(r.next(c))}catch(p){n(p)}},s=c=>{try{a(r.throw(c))}catch(p){n(p)}},a=c=>c.done?o(c.value):Promise.resolve(c.value).then(i,s);a((r=r.apply(e,t)).next())});var ao=yr((Er,io)=>{(function(e,t){typeof Er=="object"&&typeof io!="undefined"?t():typeof define=="function"&&define.amd?define(t):t()})(Er,function(){"use strict";function e(r){var o=!0,n=!1,i=null,s={text:!0,search:!0,url:!0,tel:!0,email:!0,password:!0,number:!0,date:!0,month:!0,week:!0,time:!0,datetime:!0,"datetime-local":!0};function a(C){return!!(C&&C!==document&&C.nodeName!=="HTML"&&C.nodeName!=="BODY"&&"classList"in C&&"contains"in C.classList)}function c(C){var ct=C.type,Ve=C.tagName;return!!(Ve==="INPUT"&&s[ct]&&!C.readOnly||Ve==="TEXTAREA"&&!C.readOnly||C.isContentEditable)}function p(C){C.classList.contains("focus-visible")||(C.classList.add("focus-visible"),C.setAttribute("data-focus-visible-added",""))}function l(C){C.hasAttribute("data-focus-visible-added")&&(C.classList.remove("focus-visible"),C.removeAttribute("data-focus-visible-added"))}function f(C){C.metaKey||C.altKey||C.ctrlKey||(a(r.activeElement)&&p(r.activeElement),o=!0)}function u(C){o=!1}function d(C){a(C.target)&&(o||c(C.target))&&p(C.target)}function y(C){a(C.target)&&(C.target.classList.contains("focus-visible")||C.target.hasAttribute("data-focus-visible-added"))&&(n=!0,window.clearTimeout(i),i=window.setTimeout(function(){n=!1},100),l(C.target))}function b(C){document.visibilityState==="hidden"&&(n&&(o=!0),D())}function 
D(){document.addEventListener("mousemove",J),document.addEventListener("mousedown",J),document.addEventListener("mouseup",J),document.addEventListener("pointermove",J),document.addEventListener("pointerdown",J),document.addEventListener("pointerup",J),document.addEventListener("touchmove",J),document.addEventListener("touchstart",J),document.addEventListener("touchend",J)}function Q(){document.removeEventListener("mousemove",J),document.removeEventListener("mousedown",J),document.removeEventListener("mouseup",J),document.removeEventListener("pointermove",J),document.removeEventListener("pointerdown",J),document.removeEventListener("pointerup",J),document.removeEventListener("touchmove",J),document.removeEventListener("touchstart",J),document.removeEventListener("touchend",J)}function J(C){C.target.nodeName&&C.target.nodeName.toLowerCase()==="html"||(o=!1,Q())}document.addEventListener("keydown",f,!0),document.addEventListener("mousedown",u,!0),document.addEventListener("pointerdown",u,!0),document.addEventListener("touchstart",u,!0),document.addEventListener("visibilitychange",b,!0),D(),r.addEventListener("focus",d,!0),r.addEventListener("blur",y,!0),r.nodeType===Node.DOCUMENT_FRAGMENT_NODE&&r.host?r.host.setAttribute("data-js-focus-visible",""):r.nodeType===Node.DOCUMENT_NODE&&(document.documentElement.classList.add("js-focus-visible"),document.documentElement.setAttribute("data-js-focus-visible",""))}if(typeof window!="undefined"&&typeof document!="undefined"){window.applyFocusVisiblePolyfill=e;var t;try{t=new CustomEvent("focus-visible-polyfill-ready")}catch(r){t=document.createEvent("CustomEvent"),t.initCustomEvent("focus-visible-polyfill-ready",!1,!1,{})}window.dispatchEvent(t)}typeof document!="undefined"&&e(document)})});var Kr=yr((kt,qr)=>{/*! 
+ * clipboard.js v2.0.11 + * https://clipboardjs.com/ + * + * Licensed MIT © Zeno Rocha + */(function(t,r){typeof kt=="object"&&typeof qr=="object"?qr.exports=r():typeof define=="function"&&define.amd?define([],r):typeof kt=="object"?kt.ClipboardJS=r():t.ClipboardJS=r()})(kt,function(){return function(){var e={686:function(o,n,i){"use strict";i.d(n,{default:function(){return Oi}});var s=i(279),a=i.n(s),c=i(370),p=i.n(c),l=i(817),f=i.n(l);function u(V){try{return document.execCommand(V)}catch(_){return!1}}var d=function(_){var O=f()(_);return u("cut"),O},y=d;function b(V){var _=document.documentElement.getAttribute("dir")==="rtl",O=document.createElement("textarea");O.style.fontSize="12pt",O.style.border="0",O.style.padding="0",O.style.margin="0",O.style.position="absolute",O.style[_?"right":"left"]="-9999px";var $=window.pageYOffset||document.documentElement.scrollTop;return O.style.top="".concat($,"px"),O.setAttribute("readonly",""),O.value=V,O}var D=function(_,O){var $=b(_);O.container.appendChild($);var N=f()($);return u("copy"),$.remove(),N},Q=function(_){var O=arguments.length>1&&arguments[1]!==void 0?arguments[1]:{container:document.body},$="";return typeof _=="string"?$=D(_,O):_ instanceof HTMLInputElement&&!["text","search","url","tel","password"].includes(_==null?void 0:_.type)?$=D(_.value,O):($=f()(_),u("copy")),$},J=Q;function C(V){"@babel/helpers - typeof";return typeof Symbol=="function"&&typeof Symbol.iterator=="symbol"?C=function(O){return typeof O}:C=function(O){return O&&typeof Symbol=="function"&&O.constructor===Symbol&&O!==Symbol.prototype?"symbol":typeof O},C(V)}var ct=function(){var _=arguments.length>0&&arguments[0]!==void 0?arguments[0]:{},O=_.action,$=O===void 0?"copy":O,N=_.container,Y=_.target,ke=_.text;if($!=="copy"&&$!=="cut")throw new Error('Invalid "action" value, use either "copy" or "cut"');if(Y!==void 0)if(Y&&C(Y)==="object"&&Y.nodeType===1){if($==="copy"&&Y.hasAttribute("disabled"))throw new Error('Invalid "target" attribute. 
Please use "readonly" instead of "disabled" attribute');if($==="cut"&&(Y.hasAttribute("readonly")||Y.hasAttribute("disabled")))throw new Error(`Invalid "target" attribute. You can't cut text from elements with "readonly" or "disabled" attributes`)}else throw new Error('Invalid "target" value, use a valid Element');if(ke)return J(ke,{container:N});if(Y)return $==="cut"?y(Y):J(Y,{container:N})},Ve=ct;function Fe(V){"@babel/helpers - typeof";return typeof Symbol=="function"&&typeof Symbol.iterator=="symbol"?Fe=function(O){return typeof O}:Fe=function(O){return O&&typeof Symbol=="function"&&O.constructor===Symbol&&O!==Symbol.prototype?"symbol":typeof O},Fe(V)}function vi(V,_){if(!(V instanceof _))throw new TypeError("Cannot call a class as a function")}function eo(V,_){for(var O=0;O<_.length;O++){var $=_[O];$.enumerable=$.enumerable||!1,$.configurable=!0,"value"in $&&($.writable=!0),Object.defineProperty(V,$.key,$)}}function gi(V,_,O){return _&&eo(V.prototype,_),O&&eo(V,O),V}function xi(V,_){if(typeof _!="function"&&_!==null)throw new TypeError("Super expression must either be null or a function");V.prototype=Object.create(_&&_.prototype,{constructor:{value:V,writable:!0,configurable:!0}}),_&&br(V,_)}function br(V,_){return br=Object.setPrototypeOf||function($,N){return $.__proto__=N,$},br(V,_)}function yi(V){var _=Ti();return function(){var $=Rt(V),N;if(_){var Y=Rt(this).constructor;N=Reflect.construct($,arguments,Y)}else N=$.apply(this,arguments);return Ei(this,N)}}function Ei(V,_){return _&&(Fe(_)==="object"||typeof _=="function")?_:wi(V)}function wi(V){if(V===void 0)throw new ReferenceError("this hasn't been initialised - super() hasn't been called");return V}function Ti(){if(typeof Reflect=="undefined"||!Reflect.construct||Reflect.construct.sham)return!1;if(typeof Proxy=="function")return!0;try{return Date.prototype.toString.call(Reflect.construct(Date,[],function(){})),!0}catch(V){return!1}}function Rt(V){return 
Rt=Object.setPrototypeOf?Object.getPrototypeOf:function(O){return O.__proto__||Object.getPrototypeOf(O)},Rt(V)}function vr(V,_){var O="data-clipboard-".concat(V);if(_.hasAttribute(O))return _.getAttribute(O)}var Si=function(V){xi(O,V);var _=yi(O);function O($,N){var Y;return vi(this,O),Y=_.call(this),Y.resolveOptions(N),Y.listenClick($),Y}return gi(O,[{key:"resolveOptions",value:function(){var N=arguments.length>0&&arguments[0]!==void 0?arguments[0]:{};this.action=typeof N.action=="function"?N.action:this.defaultAction,this.target=typeof N.target=="function"?N.target:this.defaultTarget,this.text=typeof N.text=="function"?N.text:this.defaultText,this.container=Fe(N.container)==="object"?N.container:document.body}},{key:"listenClick",value:function(N){var Y=this;this.listener=p()(N,"click",function(ke){return Y.onClick(ke)})}},{key:"onClick",value:function(N){var Y=N.delegateTarget||N.currentTarget,ke=this.action(Y)||"copy",It=Ve({action:ke,container:this.container,target:this.target(Y),text:this.text(Y)});this.emit(It?"success":"error",{action:ke,text:It,trigger:Y,clearSelection:function(){Y&&Y.focus(),window.getSelection().removeAllRanges()}})}},{key:"defaultAction",value:function(N){return vr("action",N)}},{key:"defaultTarget",value:function(N){var Y=vr("target",N);if(Y)return document.querySelector(Y)}},{key:"defaultText",value:function(N){return vr("text",N)}},{key:"destroy",value:function(){this.listener.destroy()}}],[{key:"copy",value:function(N){var Y=arguments.length>1&&arguments[1]!==void 0?arguments[1]:{container:document.body};return J(N,Y)}},{key:"cut",value:function(N){return y(N)}},{key:"isSupported",value:function(){var N=arguments.length>0&&arguments[0]!==void 0?arguments[0]:["copy","cut"],Y=typeof N=="string"?[N]:N,ke=!!document.queryCommandSupported;return Y.forEach(function(It){ke=ke&&!!document.queryCommandSupported(It)}),ke}}]),O}(a()),Oi=Si},828:function(o){var n=9;if(typeof Element!="undefined"&&!Element.prototype.matches){var 
i=Element.prototype;i.matches=i.matchesSelector||i.mozMatchesSelector||i.msMatchesSelector||i.oMatchesSelector||i.webkitMatchesSelector}function s(a,c){for(;a&&a.nodeType!==n;){if(typeof a.matches=="function"&&a.matches(c))return a;a=a.parentNode}}o.exports=s},438:function(o,n,i){var s=i(828);function a(l,f,u,d,y){var b=p.apply(this,arguments);return l.addEventListener(u,b,y),{destroy:function(){l.removeEventListener(u,b,y)}}}function c(l,f,u,d,y){return typeof l.addEventListener=="function"?a.apply(null,arguments):typeof u=="function"?a.bind(null,document).apply(null,arguments):(typeof l=="string"&&(l=document.querySelectorAll(l)),Array.prototype.map.call(l,function(b){return a(b,f,u,d,y)}))}function p(l,f,u,d){return function(y){y.delegateTarget=s(y.target,f),y.delegateTarget&&d.call(l,y)}}o.exports=c},879:function(o,n){n.node=function(i){return i!==void 0&&i instanceof HTMLElement&&i.nodeType===1},n.nodeList=function(i){var s=Object.prototype.toString.call(i);return i!==void 0&&(s==="[object NodeList]"||s==="[object HTMLCollection]")&&"length"in i&&(i.length===0||n.node(i[0]))},n.string=function(i){return typeof i=="string"||i instanceof String},n.fn=function(i){var s=Object.prototype.toString.call(i);return s==="[object Function]"}},370:function(o,n,i){var s=i(879),a=i(438);function c(u,d,y){if(!u&&!d&&!y)throw new Error("Missing required arguments");if(!s.string(d))throw new TypeError("Second argument must be a String");if(!s.fn(y))throw new TypeError("Third argument must be a Function");if(s.node(u))return p(u,d,y);if(s.nodeList(u))return l(u,d,y);if(s.string(u))return f(u,d,y);throw new TypeError("First argument must be a String, HTMLElement, HTMLCollection, or NodeList")}function p(u,d,y){return u.addEventListener(d,y),{destroy:function(){u.removeEventListener(d,y)}}}function l(u,d,y){return 
Array.prototype.forEach.call(u,function(b){b.addEventListener(d,y)}),{destroy:function(){Array.prototype.forEach.call(u,function(b){b.removeEventListener(d,y)})}}}function f(u,d,y){return a(document.body,u,d,y)}o.exports=c},817:function(o){function n(i){var s;if(i.nodeName==="SELECT")i.focus(),s=i.value;else if(i.nodeName==="INPUT"||i.nodeName==="TEXTAREA"){var a=i.hasAttribute("readonly");a||i.setAttribute("readonly",""),i.select(),i.setSelectionRange(0,i.value.length),a||i.removeAttribute("readonly"),s=i.value}else{i.hasAttribute("contenteditable")&&i.focus();var c=window.getSelection(),p=document.createRange();p.selectNodeContents(i),c.removeAllRanges(),c.addRange(p),s=c.toString()}return s}o.exports=n},279:function(o){function n(){}n.prototype={on:function(i,s,a){var c=this.e||(this.e={});return(c[i]||(c[i]=[])).push({fn:s,ctx:a}),this},once:function(i,s,a){var c=this;function p(){c.off(i,p),s.apply(a,arguments)}return p._=s,this.on(i,p,a)},emit:function(i){var s=[].slice.call(arguments,1),a=((this.e||(this.e={}))[i]||[]).slice(),c=0,p=a.length;for(c;c{"use strict";/*! 
+ * escape-html + * Copyright(c) 2012-2013 TJ Holowaychuk + * Copyright(c) 2015 Andreas Lubbe + * Copyright(c) 2015 Tiancheng "Timothy" Gu + * MIT Licensed + */var Wa=/["'&<>]/;Vn.exports=Ua;function Ua(e){var t=""+e,r=Wa.exec(t);if(!r)return t;var o,n="",i=0,s=0;for(i=r.index;i0&&i[i.length-1])&&(p[0]===6||p[0]===2)){r=0;continue}if(p[0]===3&&(!i||p[1]>i[0]&&p[1]=e.length&&(e=void 0),{value:e&&e[o++],done:!e}}};throw new TypeError(t?"Object is not iterable.":"Symbol.iterator is not defined.")}function z(e,t){var r=typeof Symbol=="function"&&e[Symbol.iterator];if(!r)return e;var o=r.call(e),n,i=[],s;try{for(;(t===void 0||t-- >0)&&!(n=o.next()).done;)i.push(n.value)}catch(a){s={error:a}}finally{try{n&&!n.done&&(r=o.return)&&r.call(o)}finally{if(s)throw s.error}}return i}function K(e,t,r){if(r||arguments.length===2)for(var o=0,n=t.length,i;o1||a(u,d)})})}function a(u,d){try{c(o[u](d))}catch(y){f(i[0][3],y)}}function c(u){u.value instanceof ot?Promise.resolve(u.value.v).then(p,l):f(i[0][2],u)}function p(u){a("next",u)}function l(u){a("throw",u)}function f(u,d){u(d),i.shift(),i.length&&a(i[0][0],i[0][1])}}function po(e){if(!Symbol.asyncIterator)throw new TypeError("Symbol.asyncIterator is not defined.");var t=e[Symbol.asyncIterator],r;return t?t.call(e):(e=typeof be=="function"?be(e):e[Symbol.iterator](),r={},o("next"),o("throw"),o("return"),r[Symbol.asyncIterator]=function(){return this},r);function o(i){r[i]=e[i]&&function(s){return new Promise(function(a,c){s=e[i](s),n(a,c,s.done,s.value)})}}function n(i,s,a,c){Promise.resolve(c).then(function(p){i({value:p,done:a})},s)}}function k(e){return typeof e=="function"}function pt(e){var t=function(o){Error.call(o),o.stack=new Error().stack},r=e(t);return r.prototype=Object.create(Error.prototype),r.prototype.constructor=r,r}var Ut=pt(function(e){return function(r){e(this),this.message=r?r.length+` errors occurred during unsubscription: +`+r.map(function(o,n){return n+1+") "+o.toString()}).join(` + 
`):"",this.name="UnsubscriptionError",this.errors=r}});function ze(e,t){if(e){var r=e.indexOf(t);0<=r&&e.splice(r,1)}}var je=function(){function e(t){this.initialTeardown=t,this.closed=!1,this._parentage=null,this._finalizers=null}return e.prototype.unsubscribe=function(){var t,r,o,n,i;if(!this.closed){this.closed=!0;var s=this._parentage;if(s)if(this._parentage=null,Array.isArray(s))try{for(var a=be(s),c=a.next();!c.done;c=a.next()){var p=c.value;p.remove(this)}}catch(b){t={error:b}}finally{try{c&&!c.done&&(r=a.return)&&r.call(a)}finally{if(t)throw t.error}}else s.remove(this);var l=this.initialTeardown;if(k(l))try{l()}catch(b){i=b instanceof Ut?b.errors:[b]}var f=this._finalizers;if(f){this._finalizers=null;try{for(var u=be(f),d=u.next();!d.done;d=u.next()){var y=d.value;try{lo(y)}catch(b){i=i!=null?i:[],b instanceof Ut?i=K(K([],z(i)),z(b.errors)):i.push(b)}}}catch(b){o={error:b}}finally{try{d&&!d.done&&(n=u.return)&&n.call(u)}finally{if(o)throw o.error}}}if(i)throw new Ut(i)}},e.prototype.add=function(t){var r;if(t&&t!==this)if(this.closed)lo(t);else{if(t instanceof e){if(t.closed||t._hasParent(this))return;t._addParent(this)}(this._finalizers=(r=this._finalizers)!==null&&r!==void 0?r:[]).push(t)}},e.prototype._hasParent=function(t){var r=this._parentage;return r===t||Array.isArray(r)&&r.includes(t)},e.prototype._addParent=function(t){var r=this._parentage;this._parentage=Array.isArray(r)?(r.push(t),r):r?[r,t]:t},e.prototype._removeParent=function(t){var r=this._parentage;r===t?this._parentage=null:Array.isArray(r)&&ze(r,t)},e.prototype.remove=function(t){var r=this._finalizers;r&&ze(r,t),t instanceof e&&t._removeParent(this)},e.EMPTY=function(){var t=new e;return t.closed=!0,t}(),e}();var Tr=je.EMPTY;function Nt(e){return e instanceof je||e&&"closed"in e&&k(e.remove)&&k(e.add)&&k(e.unsubscribe)}function lo(e){k(e)?e():e.unsubscribe()}var He={onUnhandledError:null,onStoppedNotification:null,Promise:void 
0,useDeprecatedSynchronousErrorHandling:!1,useDeprecatedNextContext:!1};var lt={setTimeout:function(e,t){for(var r=[],o=2;o0},enumerable:!1,configurable:!0}),t.prototype._trySubscribe=function(r){return this._throwIfClosed(),e.prototype._trySubscribe.call(this,r)},t.prototype._subscribe=function(r){return this._throwIfClosed(),this._checkFinalizedStatuses(r),this._innerSubscribe(r)},t.prototype._innerSubscribe=function(r){var o=this,n=this,i=n.hasError,s=n.isStopped,a=n.observers;return i||s?Tr:(this.currentObservers=null,a.push(r),new je(function(){o.currentObservers=null,ze(a,r)}))},t.prototype._checkFinalizedStatuses=function(r){var o=this,n=o.hasError,i=o.thrownError,s=o.isStopped;n?r.error(i):s&&r.complete()},t.prototype.asObservable=function(){var r=new I;return r.source=this,r},t.create=function(r,o){return new xo(r,o)},t}(I);var xo=function(e){se(t,e);function t(r,o){var n=e.call(this)||this;return n.destination=r,n.source=o,n}return t.prototype.next=function(r){var o,n;(n=(o=this.destination)===null||o===void 0?void 0:o.next)===null||n===void 0||n.call(o,r)},t.prototype.error=function(r){var o,n;(n=(o=this.destination)===null||o===void 0?void 0:o.error)===null||n===void 0||n.call(o,r)},t.prototype.complete=function(){var r,o;(o=(r=this.destination)===null||r===void 0?void 0:r.complete)===null||o===void 0||o.call(r)},t.prototype._subscribe=function(r){var o,n;return(n=(o=this.source)===null||o===void 0?void 0:o.subscribe(r))!==null&&n!==void 0?n:Tr},t}(x);var St={now:function(){return(St.delegate||Date).now()},delegate:void 0};var Ot=function(e){se(t,e);function t(r,o,n){r===void 0&&(r=1/0),o===void 0&&(o=1/0),n===void 0&&(n=St);var i=e.call(this)||this;return i._bufferSize=r,i._windowTime=o,i._timestampProvider=n,i._buffer=[],i._infiniteTimeWindow=!0,i._infiniteTimeWindow=o===1/0,i._bufferSize=Math.max(1,r),i._windowTime=Math.max(1,o),i}return t.prototype.next=function(r){var 
o=this,n=o.isStopped,i=o._buffer,s=o._infiniteTimeWindow,a=o._timestampProvider,c=o._windowTime;n||(i.push(r),!s&&i.push(a.now()+c)),this._trimBuffer(),e.prototype.next.call(this,r)},t.prototype._subscribe=function(r){this._throwIfClosed(),this._trimBuffer();for(var o=this._innerSubscribe(r),n=this,i=n._infiniteTimeWindow,s=n._buffer,a=s.slice(),c=0;c0?e.prototype.requestAsyncId.call(this,r,o,n):(r.actions.push(this),r._scheduled||(r._scheduled=ut.requestAnimationFrame(function(){return r.flush(void 0)})))},t.prototype.recycleAsyncId=function(r,o,n){var i;if(n===void 0&&(n=0),n!=null?n>0:this.delay>0)return e.prototype.recycleAsyncId.call(this,r,o,n);var s=r.actions;o!=null&&((i=s[s.length-1])===null||i===void 0?void 0:i.id)!==o&&(ut.cancelAnimationFrame(o),r._scheduled=void 0)},t}(zt);var wo=function(e){se(t,e);function t(){return e!==null&&e.apply(this,arguments)||this}return t.prototype.flush=function(r){this._active=!0;var o=this._scheduled;this._scheduled=void 0;var n=this.actions,i;r=r||n.shift();do if(i=r.execute(r.state,r.delay))break;while((r=n[0])&&r.id===o&&n.shift());if(this._active=!1,i){for(;(r=n[0])&&r.id===o&&n.shift();)r.unsubscribe();throw i}},t}(qt);var ge=new wo(Eo);var M=new I(function(e){return e.complete()});function Kt(e){return e&&k(e.schedule)}function Cr(e){return e[e.length-1]}function Ge(e){return k(Cr(e))?e.pop():void 0}function Ae(e){return Kt(Cr(e))?e.pop():void 0}function Qt(e,t){return typeof Cr(e)=="number"?e.pop():t}var dt=function(e){return e&&typeof e.length=="number"&&typeof e!="function"};function Yt(e){return k(e==null?void 0:e.then)}function Bt(e){return k(e[ft])}function Gt(e){return Symbol.asyncIterator&&k(e==null?void 0:e[Symbol.asyncIterator])}function Jt(e){return new TypeError("You provided "+(e!==null&&typeof e=="object"?"an invalid object":"'"+e+"'")+" where a stream was expected. 
You can provide an Observable, Promise, ReadableStream, Array, AsyncIterable, or Iterable.")}function Wi(){return typeof Symbol!="function"||!Symbol.iterator?"@@iterator":Symbol.iterator}var Xt=Wi();function Zt(e){return k(e==null?void 0:e[Xt])}function er(e){return co(this,arguments,function(){var r,o,n,i;return Wt(this,function(s){switch(s.label){case 0:r=e.getReader(),s.label=1;case 1:s.trys.push([1,,9,10]),s.label=2;case 2:return[4,ot(r.read())];case 3:return o=s.sent(),n=o.value,i=o.done,i?[4,ot(void 0)]:[3,5];case 4:return[2,s.sent()];case 5:return[4,ot(n)];case 6:return[4,s.sent()];case 7:return s.sent(),[3,2];case 8:return[3,10];case 9:return r.releaseLock(),[7];case 10:return[2]}})})}function tr(e){return k(e==null?void 0:e.getReader)}function F(e){if(e instanceof I)return e;if(e!=null){if(Bt(e))return Ui(e);if(dt(e))return Ni(e);if(Yt(e))return Di(e);if(Gt(e))return To(e);if(Zt(e))return Vi(e);if(tr(e))return zi(e)}throw Jt(e)}function Ui(e){return new I(function(t){var r=e[ft]();if(k(r.subscribe))return r.subscribe(t);throw new TypeError("Provided object does not correctly implement Symbol.observable")})}function Ni(e){return new I(function(t){for(var r=0;r=2;return function(o){return o.pipe(e?v(function(n,i){return e(n,i,o)}):pe,ue(1),r?$e(t):Uo(function(){return new or}))}}function Rr(e){return e<=0?function(){return M}:g(function(t,r){var o=[];t.subscribe(E(r,function(n){o.push(n),e=2,!0))}function de(e){e===void 0&&(e={});var t=e.connector,r=t===void 0?function(){return new x}:t,o=e.resetOnError,n=o===void 0?!0:o,i=e.resetOnComplete,s=i===void 0?!0:i,a=e.resetOnRefCountZero,c=a===void 0?!0:a;return function(p){var l,f,u,d=0,y=!1,b=!1,D=function(){f==null||f.unsubscribe(),f=void 0},Q=function(){D(),l=u=void 0,y=b=!1},J=function(){var C=l;Q(),C==null||C.unsubscribe()};return g(function(C,ct){d++,!b&&!y&&D();var Ve=u=u!=null?u:r();ct.add(function(){d--,d===0&&!b&&!y&&(f=jr(J,c))}),Ve.subscribe(ct),!l&&d>0&&(l=new it({next:function(Fe){return 
Ve.next(Fe)},error:function(Fe){b=!0,D(),f=jr(Q,n,Fe),Ve.error(Fe)},complete:function(){y=!0,D(),f=jr(Q,s),Ve.complete()}}),F(C).subscribe(l))})(p)}}function jr(e,t){for(var r=[],o=2;oe.next(document)),e}function W(e,t=document){return Array.from(t.querySelectorAll(e))}function U(e,t=document){let r=ce(e,t);if(typeof r=="undefined")throw new ReferenceError(`Missing element: expected "${e}" to be present`);return r}function ce(e,t=document){return t.querySelector(e)||void 0}function Ie(){return document.activeElement instanceof HTMLElement&&document.activeElement||void 0}var ca=L(h(document.body,"focusin"),h(document.body,"focusout")).pipe(ye(1),q(void 0),m(()=>Ie()||document.body),Z(1));function vt(e){return ca.pipe(m(t=>e.contains(t)),X())}function qo(e,t){return L(h(e,"mouseenter").pipe(m(()=>!0)),h(e,"mouseleave").pipe(m(()=>!1))).pipe(t?ye(t):pe,q(!1))}function Ue(e){return{x:e.offsetLeft,y:e.offsetTop}}function Ko(e){return L(h(window,"load"),h(window,"resize")).pipe(Le(0,ge),m(()=>Ue(e)),q(Ue(e)))}function ir(e){return{x:e.scrollLeft,y:e.scrollTop}}function et(e){return L(h(e,"scroll"),h(window,"resize")).pipe(Le(0,ge),m(()=>ir(e)),q(ir(e)))}function Qo(e,t){if(typeof t=="string"||typeof t=="number")e.innerHTML+=t.toString();else if(t instanceof Node)e.appendChild(t);else if(Array.isArray(t))for(let r of t)Qo(e,r)}function S(e,t,...r){let o=document.createElement(e);if(t)for(let n of Object.keys(t))typeof t[n]!="undefined"&&(typeof t[n]!="boolean"?o.setAttribute(n,t[n]):o.setAttribute(n,""));for(let n of r)Qo(o,n);return o}function ar(e){if(e>999){let t=+((e-950)%1e3>99);return`${((e+1e-6)/1e3).toFixed(t)}k`}else return e.toString()}function gt(e){let t=S("script",{src:e});return H(()=>(document.head.appendChild(t),L(h(t,"load"),h(t,"error").pipe(w(()=>kr(()=>new ReferenceError(`Invalid script: ${e}`))))).pipe(m(()=>{}),A(()=>document.head.removeChild(t)),ue(1))))}var Yo=new x,pa=H(()=>typeof 
ResizeObserver=="undefined"?gt("https://unpkg.com/resize-observer-polyfill"):R(void 0)).pipe(m(()=>new ResizeObserver(e=>{for(let t of e)Yo.next(t)})),w(e=>L(Ke,R(e)).pipe(A(()=>e.disconnect()))),Z(1));function le(e){return{width:e.offsetWidth,height:e.offsetHeight}}function Se(e){return pa.pipe(T(t=>t.observe(e)),w(t=>Yo.pipe(v(({target:r})=>r===e),A(()=>t.unobserve(e)),m(()=>le(e)))),q(le(e)))}function xt(e){return{width:e.scrollWidth,height:e.scrollHeight}}function sr(e){let t=e.parentElement;for(;t&&(e.scrollWidth<=t.scrollWidth&&e.scrollHeight<=t.scrollHeight);)t=(e=t).parentElement;return t?e:void 0}var Bo=new x,la=H(()=>R(new IntersectionObserver(e=>{for(let t of e)Bo.next(t)},{threshold:0}))).pipe(w(e=>L(Ke,R(e)).pipe(A(()=>e.disconnect()))),Z(1));function yt(e){return la.pipe(T(t=>t.observe(e)),w(t=>Bo.pipe(v(({target:r})=>r===e),A(()=>t.unobserve(e)),m(({isIntersecting:r})=>r))))}function Go(e,t=16){return et(e).pipe(m(({y:r})=>{let o=le(e),n=xt(e);return r>=n.height-o.height-t}),X())}var cr={drawer:U("[data-md-toggle=drawer]"),search:U("[data-md-toggle=search]")};function Jo(e){return cr[e].checked}function Ye(e,t){cr[e].checked!==t&&cr[e].click()}function Ne(e){let t=cr[e];return h(t,"change").pipe(m(()=>t.checked),q(t.checked))}function ma(e,t){switch(e.constructor){case HTMLInputElement:return e.type==="radio"?/^Arrow/.test(t):!0;case HTMLSelectElement:case HTMLTextAreaElement:return!0;default:return e.isContentEditable}}function fa(){return L(h(window,"compositionstart").pipe(m(()=>!0)),h(window,"compositionend").pipe(m(()=>!1))).pipe(q(!1))}function Xo(){let e=h(window,"keydown").pipe(v(t=>!(t.metaKey||t.ctrlKey)),m(t=>({mode:Jo("search")?"search":"global",type:t.key,claim(){t.preventDefault(),t.stopPropagation()}})),v(({mode:t,type:r})=>{if(t==="global"){let o=Ie();if(typeof o!="undefined")return!ma(o,r)}return!0}),de());return fa().pipe(w(t=>t?M:e))}function me(){return new URL(location.href)}function st(e,t=!1){if(G("navigation.instant")&&!t){let 
r=S("a",{href:e.href});document.body.appendChild(r),r.click(),r.remove()}else location.href=e.href}function Zo(){return new x}function en(){return location.hash.slice(1)}function pr(e){let t=S("a",{href:e});t.addEventListener("click",r=>r.stopPropagation()),t.click()}function ua(e){return L(h(window,"hashchange"),e).pipe(m(en),q(en()),v(t=>t.length>0),Z(1))}function tn(e){return ua(e).pipe(m(t=>ce(`[id="${t}"]`)),v(t=>typeof t!="undefined"))}function At(e){let t=matchMedia(e);return nr(r=>t.addListener(()=>r(t.matches))).pipe(q(t.matches))}function rn(){let e=matchMedia("print");return L(h(window,"beforeprint").pipe(m(()=>!0)),h(window,"afterprint").pipe(m(()=>!1))).pipe(q(e.matches))}function Dr(e,t){return e.pipe(w(r=>r?t():M))}function lr(e,t){return new I(r=>{let o=new XMLHttpRequest;o.open("GET",`${e}`),o.responseType="blob",o.addEventListener("load",()=>{o.status>=200&&o.status<300?(r.next(o.response),r.complete()):r.error(new Error(o.statusText))}),o.addEventListener("error",()=>{r.error(new Error("Network Error"))}),o.addEventListener("abort",()=>{r.error(new Error("Request aborted"))}),typeof(t==null?void 0:t.progress$)!="undefined"&&(o.addEventListener("progress",n=>{if(n.lengthComputable)t.progress$.next(n.loaded/n.total*100);else{let i=Number(o.getResponseHeader("Content-Length"))||0;t.progress$.next(n.loaded/i*100)}}),t.progress$.next(5)),o.send()})}function De(e,t){return lr(e,t).pipe(w(r=>r.text()),m(r=>JSON.parse(r)),Z(1))}function on(e,t){let r=new DOMParser;return lr(e,t).pipe(w(o=>o.text()),m(o=>r.parseFromString(o,"text/xml")),Z(1))}function nn(){return{x:Math.max(0,scrollX),y:Math.max(0,scrollY)}}function an(){return L(h(window,"scroll",{passive:!0}),h(window,"resize",{passive:!0})).pipe(m(nn),q(nn()))}function sn(){return{width:innerWidth,height:innerHeight}}function cn(){return h(window,"resize",{passive:!0}).pipe(m(sn),q(sn()))}function pn(){return B([an(),cn()]).pipe(m(([e,t])=>({offset:e,size:t})),Z(1))}function 
mr(e,{viewport$:t,header$:r}){let o=t.pipe(te("size")),n=B([o,r]).pipe(m(()=>Ue(e)));return B([r,t,n]).pipe(m(([{height:i},{offset:s,size:a},{x:c,y:p}])=>({offset:{x:s.x-c,y:s.y-p+i},size:a})))}function da(e){return h(e,"message",t=>t.data)}function ha(e){let t=new x;return t.subscribe(r=>e.postMessage(r)),t}function ln(e,t=new Worker(e)){let r=da(t),o=ha(t),n=new x;n.subscribe(o);let i=o.pipe(ee(),oe(!0));return n.pipe(ee(),Re(r.pipe(j(i))),de())}var ba=U("#__config"),Et=JSON.parse(ba.textContent);Et.base=`${new URL(Et.base,me())}`;function he(){return Et}function G(e){return Et.features.includes(e)}function we(e,t){return typeof t!="undefined"?Et.translations[e].replace("#",t.toString()):Et.translations[e]}function Oe(e,t=document){return U(`[data-md-component=${e}]`,t)}function ne(e,t=document){return W(`[data-md-component=${e}]`,t)}function va(e){let t=U(".md-typeset > :first-child",e);return h(t,"click",{once:!0}).pipe(m(()=>U(".md-typeset",e)),m(r=>({hash:__md_hash(r.innerHTML)})))}function mn(e){if(!G("announce.dismiss")||!e.childElementCount)return M;if(!e.hidden){let t=U(".md-typeset",e);__md_hash(t.innerHTML)===__md_get("__announce")&&(e.hidden=!0)}return H(()=>{let t=new x;return t.subscribe(({hash:r})=>{e.hidden=!0,__md_set("__announce",r)}),va(e).pipe(T(r=>t.next(r)),A(()=>t.complete()),m(r=>P({ref:e},r)))})}function ga(e,{target$:t}){return t.pipe(m(r=>({hidden:r!==e})))}function fn(e,t){let r=new x;return r.subscribe(({hidden:o})=>{e.hidden=o}),ga(e,t).pipe(T(o=>r.next(o)),A(()=>r.complete()),m(o=>P({ref:e},o)))}function Ct(e,t){return t==="inline"?S("div",{class:"md-tooltip md-tooltip--inline",id:e,role:"tooltip"},S("div",{class:"md-tooltip__inner md-typeset"})):S("div",{class:"md-tooltip",id:e,role:"tooltip"},S("div",{class:"md-tooltip__inner md-typeset"}))}function un(e,t){if(t=t?`${t}_annotation_${e}`:void 0,t){let r=t?`#${t}`:void 0;return 
S("aside",{class:"md-annotation",tabIndex:0},Ct(t),S("a",{href:r,class:"md-annotation__index",tabIndex:-1},S("span",{"data-md-annotation-id":e})))}else return S("aside",{class:"md-annotation",tabIndex:0},Ct(t),S("span",{class:"md-annotation__index",tabIndex:-1},S("span",{"data-md-annotation-id":e})))}function dn(e){return S("button",{class:"md-clipboard md-icon",title:we("clipboard.copy"),"data-clipboard-target":`#${e} > code`})}function Vr(e,t){let r=t&2,o=t&1,n=Object.keys(e.terms).filter(c=>!e.terms[c]).reduce((c,p)=>[...c,S("del",null,p)," "],[]).slice(0,-1),i=he(),s=new URL(e.location,i.base);G("search.highlight")&&s.searchParams.set("h",Object.entries(e.terms).filter(([,c])=>c).reduce((c,[p])=>`${c} ${p}`.trim(),""));let{tags:a}=he();return S("a",{href:`${s}`,class:"md-search-result__link",tabIndex:-1},S("article",{class:"md-search-result__article md-typeset","data-md-score":e.score.toFixed(2)},r>0&&S("div",{class:"md-search-result__icon md-icon"}),r>0&&S("h1",null,e.title),r<=0&&S("h2",null,e.title),o>0&&e.text.length>0&&e.text,e.tags&&e.tags.map(c=>{let p=a?c in a?`md-tag-icon md-tag--${a[c]}`:"md-tag-icon":"";return S("span",{class:`md-tag ${p}`},c)}),o>0&&n.length>0&&S("p",{class:"md-search-result__terms"},we("search.result.term.missing"),": ",...n)))}function hn(e){let t=e[0].score,r=[...e],o=he(),n=r.findIndex(l=>!`${new URL(l.location,o.base)}`.includes("#")),[i]=r.splice(n,1),s=r.findIndex(l=>l.scoreVr(l,1)),...c.length?[S("details",{class:"md-search-result__more"},S("summary",{tabIndex:-1},S("div",null,c.length>0&&c.length===1?we("search.result.more.one"):we("search.result.more.other",c.length))),...c.map(l=>Vr(l,1)))]:[]];return S("li",{class:"md-search-result__item"},p)}function bn(e){return S("ul",{class:"md-source__facts"},Object.entries(e).map(([t,r])=>S("li",{class:`md-source__fact md-source__fact--${t}`},typeof r=="number"?ar(r):r)))}function zr(e){let t=`tabbed-control tabbed-control--${e}`;return 
S("div",{class:t,hidden:!0},S("button",{class:"tabbed-button",tabIndex:-1,"aria-hidden":"true"}))}function vn(e){return S("div",{class:"md-typeset__scrollwrap"},S("div",{class:"md-typeset__table"},e))}function xa(e){let t=he(),r=new URL(`../${e.version}/`,t.base);return S("li",{class:"md-version__item"},S("a",{href:`${r}`,class:"md-version__link"},e.title))}function gn(e,t){return S("div",{class:"md-version"},S("button",{class:"md-version__current","aria-label":we("select.version")},t.title),S("ul",{class:"md-version__list"},e.map(xa)))}var ya=0;function Ea(e,t){document.body.append(e);let{width:r}=le(e);e.style.setProperty("--md-tooltip-width",`${r}px`),e.remove();let o=sr(t),n=typeof o!="undefined"?et(o):R({x:0,y:0}),i=L(vt(t),qo(t)).pipe(X());return B([i,n]).pipe(m(([s,a])=>{let{x:c,y:p}=Ue(t),l=le(t),f=t.closest("table");return f&&t.parentElement&&(c+=f.offsetLeft+t.parentElement.offsetLeft,p+=f.offsetTop+t.parentElement.offsetTop),{active:s,offset:{x:c-a.x+l.width/2-r/2,y:p-a.y+l.height+8}}}))}function Be(e){let t=e.title;if(!t.length)return M;let r=`__tooltip_${ya++}`,o=Ct(r,"inline"),n=U(".md-typeset",o);return n.innerHTML=t,H(()=>{let i=new x;return 
i.subscribe({next({offset:s}){o.style.setProperty("--md-tooltip-x",`${s.x}px`),o.style.setProperty("--md-tooltip-y",`${s.y}px`)},complete(){o.style.removeProperty("--md-tooltip-x"),o.style.removeProperty("--md-tooltip-y")}}),L(i.pipe(v(({active:s})=>s)),i.pipe(ye(250),v(({active:s})=>!s))).subscribe({next({active:s}){s?(e.insertAdjacentElement("afterend",o),e.setAttribute("aria-describedby",r),e.removeAttribute("title")):(o.remove(),e.removeAttribute("aria-describedby"),e.setAttribute("title",t))},complete(){o.remove(),e.removeAttribute("aria-describedby"),e.setAttribute("title",t)}}),i.pipe(Le(16,ge)).subscribe(({active:s})=>{o.classList.toggle("md-tooltip--active",s)}),i.pipe(_t(125,ge),v(()=>!!e.offsetParent),m(()=>e.offsetParent.getBoundingClientRect()),m(({x:s})=>s)).subscribe({next(s){s?o.style.setProperty("--md-tooltip-0",`${-s}px`):o.style.removeProperty("--md-tooltip-0")},complete(){o.style.removeProperty("--md-tooltip-0")}}),Ea(o,e).pipe(T(s=>i.next(s)),A(()=>i.complete()),m(s=>P({ref:e},s)))}).pipe(qe(ie))}function wa(e,t){let r=H(()=>B([Ko(e),et(t)])).pipe(m(([{x:o,y:n},i])=>{let{width:s,height:a}=le(e);return{x:o-i.x+s/2,y:n-i.y+a/2}}));return vt(e).pipe(w(o=>r.pipe(m(n=>({active:o,offset:n})),ue(+!o||1/0))))}function xn(e,t,{target$:r}){let[o,n]=Array.from(e.children);return H(()=>{let i=new x,s=i.pipe(ee(),oe(!0));return 
i.subscribe({next({offset:a}){e.style.setProperty("--md-tooltip-x",`${a.x}px`),e.style.setProperty("--md-tooltip-y",`${a.y}px`)},complete(){e.style.removeProperty("--md-tooltip-x"),e.style.removeProperty("--md-tooltip-y")}}),yt(e).pipe(j(s)).subscribe(a=>{e.toggleAttribute("data-md-visible",a)}),L(i.pipe(v(({active:a})=>a)),i.pipe(ye(250),v(({active:a})=>!a))).subscribe({next({active:a}){a?e.prepend(o):o.remove()},complete(){e.prepend(o)}}),i.pipe(Le(16,ge)).subscribe(({active:a})=>{o.classList.toggle("md-tooltip--active",a)}),i.pipe(_t(125,ge),v(()=>!!e.offsetParent),m(()=>e.offsetParent.getBoundingClientRect()),m(({x:a})=>a)).subscribe({next(a){a?e.style.setProperty("--md-tooltip-0",`${-a}px`):e.style.removeProperty("--md-tooltip-0")},complete(){e.style.removeProperty("--md-tooltip-0")}}),h(n,"click").pipe(j(s),v(a=>!(a.metaKey||a.ctrlKey))).subscribe(a=>{a.stopPropagation(),a.preventDefault()}),h(n,"mousedown").pipe(j(s),ae(i)).subscribe(([a,{active:c}])=>{var p;if(a.button!==0||a.metaKey||a.ctrlKey)a.preventDefault();else if(c){a.preventDefault();let l=e.parentElement.closest(".md-annotation");l instanceof HTMLElement?l.focus():(p=Ie())==null||p.blur()}}),r.pipe(j(s),v(a=>a===o),Qe(125)).subscribe(()=>e.focus()),wa(e,t).pipe(T(a=>i.next(a)),A(()=>i.complete()),m(a=>P({ref:e},a)))})}function Ta(e){return e.tagName==="CODE"?W(".c, .c1, .cm",e):[e]}function Sa(e){let t=[];for(let r of Ta(e)){let o=[],n=document.createNodeIterator(r,NodeFilter.SHOW_TEXT);for(let i=n.nextNode();i;i=n.nextNode())o.push(i);for(let i of o){let s;for(;s=/(\(\d+\))(!)?/.exec(i.textContent);){let[,a,c]=s;if(typeof c=="undefined"){let p=i.splitText(s.index);i=p.splitText(a.length),t.push(p)}else{i.textContent=a,t.push(i);break}}}}return t}function yn(e,t){t.append(...Array.from(e.childNodes))}function fr(e,t,{target$:r,print$:o}){let n=t.closest("[id]"),i=n==null?void 0:n.id,s=new Map;for(let a of Sa(t)){let[,c]=a.textContent.match(/\((\d+)\)/);ce(`:scope > 
li:nth-child(${c})`,e)&&(s.set(c,un(c,i)),a.replaceWith(s.get(c)))}return s.size===0?M:H(()=>{let a=new x,c=a.pipe(ee(),oe(!0)),p=[];for(let[l,f]of s)p.push([U(".md-typeset",f),U(`:scope > li:nth-child(${l})`,e)]);return o.pipe(j(c)).subscribe(l=>{e.hidden=!l,e.classList.toggle("md-annotation-list",l);for(let[f,u]of p)l?yn(f,u):yn(u,f)}),L(...[...s].map(([,l])=>xn(l,t,{target$:r}))).pipe(A(()=>a.complete()),de())})}function En(e){if(e.nextElementSibling){let t=e.nextElementSibling;if(t.tagName==="OL")return t;if(t.tagName==="P"&&!t.children.length)return En(t)}}function wn(e,t){return H(()=>{let r=En(e);return typeof r!="undefined"?fr(r,e,t):M})}var Tn=jt(Kr());var Oa=0;function Sn(e){if(e.nextElementSibling){let t=e.nextElementSibling;if(t.tagName==="OL")return t;if(t.tagName==="P"&&!t.children.length)return Sn(t)}}function Ma(e){return Se(e).pipe(m(({width:t})=>({scrollable:xt(e).width>t})),te("scrollable"))}function On(e,t){let{matches:r}=matchMedia("(hover)"),o=H(()=>{let n=new x,i=n.pipe(Rr(1));n.subscribe(({scrollable:c})=>{c&&r?e.setAttribute("tabindex","0"):e.removeAttribute("tabindex")});let s=[];if(Tn.default.isSupported()&&(e.closest(".copy")||G("content.code.copy")&&!e.closest(".no-copy"))){let c=e.closest("pre");c.id=`__code_${Oa++}`;let p=dn(c.id);c.insertBefore(p,e),G("content.tooltips")&&s.push(Be(p))}let a=e.closest(".highlight");if(a instanceof HTMLElement){let c=Sn(a);if(typeof c!="undefined"&&(a.classList.contains("annotate")||G("content.code.annotate"))){let p=fr(c,e,t);s.push(Se(a).pipe(j(i),m(({width:l,height:f})=>l&&f),X(),w(l=>l?p:M)))}}return Ma(e).pipe(T(c=>n.next(c)),A(()=>n.complete()),m(c=>P({ref:e},c)),Re(...s))});return G("content.lazy")?yt(e).pipe(v(n=>n),ue(1),w(()=>o)):o}function La(e,{target$:t,print$:r}){let o=!0;return L(t.pipe(m(n=>n.closest("details:not([open])")),v(n=>e===n),m(()=>({action:"open",reveal:!0}))),r.pipe(v(n=>n||!o),T(()=>o=e.open),m(n=>({action:n?"open":"close"}))))}function Mn(e,t){return H(()=>{let r=new 
x;return r.subscribe(({action:o,reveal:n})=>{e.toggleAttribute("open",o==="open"),n&&e.scrollIntoView()}),La(e,t).pipe(T(o=>r.next(o)),A(()=>r.complete()),m(o=>P({ref:e},o)))})}var Ln=".node circle,.node ellipse,.node path,.node polygon,.node rect{fill:var(--md-mermaid-node-bg-color);stroke:var(--md-mermaid-node-fg-color)}marker{fill:var(--md-mermaid-edge-color)!important}.edgeLabel .label rect{fill:#0000}.label{color:var(--md-mermaid-label-fg-color);font-family:var(--md-mermaid-font-family)}.label foreignObject{line-height:normal;overflow:visible}.label div .edgeLabel{color:var(--md-mermaid-label-fg-color)}.edgeLabel,.edgeLabel rect,.label div .edgeLabel{background-color:var(--md-mermaid-label-bg-color)}.edgeLabel,.edgeLabel rect{fill:var(--md-mermaid-label-bg-color);color:var(--md-mermaid-edge-color)}.edgePath .path,.flowchart-link{stroke:var(--md-mermaid-edge-color);stroke-width:.05rem}.edgePath .arrowheadPath{fill:var(--md-mermaid-edge-color);stroke:none}.cluster rect{fill:var(--md-default-fg-color--lightest);stroke:var(--md-default-fg-color--lighter)}.cluster span{color:var(--md-mermaid-label-fg-color);font-family:var(--md-mermaid-font-family)}g #flowchart-circleEnd,g #flowchart-circleStart,g #flowchart-crossEnd,g #flowchart-crossStart,g #flowchart-pointEnd,g #flowchart-pointStart{stroke:none}g.classGroup line,g.classGroup rect{fill:var(--md-mermaid-node-bg-color);stroke:var(--md-mermaid-node-fg-color)}g.classGroup text{fill:var(--md-mermaid-label-fg-color);font-family:var(--md-mermaid-font-family)}.classLabel .box{fill:var(--md-mermaid-label-bg-color);background-color:var(--md-mermaid-label-bg-color);opacity:1}.classLabel .label{fill:var(--md-mermaid-label-fg-color);font-family:var(--md-mermaid-font-family)}.node .divider{stroke:var(--md-mermaid-node-fg-color)}.relation{stroke:var(--md-mermaid-edge-color)}.cardinality{fill:var(--md-mermaid-label-fg-color);font-family:var(--md-mermaid-font-family)}.cardinality text{fill:inherit!important}defs 
#classDiagram-compositionEnd,defs #classDiagram-compositionStart,defs #classDiagram-dependencyEnd,defs #classDiagram-dependencyStart,defs #classDiagram-extensionEnd,defs #classDiagram-extensionStart{fill:var(--md-mermaid-edge-color)!important;stroke:var(--md-mermaid-edge-color)!important}defs #classDiagram-aggregationEnd,defs #classDiagram-aggregationStart{fill:var(--md-mermaid-label-bg-color)!important;stroke:var(--md-mermaid-edge-color)!important}g.stateGroup rect{fill:var(--md-mermaid-node-bg-color);stroke:var(--md-mermaid-node-fg-color)}g.stateGroup .state-title{fill:var(--md-mermaid-label-fg-color)!important;font-family:var(--md-mermaid-font-family)}g.stateGroup .composit{fill:var(--md-mermaid-label-bg-color)}.nodeLabel{color:var(--md-mermaid-label-fg-color);font-family:var(--md-mermaid-font-family)}.node circle.state-end,.node circle.state-start,.start-state{fill:var(--md-mermaid-edge-color);stroke:none}.end-state-inner,.end-state-outer{fill:var(--md-mermaid-edge-color)}.end-state-inner,.node circle.state-end{stroke:var(--md-mermaid-label-bg-color)}.transition{stroke:var(--md-mermaid-edge-color)}[id^=state-fork] rect,[id^=state-join] rect{fill:var(--md-mermaid-edge-color)!important;stroke:none!important}.statediagram-cluster.statediagram-cluster .inner{fill:var(--md-default-bg-color)}.statediagram-cluster rect{fill:var(--md-mermaid-node-bg-color);stroke:var(--md-mermaid-node-fg-color)}.statediagram-state rect.divider{fill:var(--md-default-fg-color--lightest);stroke:var(--md-default-fg-color--lighter)}defs 
#statediagram-barbEnd{stroke:var(--md-mermaid-edge-color)}.attributeBoxEven,.attributeBoxOdd{fill:var(--md-mermaid-node-bg-color);stroke:var(--md-mermaid-node-fg-color)}.entityBox{fill:var(--md-mermaid-label-bg-color);stroke:var(--md-mermaid-node-fg-color)}.entityLabel{fill:var(--md-mermaid-label-fg-color);font-family:var(--md-mermaid-font-family)}.relationshipLabelBox{fill:var(--md-mermaid-label-bg-color);fill-opacity:1;background-color:var(--md-mermaid-label-bg-color);opacity:1}.relationshipLabel{fill:var(--md-mermaid-label-fg-color)}.relationshipLine{stroke:var(--md-mermaid-edge-color)}defs #ONE_OR_MORE_END *,defs #ONE_OR_MORE_START *,defs #ONLY_ONE_END *,defs #ONLY_ONE_START *,defs #ZERO_OR_MORE_END *,defs #ZERO_OR_MORE_START *,defs #ZERO_OR_ONE_END *,defs #ZERO_OR_ONE_START *{stroke:var(--md-mermaid-edge-color)!important}defs #ZERO_OR_MORE_END circle,defs #ZERO_OR_MORE_START circle{fill:var(--md-mermaid-label-bg-color)}.actor{fill:var(--md-mermaid-sequence-actor-bg-color);stroke:var(--md-mermaid-sequence-actor-border-color)}text.actor>tspan{fill:var(--md-mermaid-sequence-actor-fg-color);font-family:var(--md-mermaid-font-family)}line{stroke:var(--md-mermaid-sequence-actor-line-color)}.actor-man circle,.actor-man line{fill:var(--md-mermaid-sequence-actorman-bg-color);stroke:var(--md-mermaid-sequence-actorman-line-color)}.messageLine0,.messageLine1{stroke:var(--md-mermaid-sequence-message-line-color)}.note{fill:var(--md-mermaid-sequence-note-bg-color);stroke:var(--md-mermaid-sequence-note-border-color)}.loopText,.loopText>tspan,.messageText,.noteText>tspan{stroke:none;font-family:var(--md-mermaid-font-family)!important}.messageText{fill:var(--md-mermaid-sequence-message-fg-color)}.loopText,.loopText>tspan{fill:var(--md-mermaid-sequence-loop-fg-color)}.noteText>tspan{fill:var(--md-mermaid-sequence-note-fg-color)}#arrowhead 
path{fill:var(--md-mermaid-sequence-message-line-color);stroke:none}.loopLine{fill:var(--md-mermaid-sequence-loop-bg-color);stroke:var(--md-mermaid-sequence-loop-border-color)}.labelBox{fill:var(--md-mermaid-sequence-label-bg-color);stroke:none}.labelText,.labelText>span{fill:var(--md-mermaid-sequence-label-fg-color);font-family:var(--md-mermaid-font-family)}.sequenceNumber{fill:var(--md-mermaid-sequence-number-fg-color)}rect.rect{fill:var(--md-mermaid-sequence-box-bg-color);stroke:none}rect.rect+text.text{fill:var(--md-mermaid-sequence-box-fg-color)}defs #sequencenumber{fill:var(--md-mermaid-sequence-number-bg-color)!important}";var Qr,Aa=0;function Ca(){return typeof mermaid=="undefined"||mermaid instanceof Element?gt("https://unpkg.com/mermaid@10.6.1/dist/mermaid.min.js"):R(void 0)}function _n(e){return e.classList.remove("mermaid"),Qr||(Qr=Ca().pipe(T(()=>mermaid.initialize({startOnLoad:!1,themeCSS:Ln,sequence:{actorFontSize:"16px",messageFontSize:"16px",noteFontSize:"16px"}})),m(()=>{}),Z(1))),Qr.subscribe(()=>no(this,null,function*(){e.classList.add("mermaid");let t=`__mermaid_${Aa++}`,r=S("div",{class:"mermaid"}),o=e.textContent,{svg:n,fn:i}=yield mermaid.render(t,o),s=r.attachShadow({mode:"closed"});s.innerHTML=n,e.replaceWith(r),i==null||i(s)})),Qr.pipe(m(()=>({ref:e})))}var An=S("table");function Cn(e){return e.replaceWith(An),An.replaceWith(vn(e)),R({ref:e})}function ka(e){let t=e.find(r=>r.checked)||e[0];return L(...e.map(r=>h(r,"change").pipe(m(()=>U(`label[for="${r.id}"]`))))).pipe(q(U(`label[for="${t.id}"]`)),m(r=>({active:r})))}function kn(e,{viewport$:t,target$:r}){let o=U(".tabbed-labels",e),n=W(":scope > input",e),i=zr("prev");e.append(i);let s=zr("next");return e.append(s),H(()=>{let a=new x,c=a.pipe(ee(),oe(!0));B([a,Se(e)]).pipe(j(c),Le(1,ge)).subscribe({next([{active:p},l]){let f=Ue(p),{width:u}=le(p);e.style.setProperty("--md-indicator-x",`${f.x}px`),e.style.setProperty("--md-indicator-width",`${u}px`);let 
d=ir(o);(f.xd.x+l.width)&&o.scrollTo({left:Math.max(0,f.x-16),behavior:"smooth"})},complete(){e.style.removeProperty("--md-indicator-x"),e.style.removeProperty("--md-indicator-width")}}),B([et(o),Se(o)]).pipe(j(c)).subscribe(([p,l])=>{let f=xt(o);i.hidden=p.x<16,s.hidden=p.x>f.width-l.width-16}),L(h(i,"click").pipe(m(()=>-1)),h(s,"click").pipe(m(()=>1))).pipe(j(c)).subscribe(p=>{let{width:l}=le(o);o.scrollBy({left:l*p,behavior:"smooth"})}),r.pipe(j(c),v(p=>n.includes(p))).subscribe(p=>p.click()),o.classList.add("tabbed-labels--linked");for(let p of n){let l=U(`label[for="${p.id}"]`);l.replaceChildren(S("a",{href:`#${l.htmlFor}`,tabIndex:-1},...Array.from(l.childNodes))),h(l.firstElementChild,"click").pipe(j(c),v(f=>!(f.metaKey||f.ctrlKey)),T(f=>{f.preventDefault(),f.stopPropagation()})).subscribe(()=>{history.replaceState({},"",`#${l.htmlFor}`),l.click()})}return G("content.tabs.link")&&a.pipe(Ee(1),ae(t)).subscribe(([{active:p},{offset:l}])=>{let f=p.innerText.trim();if(p.hasAttribute("data-md-switching"))p.removeAttribute("data-md-switching");else{let u=e.offsetTop-l.y;for(let y of W("[data-tabs]"))for(let b of W(":scope > input",y)){let D=U(`label[for="${b.id}"]`);if(D!==p&&D.innerText.trim()===f){D.setAttribute("data-md-switching",""),b.click();break}}window.scrollTo({top:e.offsetTop-u});let d=__md_get("__tabs")||[];__md_set("__tabs",[...new Set([f,...d])])}}),a.pipe(j(c)).subscribe(()=>{for(let p of W("audio, video",e))p.pause()}),ka(n).pipe(T(p=>a.next(p)),A(()=>a.complete()),m(p=>P({ref:e},p)))}).pipe(qe(ie))}function Hn(e,{viewport$:t,target$:r,print$:o}){return L(...W(".annotate:not(.highlight)",e).map(n=>wn(n,{target$:r,print$:o})),...W("pre:not(.mermaid) > 
code",e).map(n=>On(n,{target$:r,print$:o})),...W("pre.mermaid",e).map(n=>_n(n)),...W("table:not([class])",e).map(n=>Cn(n)),...W("details",e).map(n=>Mn(n,{target$:r,print$:o})),...W("[data-tabs]",e).map(n=>kn(n,{viewport$:t,target$:r})),...W("[title]",e).filter(()=>G("content.tooltips")).map(n=>Be(n)))}function Ha(e,{alert$:t}){return t.pipe(w(r=>L(R(!0),R(!1).pipe(Qe(2e3))).pipe(m(o=>({message:r,active:o})))))}function $n(e,t){let r=U(".md-typeset",e);return H(()=>{let o=new x;return o.subscribe(({message:n,active:i})=>{e.classList.toggle("md-dialog--active",i),r.textContent=n}),Ha(e,t).pipe(T(n=>o.next(n)),A(()=>o.complete()),m(n=>P({ref:e},n)))})}function $a({viewport$:e}){if(!G("header.autohide"))return R(!1);let t=e.pipe(m(({offset:{y:n}})=>n),Ce(2,1),m(([n,i])=>[nMath.abs(i-n.y)>100),m(([,[n]])=>n),X()),o=Ne("search");return B([e,o]).pipe(m(([{offset:n},i])=>n.y>400&&!i),X(),w(n=>n?r:R(!1)),q(!1))}function Pn(e,t){return H(()=>B([Se(e),$a(t)])).pipe(m(([{height:r},o])=>({height:r,hidden:o})),X((r,o)=>r.height===o.height&&r.hidden===o.hidden),Z(1))}function Rn(e,{header$:t,main$:r}){return H(()=>{let o=new x,n=o.pipe(ee(),oe(!0));o.pipe(te("active"),Ze(t)).subscribe(([{active:s},{hidden:a}])=>{e.classList.toggle("md-header--shadow",s&&!a),e.hidden=a});let i=fe(W("[title]",e)).pipe(v(()=>G("content.tooltips")),re(s=>Be(s)));return r.subscribe(o),t.pipe(j(n),m(s=>P({ref:e},s)),Re(i.pipe(j(n))))})}function Pa(e,{viewport$:t,header$:r}){return mr(e,{viewport$:t,header$:r}).pipe(m(({offset:{y:o}})=>{let{height:n}=le(e);return{active:o>=n}}),te("active"))}function In(e,t){return H(()=>{let r=new x;r.subscribe({next({active:n}){e.classList.toggle("md-header__title--active",n)},complete(){e.classList.remove("md-header__title--active")}});let o=ce(".md-content h1");return typeof o=="undefined"?M:Pa(o,t).pipe(T(n=>r.next(n)),A(()=>r.complete()),m(n=>P({ref:e},n)))})}function Fn(e,{viewport$:t,header$:r}){let 
o=r.pipe(m(({height:i})=>i),X()),n=o.pipe(w(()=>Se(e).pipe(m(({height:i})=>({top:e.offsetTop,bottom:e.offsetTop+i})),te("bottom"))));return B([o,n,t]).pipe(m(([i,{top:s,bottom:a},{offset:{y:c},size:{height:p}}])=>(p=Math.max(0,p-Math.max(0,s-c,i)-Math.max(0,p+c-a)),{offset:s-i,height:p,active:s-i<=c})),X((i,s)=>i.offset===s.offset&&i.height===s.height&&i.active===s.active))}function Ra(e){let t=__md_get("__palette")||{index:e.findIndex(o=>matchMedia(o.getAttribute("data-md-color-media")).matches)},r=Math.max(0,Math.min(t.index,e.length-1));return R(...e).pipe(re(o=>h(o,"change").pipe(m(()=>o))),q(e[r]),m(o=>({index:e.indexOf(o),color:{media:o.getAttribute("data-md-color-media"),scheme:o.getAttribute("data-md-color-scheme"),primary:o.getAttribute("data-md-color-primary"),accent:o.getAttribute("data-md-color-accent")}})),Z(1))}function jn(e){let t=W("input",e),r=S("meta",{name:"theme-color"});document.head.appendChild(r);let o=S("meta",{name:"color-scheme"});document.head.appendChild(o);let n=At("(prefers-color-scheme: light)");return H(()=>{let i=new x;return i.subscribe(s=>{if(document.body.setAttribute("data-md-color-switching",""),s.color.media==="(prefers-color-scheme)"){let a=matchMedia("(prefers-color-scheme: light)"),c=document.querySelector(a.matches?"[data-md-color-media='(prefers-color-scheme: light)']":"[data-md-color-media='(prefers-color-scheme: dark)']");s.color.scheme=c.getAttribute("data-md-color-scheme"),s.color.primary=c.getAttribute("data-md-color-primary"),s.color.accent=c.getAttribute("data-md-color-accent")}for(let[a,c]of Object.entries(s.color))document.body.setAttribute(`data-md-color-${a}`,c);for(let a=0;a{let s=Oe("header"),a=window.getComputedStyle(s);return 
o.content=a.colorScheme,a.backgroundColor.match(/\d+/g).map(c=>(+c).toString(16).padStart(2,"0")).join("")})).subscribe(s=>r.content=`#${s}`),i.pipe(Me(ie)).subscribe(()=>{document.body.removeAttribute("data-md-color-switching")}),Ra(t).pipe(j(n.pipe(Ee(1))),at(),T(s=>i.next(s)),A(()=>i.complete()),m(s=>P({ref:e},s)))})}function Wn(e,{progress$:t}){return H(()=>{let r=new x;return r.subscribe(({value:o})=>{e.style.setProperty("--md-progress-value",`${o}`)}),t.pipe(T(o=>r.next({value:o})),A(()=>r.complete()),m(o=>({ref:e,value:o})))})}var Yr=jt(Kr());function Ia(e){e.setAttribute("data-md-copying","");let t=e.closest("[data-copy]"),r=t?t.getAttribute("data-copy"):e.innerText;return e.removeAttribute("data-md-copying"),r.trimEnd()}function Un({alert$:e}){Yr.default.isSupported()&&new I(t=>{new Yr.default("[data-clipboard-target], [data-clipboard-text]",{text:r=>r.getAttribute("data-clipboard-text")||Ia(U(r.getAttribute("data-clipboard-target")))}).on("success",r=>t.next(r))}).pipe(T(t=>{t.trigger.focus()}),m(()=>we("clipboard.copied"))).subscribe(e)}function Fa(e){if(e.length<2)return[""];let[t,r]=[...e].sort((n,i)=>n.length-i.length).map(n=>n.replace(/[^/]+$/,"")),o=0;if(t===r)o=t.length;else for(;t.charCodeAt(o)===r.charCodeAt(o);)o++;return e.map(n=>n.replace(t.slice(0,o),""))}function ur(e){let t=__md_get("__sitemap",sessionStorage,e);if(t)return R(t);{let r=he();return on(new URL("sitemap.xml",e||r.base)).pipe(m(o=>Fa(W("loc",o).map(n=>n.textContent))),xe(()=>M),$e([]),T(o=>__md_set("__sitemap",o,sessionStorage,e)))}}function Nn(e){let t=ce("[rel=canonical]",e);typeof t!="undefined"&&(t.href=t.href.replace("//localhost:","//127.0.0.1:"));let r=new Map;for(let o of W(":scope > *",e)){let n=o.outerHTML;for(let i of["href","src"]){let s=o.getAttribute(i);if(s===null)continue;let a=new URL(s,t==null?void 0:t.href),c=o.cloneNode();c.setAttribute(i,`${a}`),n=c.outerHTML;break}r.set(n,o)}return r}function Dn({location$:e,viewport$:t,progress$:r}){let 
o=he();if(location.protocol==="file:")return M;let n=ur().pipe(m(l=>l.map(f=>`${new URL(f,o.base)}`))),i=h(document.body,"click").pipe(ae(n),w(([l,f])=>{if(!(l.target instanceof Element))return M;let u=l.target.closest("a");if(u===null)return M;if(u.target||l.metaKey||l.ctrlKey)return M;let d=new URL(u.href);return d.search=d.hash="",f.includes(`${d}`)?(l.preventDefault(),R(new URL(u.href))):M}),de());i.pipe(ue(1)).subscribe(()=>{let l=ce("link[rel=icon]");typeof l!="undefined"&&(l.href=l.href)}),h(window,"beforeunload").subscribe(()=>{history.scrollRestoration="auto"}),i.pipe(ae(t)).subscribe(([l,{offset:f}])=>{history.scrollRestoration="manual",history.replaceState(f,""),history.pushState(null,"",l)}),i.subscribe(e);let s=e.pipe(q(me()),te("pathname"),Ee(1),w(l=>lr(l,{progress$:r}).pipe(xe(()=>(st(l,!0),M))))),a=new DOMParser,c=s.pipe(w(l=>l.text()),w(l=>{let f=a.parseFromString(l,"text/html");for(let b of["[data-md-component=announce]","[data-md-component=container]","[data-md-component=header-topic]","[data-md-component=outdated]","[data-md-component=logo]","[data-md-component=skip]",...G("navigation.tabs.sticky")?["[data-md-component=tabs]"]:[]]){let D=ce(b),Q=ce(b,f);typeof D!="undefined"&&typeof Q!="undefined"&&D.replaceWith(Q)}let u=Nn(document.head),d=Nn(f.head);for(let[b,D]of d)D.getAttribute("rel")==="stylesheet"||D.hasAttribute("src")||(u.has(b)?u.delete(b):document.head.appendChild(D));for(let b of u.values())b.getAttribute("rel")==="stylesheet"||b.hasAttribute("src")||b.remove();let y=Oe("container");return We(W("script",y)).pipe(w(b=>{let D=f.createElement("script");if(b.src){for(let Q of b.getAttributeNames())D.setAttribute(Q,b.getAttribute(Q));return b.replaceWith(D),new I(Q=>{D.onload=()=>Q.complete()})}else return D.textContent=b.textContent,b.replaceWith(D),M}),ee(),oe(f))}),de());return h(window,"popstate").pipe(m(me)).subscribe(e),e.pipe(q(me()),Ce(2,1),v(([l,f])=>l.pathname===f.pathname&&l.hash!==f.hash),m(([,l])=>l)).subscribe(l=>{var 
f,u;history.state!==null||!l.hash?window.scrollTo(0,(u=(f=history.state)==null?void 0:f.y)!=null?u:0):(history.scrollRestoration="auto",pr(l.hash),history.scrollRestoration="manual")}),e.pipe(Ir(i),q(me()),Ce(2,1),v(([l,f])=>l.pathname===f.pathname&&l.hash===f.hash),m(([,l])=>l)).subscribe(l=>{history.scrollRestoration="auto",pr(l.hash),history.scrollRestoration="manual",history.back()}),c.pipe(ae(e)).subscribe(([,l])=>{var f,u;history.state!==null||!l.hash?window.scrollTo(0,(u=(f=history.state)==null?void 0:f.y)!=null?u:0):pr(l.hash)}),t.pipe(te("offset"),ye(100)).subscribe(({offset:l})=>{history.replaceState(l,"")}),c}var qn=jt(zn());function Kn(e){let t=e.separator.split("|").map(n=>n.replace(/(\(\?[!=<][^)]+\))/g,"").length===0?"\uFFFD":n).join("|"),r=new RegExp(t,"img"),o=(n,i,s)=>`${i}${s}`;return n=>{n=n.replace(/[\s*+\-:~^]+/g," ").trim();let i=new RegExp(`(^|${e.separator}|)(${n.replace(/[|\\{}()[\]^$+*?.-]/g,"\\$&").replace(r,"|")})`,"img");return s=>(0,qn.default)(s).replace(i,o).replace(/<\/mark>(\s+)]*>/img,"$1")}}function Ht(e){return e.type===1}function dr(e){return e.type===3}function Qn(e,t){let r=ln(e);return L(R(location.protocol!=="file:"),Ne("search")).pipe(Pe(o=>o),w(()=>t)).subscribe(({config:o,docs:n})=>r.next({type:0,data:{config:o,docs:n,options:{suggest:G("search.suggest")}}})),r}function Yn({document$:e}){let t=he(),r=De(new URL("../versions.json",t.base)).pipe(xe(()=>M)),o=r.pipe(m(n=>{let[,i]=t.base.match(/([^/]+)\/?$/);return n.find(({version:s,aliases:a})=>s===i||a.includes(i))||n[0]}));r.pipe(m(n=>new Map(n.map(i=>[`${new URL(`../${i.version}/`,t.base)}`,i]))),w(n=>h(document.body,"click").pipe(v(i=>!i.metaKey&&!i.ctrlKey),ae(o),w(([i,s])=>{if(i.target instanceof Element){let a=i.target.closest("a");if(a&&!a.target&&n.has(a.href)){let c=a.href;return!i.target.closest(".md-version")&&n.get(c)===s?M:(i.preventDefault(),R(c))}}return M}),w(i=>{let{version:s}=n.get(i);return ur(new URL(i)).pipe(m(a=>{let 
p=me().href.replace(t.base,"");return a.includes(p.split("#")[0])?new URL(`../${s}/${p}`,t.base):new URL(i)}))})))).subscribe(n=>st(n,!0)),B([r,o]).subscribe(([n,i])=>{U(".md-header__topic").appendChild(gn(n,i))}),e.pipe(w(()=>o)).subscribe(n=>{var s;let i=__md_get("__outdated",sessionStorage);if(i===null){i=!0;let a=((s=t.version)==null?void 0:s.default)||"latest";Array.isArray(a)||(a=[a]);e:for(let c of a)for(let p of n.aliases.concat(n.version))if(new RegExp(c,"i").test(p)){i=!1;break e}__md_set("__outdated",i,sessionStorage)}if(i)for(let a of ne("outdated"))a.hidden=!1})}function Da(e,{worker$:t}){let{searchParams:r}=me();r.has("q")&&(Ye("search",!0),e.value=r.get("q"),e.focus(),Ne("search").pipe(Pe(i=>!i)).subscribe(()=>{let i=me();i.searchParams.delete("q"),history.replaceState({},"",`${i}`)}));let o=vt(e),n=L(t.pipe(Pe(Ht)),h(e,"keyup"),o).pipe(m(()=>e.value),X());return B([n,o]).pipe(m(([i,s])=>({value:i,focus:s})),Z(1))}function Bn(e,{worker$:t}){let r=new x,o=r.pipe(ee(),oe(!0));B([t.pipe(Pe(Ht)),r],(i,s)=>s).pipe(te("value")).subscribe(({value:i})=>t.next({type:2,data:i})),r.pipe(te("focus")).subscribe(({focus:i})=>{i&&Ye("search",i)}),h(e.form,"reset").pipe(j(o)).subscribe(()=>e.focus());let n=U("header [for=__search]");return h(n,"click").subscribe(()=>e.focus()),Da(e,{worker$:t}).pipe(T(i=>r.next(i)),A(()=>r.complete()),m(i=>P({ref:e},i)),Z(1))}function Gn(e,{worker$:t,query$:r}){let o=new x,n=Go(e.parentElement).pipe(v(Boolean)),i=e.parentElement,s=U(":scope > :first-child",e),a=U(":scope > :last-child",e);Ne("search").subscribe(l=>a.setAttribute("role",l?"list":"presentation")),o.pipe(ae(r),Wr(t.pipe(Pe(Ht)))).subscribe(([{items:l},{value:f}])=>{switch(l.length){case 0:s.textContent=f.length?we("search.result.none"):we("search.result.placeholder");break;case 1:s.textContent=we("search.result.one");break;default:let u=ar(l.length);s.textContent=we("search.result.other",u)}});let 
c=o.pipe(T(()=>a.innerHTML=""),w(({items:l})=>L(R(...l.slice(0,10)),R(...l.slice(10)).pipe(Ce(4),Nr(n),w(([f])=>f)))),m(hn),de());return c.subscribe(l=>a.appendChild(l)),c.pipe(re(l=>{let f=ce("details",l);return typeof f=="undefined"?M:h(f,"toggle").pipe(j(o),m(()=>f))})).subscribe(l=>{l.open===!1&&l.offsetTop<=i.scrollTop&&i.scrollTo({top:l.offsetTop})}),t.pipe(v(dr),m(({data:l})=>l)).pipe(T(l=>o.next(l)),A(()=>o.complete()),m(l=>P({ref:e},l)))}function Va(e,{query$:t}){return t.pipe(m(({value:r})=>{let o=me();return o.hash="",r=r.replace(/\s+/g,"+").replace(/&/g,"%26").replace(/=/g,"%3D"),o.search=`q=${r}`,{url:o}}))}function Jn(e,t){let r=new x,o=r.pipe(ee(),oe(!0));return r.subscribe(({url:n})=>{e.setAttribute("data-clipboard-text",e.href),e.href=`${n}`}),h(e,"click").pipe(j(o)).subscribe(n=>n.preventDefault()),Va(e,t).pipe(T(n=>r.next(n)),A(()=>r.complete()),m(n=>P({ref:e},n)))}function Xn(e,{worker$:t,keyboard$:r}){let o=new x,n=Oe("search-query"),i=L(h(n,"keydown"),h(n,"focus")).pipe(Me(ie),m(()=>n.value),X());return o.pipe(Ze(i),m(([{suggest:a},c])=>{let p=c.split(/([\s-]+)/);if(a!=null&&a.length&&p[p.length-1]){let l=a[a.length-1];l.startsWith(p[p.length-1])&&(p[p.length-1]=l)}else p.length=0;return p})).subscribe(a=>e.innerHTML=a.join("").replace(/\s/g," ")),r.pipe(v(({mode:a})=>a==="search")).subscribe(a=>{switch(a.type){case"ArrowRight":e.innerText.length&&n.selectionStart===n.value.length&&(n.value=e.innerText);break}}),t.pipe(v(dr),m(({data:a})=>a)).pipe(T(a=>o.next(a)),A(()=>o.complete()),m(()=>({ref:e})))}function Zn(e,{index$:t,keyboard$:r}){let o=he();try{let n=Qn(o.search,t),i=Oe("search-query",e),s=Oe("search-result",e);h(e,"click").pipe(v(({target:c})=>c instanceof Element&&!!c.closest("a"))).subscribe(()=>Ye("search",!1)),r.pipe(v(({mode:c})=>c==="search")).subscribe(c=>{let p=Ie();switch(c.type){case"Enter":if(p===i){let l=new Map;for(let f of W(":first-child [href]",s)){let 
u=f.firstElementChild;l.set(f,parseFloat(u.getAttribute("data-md-score")))}if(l.size){let[[f]]=[...l].sort(([,u],[,d])=>d-u);f.click()}c.claim()}break;case"Escape":case"Tab":Ye("search",!1),i.blur();break;case"ArrowUp":case"ArrowDown":if(typeof p=="undefined")i.focus();else{let l=[i,...W(":not(details) > [href], summary, details[open] [href]",s)],f=Math.max(0,(Math.max(0,l.indexOf(p))+l.length+(c.type==="ArrowUp"?-1:1))%l.length);l[f].focus()}c.claim();break;default:i!==Ie()&&i.focus()}}),r.pipe(v(({mode:c})=>c==="global")).subscribe(c=>{switch(c.type){case"f":case"s":case"/":i.focus(),i.select(),c.claim();break}});let a=Bn(i,{worker$:n});return L(a,Gn(s,{worker$:n,query$:a})).pipe(Re(...ne("search-share",e).map(c=>Jn(c,{query$:a})),...ne("search-suggest",e).map(c=>Xn(c,{worker$:n,keyboard$:r}))))}catch(n){return e.hidden=!0,Ke}}function ei(e,{index$:t,location$:r}){return B([t,r.pipe(q(me()),v(o=>!!o.searchParams.get("h")))]).pipe(m(([o,n])=>Kn(o.config)(n.searchParams.get("h"))),m(o=>{var s;let n=new Map,i=document.createNodeIterator(e,NodeFilter.SHOW_TEXT);for(let a=i.nextNode();a;a=i.nextNode())if((s=a.parentElement)!=null&&s.offsetHeight){let c=a.textContent,p=o(c);p.length>c.length&&n.set(a,p)}for(let[a,c]of n){let{childNodes:p}=S("span",null,c);a.replaceWith(...Array.from(p))}return{ref:e,nodes:n}}))}function za(e,{viewport$:t,main$:r}){let o=e.closest(".md-grid"),n=o.offsetTop-o.parentElement.offsetTop;return B([r,t]).pipe(m(([{offset:i,height:s},{offset:{y:a}}])=>(s=s+Math.min(n,Math.max(0,a-i))-n,{height:s,locked:a>=i+n})),X((i,s)=>i.height===s.height&&i.locked===s.locked))}function Br(e,o){var n=o,{header$:t}=n,r=oo(n,["header$"]);let i=U(".md-sidebar__scrollwrap",e),{y:s}=Ue(i);return H(()=>{let a=new x,c=a.pipe(ee(),oe(!0)),p=a.pipe(Le(0,ge));return p.pipe(ae(t)).subscribe({next([{height:l},{height:f}]){i.style.height=`${l-2*s}px`,e.style.top=`${f}px`},complete(){i.style.height="",e.style.top=""}}),p.pipe(Pe()).subscribe(()=>{for(let l of 
W(".md-nav__link--active[href]",e)){if(!l.clientHeight)continue;let f=l.closest(".md-sidebar__scrollwrap");if(typeof f!="undefined"){let u=l.offsetTop-f.offsetTop,{height:d}=le(f);f.scrollTo({top:u-d/2})}}}),fe(W("label[tabindex]",e)).pipe(re(l=>h(l,"click").pipe(Me(ie),m(()=>l),j(c)))).subscribe(l=>{let f=U(`[id="${l.htmlFor}"]`);U(`[aria-labelledby="${l.id}"]`).setAttribute("aria-expanded",`${f.checked}`)}),za(e,r).pipe(T(l=>a.next(l)),A(()=>a.complete()),m(l=>P({ref:e},l)))})}function ti(e,t){if(typeof t!="undefined"){let r=`https://api.github.com/repos/${e}/${t}`;return Lt(De(`${r}/releases/latest`).pipe(xe(()=>M),m(o=>({version:o.tag_name})),$e({})),De(r).pipe(xe(()=>M),m(o=>({stars:o.stargazers_count,forks:o.forks_count})),$e({}))).pipe(m(([o,n])=>P(P({},o),n)))}else{let r=`https://api.github.com/users/${e}`;return De(r).pipe(m(o=>({repositories:o.public_repos})),$e({}))}}function ri(e,t){let r=`https://${e}/api/v4/projects/${encodeURIComponent(t)}`;return De(r).pipe(xe(()=>M),m(({star_count:o,forks_count:n})=>({stars:o,forks:n})),$e({}))}function oi(e){let t=e.match(/^.+github\.com\/([^/]+)\/?([^/]+)?/i);if(t){let[,r,o]=t;return ti(r,o)}if(t=e.match(/^.+?([^/]*gitlab[^/]+)\/(.+?)\/?$/i),t){let[,r,o]=t;return ri(r,o)}return M}var qa;function Ka(e){return qa||(qa=H(()=>{let t=__md_get("__source",sessionStorage);if(t)return R(t);if(ne("consent").length){let o=__md_get("__consent");if(!(o&&o.github))return M}return oi(e.href).pipe(T(o=>__md_set("__source",o,sessionStorage)))}).pipe(xe(()=>M),v(t=>Object.keys(t).length>0),m(t=>({facts:t})),Z(1)))}function ni(e){let t=U(":scope > :last-child",e);return H(()=>{let r=new x;return r.subscribe(({facts:o})=>{t.appendChild(bn(o)),t.classList.add("md-source__repository--active")}),Ka(e).pipe(T(o=>r.next(o)),A(()=>r.complete()),m(o=>P({ref:e},o)))})}function Qa(e,{viewport$:t,header$:r}){return Se(document.body).pipe(w(()=>mr(e,{header$:r,viewport$:t})),m(({offset:{y:o}})=>({hidden:o>=10})),te("hidden"))}function 
ii(e,t){return H(()=>{let r=new x;return r.subscribe({next({hidden:o}){e.hidden=o},complete(){e.hidden=!1}}),(G("navigation.tabs.sticky")?R({hidden:!1}):Qa(e,t)).pipe(T(o=>r.next(o)),A(()=>r.complete()),m(o=>P({ref:e},o)))})}function Ya(e,{viewport$:t,header$:r}){let o=new Map,n=W("[href^=\\#]",e);for(let a of n){let c=decodeURIComponent(a.hash.substring(1)),p=ce(`[id="${c}"]`);typeof p!="undefined"&&o.set(a,p)}let i=r.pipe(te("height"),m(({height:a})=>{let c=Oe("main"),p=U(":scope > :first-child",c);return a+.8*(p.offsetTop-c.offsetTop)}),de());return Se(document.body).pipe(te("height"),w(a=>H(()=>{let c=[];return R([...o].reduce((p,[l,f])=>{for(;c.length&&o.get(c[c.length-1]).tagName>=f.tagName;)c.pop();let u=f.offsetTop;for(;!u&&f.parentElement;)f=f.parentElement,u=f.offsetTop;let d=f.offsetParent;for(;d;d=d.offsetParent)u+=d.offsetTop;return p.set([...c=[...c,l]].reverse(),u)},new Map))}).pipe(m(c=>new Map([...c].sort(([,p],[,l])=>p-l))),Ze(i),w(([c,p])=>t.pipe(Fr(([l,f],{offset:{y:u},size:d})=>{let y=u+d.height>=Math.floor(a.height);for(;f.length;){let[,b]=f[0];if(b-p=u&&!y)f=[l.pop(),...f];else break}return[l,f]},[[],[...c]]),X((l,f)=>l[0]===f[0]&&l[1]===f[1])))))).pipe(m(([a,c])=>({prev:a.map(([p])=>p),next:c.map(([p])=>p)})),q({prev:[],next:[]}),Ce(2,1),m(([a,c])=>a.prev.length{let i=new x,s=i.pipe(ee(),oe(!0));if(i.subscribe(({prev:a,next:c})=>{for(let[p]of c)p.classList.remove("md-nav__link--passed"),p.classList.remove("md-nav__link--active");for(let[p,[l]]of a.entries())l.classList.add("md-nav__link--passed"),l.classList.toggle("md-nav__link--active",p===a.length-1)}),G("toc.follow")){let a=L(t.pipe(ye(1),m(()=>{})),t.pipe(ye(250),m(()=>"smooth")));i.pipe(v(({prev:c})=>c.length>0),Ze(o.pipe(Me(ie))),ae(a)).subscribe(([[{prev:c}],p])=>{let[l]=c[c.length-1];if(l.offsetHeight){let f=sr(l);if(typeof f!="undefined"){let u=l.offsetTop-f.offsetTop,{height:d}=le(f);f.scrollTo({top:u-d/2,behavior:p})}}})}return 
G("navigation.tracking")&&t.pipe(j(s),te("offset"),ye(250),Ee(1),j(n.pipe(Ee(1))),at({delay:250}),ae(i)).subscribe(([,{prev:a}])=>{let c=me(),p=a[a.length-1];if(p&&p.length){let[l]=p,{hash:f}=new URL(l.href);c.hash!==f&&(c.hash=f,history.replaceState({},"",`${c}`))}else c.hash="",history.replaceState({},"",`${c}`)}),Ya(e,{viewport$:t,header$:r}).pipe(T(a=>i.next(a)),A(()=>i.complete()),m(a=>P({ref:e},a)))})}function Ba(e,{viewport$:t,main$:r,target$:o}){let n=t.pipe(m(({offset:{y:s}})=>s),Ce(2,1),m(([s,a])=>s>a&&a>0),X()),i=r.pipe(m(({active:s})=>s));return B([i,n]).pipe(m(([s,a])=>!(s&&a)),X(),j(o.pipe(Ee(1))),oe(!0),at({delay:250}),m(s=>({hidden:s})))}function si(e,{viewport$:t,header$:r,main$:o,target$:n}){let i=new x,s=i.pipe(ee(),oe(!0));return i.subscribe({next({hidden:a}){e.hidden=a,a?(e.setAttribute("tabindex","-1"),e.blur()):e.removeAttribute("tabindex")},complete(){e.style.top="",e.hidden=!0,e.removeAttribute("tabindex")}}),r.pipe(j(s),te("height")).subscribe(({height:a})=>{e.style.top=`${a+16}px`}),h(e,"click").subscribe(a=>{a.preventDefault(),window.scrollTo({top:0})}),Ba(e,{viewport$:t,main$:o,target$:n}).pipe(T(a=>i.next(a)),A(()=>i.complete()),m(a=>P({ref:e},a)))}function ci({document$:e}){e.pipe(w(()=>W(".md-ellipsis")),re(t=>yt(t).pipe(j(e.pipe(Ee(1))),v(r=>r),m(()=>t),ue(1))),v(t=>t.offsetWidth{let r=t.innerText,o=t.closest("a")||t;return o.title=r,Be(o).pipe(j(e.pipe(Ee(1))),A(()=>o.removeAttribute("title")))})).subscribe(),e.pipe(w(()=>W(".md-status")),re(t=>Be(t))).subscribe()}function pi({document$:e,tablet$:t}){e.pipe(w(()=>W(".md-toggle--indeterminate")),T(r=>{r.indeterminate=!0,r.checked=!1}),re(r=>h(r,"change").pipe(Ur(()=>r.classList.contains("md-toggle--indeterminate")),m(()=>r))),ae(t)).subscribe(([r,o])=>{r.classList.remove("md-toggle--indeterminate"),o&&(r.checked=!1)})}function Ga(){return/(iPad|iPhone|iPod)/.test(navigator.userAgent)}function 
li({document$:e}){e.pipe(w(()=>W("[data-md-scrollfix]")),T(t=>t.removeAttribute("data-md-scrollfix")),v(Ga),re(t=>h(t,"touchstart").pipe(m(()=>t)))).subscribe(t=>{let r=t.scrollTop;r===0?t.scrollTop=1:r+t.offsetHeight===t.scrollHeight&&(t.scrollTop=r-1)})}function mi({viewport$:e,tablet$:t}){B([Ne("search"),t]).pipe(m(([r,o])=>r&&!o),w(r=>R(r).pipe(Qe(r?400:100))),ae(e)).subscribe(([r,{offset:{y:o}}])=>{if(r)document.body.setAttribute("data-md-scrolllock",""),document.body.style.top=`-${o}px`;else{let n=-1*parseInt(document.body.style.top,10);document.body.removeAttribute("data-md-scrolllock"),document.body.style.top="",n&&window.scrollTo(0,n)}})}Object.entries||(Object.entries=function(e){let t=[];for(let r of Object.keys(e))t.push([r,e[r]]);return t});Object.values||(Object.values=function(e){let t=[];for(let r of Object.keys(e))t.push(e[r]);return t});typeof Element!="undefined"&&(Element.prototype.scrollTo||(Element.prototype.scrollTo=function(e,t){typeof e=="object"?(this.scrollLeft=e.left,this.scrollTop=e.top):(this.scrollLeft=e,this.scrollTop=t)}),Element.prototype.replaceWith||(Element.prototype.replaceWith=function(...e){let t=this.parentNode;if(t){e.length===0&&t.removeChild(this);for(let r=e.length-1;r>=0;r--){let o=e[r];typeof o=="string"?o=document.createTextNode(o):o.parentNode&&o.parentNode.removeChild(o),r?t.insertBefore(this.previousSibling,o):t.replaceChild(o,this)}}}));function Ja(){return location.protocol==="file:"?gt(`${new URL("search/search_index.js",Gr.base)}`).pipe(m(()=>__index),Z(1)):De(new URL("search/search_index.json",Gr.base))}document.documentElement.classList.remove("no-js");document.documentElement.classList.add("js");var rt=zo(),Pt=Zo(),wt=tn(Pt),Jr=Xo(),_e=pn(),hr=At("(min-width: 960px)"),ui=At("(min-width: 1220px)"),di=rn(),Gr=he(),hi=document.forms.namedItem("search")?Ja():Ke,Xr=new x;Un({alert$:Xr});var Zr=new x;G("navigation.instant")&&Dn({location$:Pt,viewport$:_e,progress$:Zr}).subscribe(rt);var 
fi;((fi=Gr.version)==null?void 0:fi.provider)==="mike"&&Yn({document$:rt});L(Pt,wt).pipe(Qe(125)).subscribe(()=>{Ye("drawer",!1),Ye("search",!1)});Jr.pipe(v(({mode:e})=>e==="global")).subscribe(e=>{switch(e.type){case"p":case",":let t=ce("link[rel=prev]");typeof t!="undefined"&&st(t);break;case"n":case".":let r=ce("link[rel=next]");typeof r!="undefined"&&st(r);break;case"Enter":let o=Ie();o instanceof HTMLLabelElement&&o.click()}});ci({document$:rt});pi({document$:rt,tablet$:hr});li({document$:rt});mi({viewport$:_e,tablet$:hr});var tt=Pn(Oe("header"),{viewport$:_e}),$t=rt.pipe(m(()=>Oe("main")),w(e=>Fn(e,{viewport$:_e,header$:tt})),Z(1)),Xa=L(...ne("consent").map(e=>fn(e,{target$:wt})),...ne("dialog").map(e=>$n(e,{alert$:Xr})),...ne("header").map(e=>Rn(e,{viewport$:_e,header$:tt,main$:$t})),...ne("palette").map(e=>jn(e)),...ne("progress").map(e=>Wn(e,{progress$:Zr})),...ne("search").map(e=>Zn(e,{index$:hi,keyboard$:Jr})),...ne("source").map(e=>ni(e))),Za=H(()=>L(...ne("announce").map(e=>mn(e)),...ne("content").map(e=>Hn(e,{viewport$:_e,target$:wt,print$:di})),...ne("content").map(e=>G("search.highlight")?ei(e,{index$:hi,location$:Pt}):M),...ne("header-title").map(e=>In(e,{viewport$:_e,header$:tt})),...ne("sidebar").map(e=>e.getAttribute("data-md-type")==="navigation"?Dr(ui,()=>Br(e,{viewport$:_e,header$:tt,main$:$t})):Dr(hr,()=>Br(e,{viewport$:_e,header$:tt,main$:$t}))),...ne("tabs").map(e=>ii(e,{viewport$:_e,header$:tt})),...ne("toc").map(e=>ai(e,{viewport$:_e,header$:tt,main$:$t,target$:wt})),...ne("top").map(e=>si(e,{viewport$:_e,header$:tt,main$:$t,target$:wt})))),bi=rt.pipe(w(()=>Za),Re(Xa),Z(1));bi.subscribe();window.document$=rt;window.location$=Pt;window.target$=wt;window.keyboard$=Jr;window.viewport$=_e;window.tablet$=hr;window.screen$=ui;window.print$=di;window.alert$=Xr;window.progress$=Zr;window.component$=bi;})(); +//# sourceMappingURL=bundle.7389ff0e.min.js.map + diff --git a/0.6.1/assets/javascripts/bundle.7389ff0e.min.js.map 
b/0.6.1/assets/javascripts/bundle.7389ff0e.min.js.map new file mode 100644 index 000000000..dbee324c2 --- /dev/null +++ b/0.6.1/assets/javascripts/bundle.7389ff0e.min.js.map @@ -0,0 +1,7 @@ +{ + "version": 3, + "sources": ["node_modules/focus-visible/dist/focus-visible.js", "node_modules/clipboard/dist/clipboard.js", "node_modules/escape-html/index.js", "src/templates/assets/javascripts/bundle.ts", "node_modules/rxjs/node_modules/tslib/tslib.es6.js", "node_modules/rxjs/src/internal/util/isFunction.ts", "node_modules/rxjs/src/internal/util/createErrorClass.ts", "node_modules/rxjs/src/internal/util/UnsubscriptionError.ts", "node_modules/rxjs/src/internal/util/arrRemove.ts", "node_modules/rxjs/src/internal/Subscription.ts", "node_modules/rxjs/src/internal/config.ts", "node_modules/rxjs/src/internal/scheduler/timeoutProvider.ts", "node_modules/rxjs/src/internal/util/reportUnhandledError.ts", "node_modules/rxjs/src/internal/util/noop.ts", "node_modules/rxjs/src/internal/NotificationFactories.ts", "node_modules/rxjs/src/internal/util/errorContext.ts", "node_modules/rxjs/src/internal/Subscriber.ts", "node_modules/rxjs/src/internal/symbol/observable.ts", "node_modules/rxjs/src/internal/util/identity.ts", "node_modules/rxjs/src/internal/util/pipe.ts", "node_modules/rxjs/src/internal/Observable.ts", "node_modules/rxjs/src/internal/util/lift.ts", "node_modules/rxjs/src/internal/operators/OperatorSubscriber.ts", "node_modules/rxjs/src/internal/scheduler/animationFrameProvider.ts", "node_modules/rxjs/src/internal/util/ObjectUnsubscribedError.ts", "node_modules/rxjs/src/internal/Subject.ts", "node_modules/rxjs/src/internal/scheduler/dateTimestampProvider.ts", "node_modules/rxjs/src/internal/ReplaySubject.ts", "node_modules/rxjs/src/internal/scheduler/Action.ts", "node_modules/rxjs/src/internal/scheduler/intervalProvider.ts", "node_modules/rxjs/src/internal/scheduler/AsyncAction.ts", "node_modules/rxjs/src/internal/Scheduler.ts", 
"node_modules/rxjs/src/internal/scheduler/AsyncScheduler.ts", "node_modules/rxjs/src/internal/scheduler/async.ts", "node_modules/rxjs/src/internal/scheduler/AnimationFrameAction.ts", "node_modules/rxjs/src/internal/scheduler/AnimationFrameScheduler.ts", "node_modules/rxjs/src/internal/scheduler/animationFrame.ts", "node_modules/rxjs/src/internal/observable/empty.ts", "node_modules/rxjs/src/internal/util/isScheduler.ts", "node_modules/rxjs/src/internal/util/args.ts", "node_modules/rxjs/src/internal/util/isArrayLike.ts", "node_modules/rxjs/src/internal/util/isPromise.ts", "node_modules/rxjs/src/internal/util/isInteropObservable.ts", "node_modules/rxjs/src/internal/util/isAsyncIterable.ts", "node_modules/rxjs/src/internal/util/throwUnobservableError.ts", "node_modules/rxjs/src/internal/symbol/iterator.ts", "node_modules/rxjs/src/internal/util/isIterable.ts", "node_modules/rxjs/src/internal/util/isReadableStreamLike.ts", "node_modules/rxjs/src/internal/observable/innerFrom.ts", "node_modules/rxjs/src/internal/util/executeSchedule.ts", "node_modules/rxjs/src/internal/operators/observeOn.ts", "node_modules/rxjs/src/internal/operators/subscribeOn.ts", "node_modules/rxjs/src/internal/scheduled/scheduleObservable.ts", "node_modules/rxjs/src/internal/scheduled/schedulePromise.ts", "node_modules/rxjs/src/internal/scheduled/scheduleArray.ts", "node_modules/rxjs/src/internal/scheduled/scheduleIterable.ts", "node_modules/rxjs/src/internal/scheduled/scheduleAsyncIterable.ts", "node_modules/rxjs/src/internal/scheduled/scheduleReadableStreamLike.ts", "node_modules/rxjs/src/internal/scheduled/scheduled.ts", "node_modules/rxjs/src/internal/observable/from.ts", "node_modules/rxjs/src/internal/observable/of.ts", "node_modules/rxjs/src/internal/observable/throwError.ts", "node_modules/rxjs/src/internal/util/EmptyError.ts", "node_modules/rxjs/src/internal/util/isDate.ts", "node_modules/rxjs/src/internal/operators/map.ts", "node_modules/rxjs/src/internal/util/mapOneOrManyArgs.ts", 
"node_modules/rxjs/src/internal/util/argsArgArrayOrObject.ts", "node_modules/rxjs/src/internal/util/createObject.ts", "node_modules/rxjs/src/internal/observable/combineLatest.ts", "node_modules/rxjs/src/internal/operators/mergeInternals.ts", "node_modules/rxjs/src/internal/operators/mergeMap.ts", "node_modules/rxjs/src/internal/operators/mergeAll.ts", "node_modules/rxjs/src/internal/operators/concatAll.ts", "node_modules/rxjs/src/internal/observable/concat.ts", "node_modules/rxjs/src/internal/observable/defer.ts", "node_modules/rxjs/src/internal/observable/fromEvent.ts", "node_modules/rxjs/src/internal/observable/fromEventPattern.ts", "node_modules/rxjs/src/internal/observable/timer.ts", "node_modules/rxjs/src/internal/observable/merge.ts", "node_modules/rxjs/src/internal/observable/never.ts", "node_modules/rxjs/src/internal/util/argsOrArgArray.ts", "node_modules/rxjs/src/internal/operators/filter.ts", "node_modules/rxjs/src/internal/observable/zip.ts", "node_modules/rxjs/src/internal/operators/audit.ts", "node_modules/rxjs/src/internal/operators/auditTime.ts", "node_modules/rxjs/src/internal/operators/bufferCount.ts", "node_modules/rxjs/src/internal/operators/catchError.ts", "node_modules/rxjs/src/internal/operators/scanInternals.ts", "node_modules/rxjs/src/internal/operators/combineLatest.ts", "node_modules/rxjs/src/internal/operators/combineLatestWith.ts", "node_modules/rxjs/src/internal/operators/debounceTime.ts", "node_modules/rxjs/src/internal/operators/defaultIfEmpty.ts", "node_modules/rxjs/src/internal/operators/take.ts", "node_modules/rxjs/src/internal/operators/ignoreElements.ts", "node_modules/rxjs/src/internal/operators/mapTo.ts", "node_modules/rxjs/src/internal/operators/delayWhen.ts", "node_modules/rxjs/src/internal/operators/delay.ts", "node_modules/rxjs/src/internal/operators/distinctUntilChanged.ts", "node_modules/rxjs/src/internal/operators/distinctUntilKeyChanged.ts", "node_modules/rxjs/src/internal/operators/throwIfEmpty.ts", 
"node_modules/rxjs/src/internal/operators/endWith.ts", "node_modules/rxjs/src/internal/operators/finalize.ts", "node_modules/rxjs/src/internal/operators/first.ts", "node_modules/rxjs/src/internal/operators/takeLast.ts", "node_modules/rxjs/src/internal/operators/merge.ts", "node_modules/rxjs/src/internal/operators/mergeWith.ts", "node_modules/rxjs/src/internal/operators/repeat.ts", "node_modules/rxjs/src/internal/operators/sample.ts", "node_modules/rxjs/src/internal/operators/scan.ts", "node_modules/rxjs/src/internal/operators/share.ts", "node_modules/rxjs/src/internal/operators/shareReplay.ts", "node_modules/rxjs/src/internal/operators/skip.ts", "node_modules/rxjs/src/internal/operators/skipUntil.ts", "node_modules/rxjs/src/internal/operators/startWith.ts", "node_modules/rxjs/src/internal/operators/switchMap.ts", "node_modules/rxjs/src/internal/operators/takeUntil.ts", "node_modules/rxjs/src/internal/operators/takeWhile.ts", "node_modules/rxjs/src/internal/operators/tap.ts", "node_modules/rxjs/src/internal/operators/throttle.ts", "node_modules/rxjs/src/internal/operators/throttleTime.ts", "node_modules/rxjs/src/internal/operators/withLatestFrom.ts", "node_modules/rxjs/src/internal/operators/zip.ts", "node_modules/rxjs/src/internal/operators/zipWith.ts", "src/templates/assets/javascripts/browser/document/index.ts", "src/templates/assets/javascripts/browser/element/_/index.ts", "src/templates/assets/javascripts/browser/element/focus/index.ts", "src/templates/assets/javascripts/browser/element/hover/index.ts", "src/templates/assets/javascripts/browser/element/offset/_/index.ts", "src/templates/assets/javascripts/browser/element/offset/content/index.ts", "src/templates/assets/javascripts/utilities/h/index.ts", "src/templates/assets/javascripts/utilities/round/index.ts", "src/templates/assets/javascripts/browser/script/index.ts", "src/templates/assets/javascripts/browser/element/size/_/index.ts", "src/templates/assets/javascripts/browser/element/size/content/index.ts", 
"src/templates/assets/javascripts/browser/element/visibility/index.ts", "src/templates/assets/javascripts/browser/toggle/index.ts", "src/templates/assets/javascripts/browser/keyboard/index.ts", "src/templates/assets/javascripts/browser/location/_/index.ts", "src/templates/assets/javascripts/browser/location/hash/index.ts", "src/templates/assets/javascripts/browser/media/index.ts", "src/templates/assets/javascripts/browser/request/index.ts", "src/templates/assets/javascripts/browser/viewport/offset/index.ts", "src/templates/assets/javascripts/browser/viewport/size/index.ts", "src/templates/assets/javascripts/browser/viewport/_/index.ts", "src/templates/assets/javascripts/browser/viewport/at/index.ts", "src/templates/assets/javascripts/browser/worker/index.ts", "src/templates/assets/javascripts/_/index.ts", "src/templates/assets/javascripts/components/_/index.ts", "src/templates/assets/javascripts/components/announce/index.ts", "src/templates/assets/javascripts/components/consent/index.ts", "src/templates/assets/javascripts/templates/tooltip/index.tsx", "src/templates/assets/javascripts/templates/annotation/index.tsx", "src/templates/assets/javascripts/templates/clipboard/index.tsx", "src/templates/assets/javascripts/templates/search/index.tsx", "src/templates/assets/javascripts/templates/source/index.tsx", "src/templates/assets/javascripts/templates/tabbed/index.tsx", "src/templates/assets/javascripts/templates/table/index.tsx", "src/templates/assets/javascripts/templates/version/index.tsx", "src/templates/assets/javascripts/components/tooltip/index.ts", "src/templates/assets/javascripts/components/content/annotation/_/index.ts", "src/templates/assets/javascripts/components/content/annotation/list/index.ts", "src/templates/assets/javascripts/components/content/annotation/block/index.ts", "src/templates/assets/javascripts/components/content/code/_/index.ts", "src/templates/assets/javascripts/components/content/details/index.ts", 
"src/templates/assets/javascripts/components/content/mermaid/index.css", "src/templates/assets/javascripts/components/content/mermaid/index.ts", "src/templates/assets/javascripts/components/content/table/index.ts", "src/templates/assets/javascripts/components/content/tabs/index.ts", "src/templates/assets/javascripts/components/content/_/index.ts", "src/templates/assets/javascripts/components/dialog/index.ts", "src/templates/assets/javascripts/components/header/_/index.ts", "src/templates/assets/javascripts/components/header/title/index.ts", "src/templates/assets/javascripts/components/main/index.ts", "src/templates/assets/javascripts/components/palette/index.ts", "src/templates/assets/javascripts/components/progress/index.ts", "src/templates/assets/javascripts/integrations/clipboard/index.ts", "src/templates/assets/javascripts/integrations/sitemap/index.ts", "src/templates/assets/javascripts/integrations/instant/index.ts", "src/templates/assets/javascripts/integrations/search/highlighter/index.ts", "src/templates/assets/javascripts/integrations/search/worker/message/index.ts", "src/templates/assets/javascripts/integrations/search/worker/_/index.ts", "src/templates/assets/javascripts/integrations/version/index.ts", "src/templates/assets/javascripts/components/search/query/index.ts", "src/templates/assets/javascripts/components/search/result/index.ts", "src/templates/assets/javascripts/components/search/share/index.ts", "src/templates/assets/javascripts/components/search/suggest/index.ts", "src/templates/assets/javascripts/components/search/_/index.ts", "src/templates/assets/javascripts/components/search/highlight/index.ts", "src/templates/assets/javascripts/components/sidebar/index.ts", "src/templates/assets/javascripts/components/source/facts/github/index.ts", "src/templates/assets/javascripts/components/source/facts/gitlab/index.ts", "src/templates/assets/javascripts/components/source/facts/_/index.ts", 
"src/templates/assets/javascripts/components/source/_/index.ts", "src/templates/assets/javascripts/components/tabs/index.ts", "src/templates/assets/javascripts/components/toc/index.ts", "src/templates/assets/javascripts/components/top/index.ts", "src/templates/assets/javascripts/patches/ellipsis/index.ts", "src/templates/assets/javascripts/patches/indeterminate/index.ts", "src/templates/assets/javascripts/patches/scrollfix/index.ts", "src/templates/assets/javascripts/patches/scrolllock/index.ts", "src/templates/assets/javascripts/polyfills/index.ts"], + "sourcesContent": ["(function (global, factory) {\n typeof exports === 'object' && typeof module !== 'undefined' ? factory() :\n typeof define === 'function' && define.amd ? define(factory) :\n (factory());\n}(this, (function () { 'use strict';\n\n /**\n * Applies the :focus-visible polyfill at the given scope.\n * A scope in this case is either the top-level Document or a Shadow Root.\n *\n * @param {(Document|ShadowRoot)} scope\n * @see https://github.com/WICG/focus-visible\n */\n function applyFocusVisiblePolyfill(scope) {\n var hadKeyboardEvent = true;\n var hadFocusVisibleRecently = false;\n var hadFocusVisibleRecentlyTimeout = null;\n\n var inputTypesAllowlist = {\n text: true,\n search: true,\n url: true,\n tel: true,\n email: true,\n password: true,\n number: true,\n date: true,\n month: true,\n week: true,\n time: true,\n datetime: true,\n 'datetime-local': true\n };\n\n /**\n * Helper function for legacy browsers and iframes which sometimes focus\n * elements like document, body, and non-interactive SVG.\n * @param {Element} el\n */\n function isValidFocusTarget(el) {\n if (\n el &&\n el !== document &&\n el.nodeName !== 'HTML' &&\n el.nodeName !== 'BODY' &&\n 'classList' in el &&\n 'contains' in el.classList\n ) {\n return true;\n }\n return false;\n }\n\n /**\n * Computes whether the given element should automatically trigger the\n * `focus-visible` class being added, i.e. 
whether it should always match\n * `:focus-visible` when focused.\n * @param {Element} el\n * @return {boolean}\n */\n function focusTriggersKeyboardModality(el) {\n var type = el.type;\n var tagName = el.tagName;\n\n if (tagName === 'INPUT' && inputTypesAllowlist[type] && !el.readOnly) {\n return true;\n }\n\n if (tagName === 'TEXTAREA' && !el.readOnly) {\n return true;\n }\n\n if (el.isContentEditable) {\n return true;\n }\n\n return false;\n }\n\n /**\n * Add the `focus-visible` class to the given element if it was not added by\n * the author.\n * @param {Element} el\n */\n function addFocusVisibleClass(el) {\n if (el.classList.contains('focus-visible')) {\n return;\n }\n el.classList.add('focus-visible');\n el.setAttribute('data-focus-visible-added', '');\n }\n\n /**\n * Remove the `focus-visible` class from the given element if it was not\n * originally added by the author.\n * @param {Element} el\n */\n function removeFocusVisibleClass(el) {\n if (!el.hasAttribute('data-focus-visible-added')) {\n return;\n }\n el.classList.remove('focus-visible');\n el.removeAttribute('data-focus-visible-added');\n }\n\n /**\n * If the most recent user interaction was via the keyboard;\n * and the key press did not include a meta, alt/option, or control key;\n * then the modality is keyboard. 
Otherwise, the modality is not keyboard.\n * Apply `focus-visible` to any current active element and keep track\n * of our keyboard modality state with `hadKeyboardEvent`.\n * @param {KeyboardEvent} e\n */\n function onKeyDown(e) {\n if (e.metaKey || e.altKey || e.ctrlKey) {\n return;\n }\n\n if (isValidFocusTarget(scope.activeElement)) {\n addFocusVisibleClass(scope.activeElement);\n }\n\n hadKeyboardEvent = true;\n }\n\n /**\n * If at any point a user clicks with a pointing device, ensure that we change\n * the modality away from keyboard.\n * This avoids the situation where a user presses a key on an already focused\n * element, and then clicks on a different element, focusing it with a\n * pointing device, while we still think we're in keyboard modality.\n * @param {Event} e\n */\n function onPointerDown(e) {\n hadKeyboardEvent = false;\n }\n\n /**\n * On `focus`, add the `focus-visible` class to the target if:\n * - the target received focus as a result of keyboard navigation, or\n * - the event target is an element that will likely require interaction\n * via the keyboard (e.g. 
a text box)\n * @param {Event} e\n */\n function onFocus(e) {\n // Prevent IE from focusing the document or HTML element.\n if (!isValidFocusTarget(e.target)) {\n return;\n }\n\n if (hadKeyboardEvent || focusTriggersKeyboardModality(e.target)) {\n addFocusVisibleClass(e.target);\n }\n }\n\n /**\n * On `blur`, remove the `focus-visible` class from the target.\n * @param {Event} e\n */\n function onBlur(e) {\n if (!isValidFocusTarget(e.target)) {\n return;\n }\n\n if (\n e.target.classList.contains('focus-visible') ||\n e.target.hasAttribute('data-focus-visible-added')\n ) {\n // To detect a tab/window switch, we look for a blur event followed\n // rapidly by a visibility change.\n // If we don't see a visibility change within 100ms, it's probably a\n // regular focus change.\n hadFocusVisibleRecently = true;\n window.clearTimeout(hadFocusVisibleRecentlyTimeout);\n hadFocusVisibleRecentlyTimeout = window.setTimeout(function() {\n hadFocusVisibleRecently = false;\n }, 100);\n removeFocusVisibleClass(e.target);\n }\n }\n\n /**\n * If the user changes tabs, keep track of whether or not the previously\n * focused element had .focus-visible.\n * @param {Event} e\n */\n function onVisibilityChange(e) {\n if (document.visibilityState === 'hidden') {\n // If the tab becomes active again, the browser will handle calling focus\n // on the element (Safari actually calls it twice).\n // If this tab change caused a blur on an element with focus-visible,\n // re-apply the class when the user switches back to the tab.\n if (hadFocusVisibleRecently) {\n hadKeyboardEvent = true;\n }\n addInitialPointerMoveListeners();\n }\n }\n\n /**\n * Add a group of listeners to detect usage of any pointing devices.\n * These listeners will be added when the polyfill first loads, and anytime\n * the window is blurred, so that they are active when the window regains\n * focus.\n */\n function addInitialPointerMoveListeners() {\n document.addEventListener('mousemove', onInitialPointerMove);\n 
document.addEventListener('mousedown', onInitialPointerMove);\n document.addEventListener('mouseup', onInitialPointerMove);\n document.addEventListener('pointermove', onInitialPointerMove);\n document.addEventListener('pointerdown', onInitialPointerMove);\n document.addEventListener('pointerup', onInitialPointerMove);\n document.addEventListener('touchmove', onInitialPointerMove);\n document.addEventListener('touchstart', onInitialPointerMove);\n document.addEventListener('touchend', onInitialPointerMove);\n }\n\n function removeInitialPointerMoveListeners() {\n document.removeEventListener('mousemove', onInitialPointerMove);\n document.removeEventListener('mousedown', onInitialPointerMove);\n document.removeEventListener('mouseup', onInitialPointerMove);\n document.removeEventListener('pointermove', onInitialPointerMove);\n document.removeEventListener('pointerdown', onInitialPointerMove);\n document.removeEventListener('pointerup', onInitialPointerMove);\n document.removeEventListener('touchmove', onInitialPointerMove);\n document.removeEventListener('touchstart', onInitialPointerMove);\n document.removeEventListener('touchend', onInitialPointerMove);\n }\n\n /**\n * When the polfyill first loads, assume the user is in keyboard modality.\n * If any event is received from a pointing device (e.g. mouse, pointer,\n * touch), turn off keyboard modality.\n * This accounts for situations where focus enters the page from the URL bar.\n * @param {Event} e\n */\n function onInitialPointerMove(e) {\n // Work around a Safari quirk that fires a mousemove on whenever the\n // window blurs, even if you're tabbing out of the page. \u00AF\\_(\u30C4)_/\u00AF\n if (e.target.nodeName && e.target.nodeName.toLowerCase() === 'html') {\n return;\n }\n\n hadKeyboardEvent = false;\n removeInitialPointerMoveListeners();\n }\n\n // For some kinds of state, we are interested in changes at the global scope\n // only. 
For example, global pointer input, global key presses and global\n // visibility change should affect the state at every scope:\n document.addEventListener('keydown', onKeyDown, true);\n document.addEventListener('mousedown', onPointerDown, true);\n document.addEventListener('pointerdown', onPointerDown, true);\n document.addEventListener('touchstart', onPointerDown, true);\n document.addEventListener('visibilitychange', onVisibilityChange, true);\n\n addInitialPointerMoveListeners();\n\n // For focus and blur, we specifically care about state changes in the local\n // scope. This is because focus / blur events that originate from within a\n // shadow root are not re-dispatched from the host element if it was already\n // the active element in its own scope:\n scope.addEventListener('focus', onFocus, true);\n scope.addEventListener('blur', onBlur, true);\n\n // We detect that a node is a ShadowRoot by ensuring that it is a\n // DocumentFragment and also has a host property. This check covers native\n // implementation and polyfill implementation transparently. If we only cared\n // about the native implementation, we could just check if the scope was\n // an instance of a ShadowRoot.\n if (scope.nodeType === Node.DOCUMENT_FRAGMENT_NODE && scope.host) {\n // Since a ShadowRoot is a special kind of DocumentFragment, it does not\n // have a root element to add a class to. 
So, we add this attribute to the\n // host element instead:\n scope.host.setAttribute('data-js-focus-visible', '');\n } else if (scope.nodeType === Node.DOCUMENT_NODE) {\n document.documentElement.classList.add('js-focus-visible');\n document.documentElement.setAttribute('data-js-focus-visible', '');\n }\n }\n\n // It is important to wrap all references to global window and document in\n // these checks to support server-side rendering use cases\n // @see https://github.com/WICG/focus-visible/issues/199\n if (typeof window !== 'undefined' && typeof document !== 'undefined') {\n // Make the polyfill helper globally available. This can be used as a signal\n // to interested libraries that wish to coordinate with the polyfill for e.g.,\n // applying the polyfill to a shadow root:\n window.applyFocusVisiblePolyfill = applyFocusVisiblePolyfill;\n\n // Notify interested libraries of the polyfill's presence, in case the\n // polyfill was loaded lazily:\n var event;\n\n try {\n event = new CustomEvent('focus-visible-polyfill-ready');\n } catch (error) {\n // IE11 does not support using CustomEvent as a constructor directly:\n event = document.createEvent('CustomEvent');\n event.initCustomEvent('focus-visible-polyfill-ready', false, false, {});\n }\n\n window.dispatchEvent(event);\n }\n\n if (typeof document !== 'undefined') {\n // Apply the polyfill to the global document, so that no JavaScript\n // coordination is required to use the polyfill in the top-level document:\n applyFocusVisiblePolyfill(document);\n }\n\n})));\n", "/*!\n * clipboard.js v2.0.11\n * https://clipboardjs.com/\n *\n * Licensed MIT \u00A9 Zeno Rocha\n */\n(function webpackUniversalModuleDefinition(root, factory) {\n\tif(typeof exports === 'object' && typeof module === 'object')\n\t\tmodule.exports = factory();\n\telse if(typeof define === 'function' && define.amd)\n\t\tdefine([], factory);\n\telse if(typeof exports === 'object')\n\t\texports[\"ClipboardJS\"] = 
factory();\n\telse\n\t\troot[\"ClipboardJS\"] = factory();\n})(this, function() {\nreturn /******/ (function() { // webpackBootstrap\n/******/ \tvar __webpack_modules__ = ({\n\n/***/ 686:\n/***/ (function(__unused_webpack_module, __webpack_exports__, __webpack_require__) {\n\n\"use strict\";\n\n// EXPORTS\n__webpack_require__.d(__webpack_exports__, {\n \"default\": function() { return /* binding */ clipboard; }\n});\n\n// EXTERNAL MODULE: ./node_modules/tiny-emitter/index.js\nvar tiny_emitter = __webpack_require__(279);\nvar tiny_emitter_default = /*#__PURE__*/__webpack_require__.n(tiny_emitter);\n// EXTERNAL MODULE: ./node_modules/good-listener/src/listen.js\nvar listen = __webpack_require__(370);\nvar listen_default = /*#__PURE__*/__webpack_require__.n(listen);\n// EXTERNAL MODULE: ./node_modules/select/src/select.js\nvar src_select = __webpack_require__(817);\nvar select_default = /*#__PURE__*/__webpack_require__.n(src_select);\n;// CONCATENATED MODULE: ./src/common/command.js\n/**\n * Executes a given operation type.\n * @param {String} type\n * @return {Boolean}\n */\nfunction command(type) {\n try {\n return document.execCommand(type);\n } catch (err) {\n return false;\n }\n}\n;// CONCATENATED MODULE: ./src/actions/cut.js\n\n\n/**\n * Cut action wrapper.\n * @param {String|HTMLElement} target\n * @return {String}\n */\n\nvar ClipboardActionCut = function ClipboardActionCut(target) {\n var selectedText = select_default()(target);\n command('cut');\n return selectedText;\n};\n\n/* harmony default export */ var actions_cut = (ClipboardActionCut);\n;// CONCATENATED MODULE: ./src/common/create-fake-element.js\n/**\n * Creates a fake textarea element with a value.\n * @param {String} value\n * @return {HTMLElement}\n */\nfunction createFakeElement(value) {\n var isRTL = document.documentElement.getAttribute('dir') === 'rtl';\n var fakeElement = document.createElement('textarea'); // Prevent zooming on iOS\n\n fakeElement.style.fontSize = '12pt'; // Reset box 
model\n\n fakeElement.style.border = '0';\n fakeElement.style.padding = '0';\n fakeElement.style.margin = '0'; // Move element out of screen horizontally\n\n fakeElement.style.position = 'absolute';\n fakeElement.style[isRTL ? 'right' : 'left'] = '-9999px'; // Move element to the same position vertically\n\n var yPosition = window.pageYOffset || document.documentElement.scrollTop;\n fakeElement.style.top = \"\".concat(yPosition, \"px\");\n fakeElement.setAttribute('readonly', '');\n fakeElement.value = value;\n return fakeElement;\n}\n;// CONCATENATED MODULE: ./src/actions/copy.js\n\n\n\n/**\n * Create fake copy action wrapper using a fake element.\n * @param {String} target\n * @param {Object} options\n * @return {String}\n */\n\nvar fakeCopyAction = function fakeCopyAction(value, options) {\n var fakeElement = createFakeElement(value);\n options.container.appendChild(fakeElement);\n var selectedText = select_default()(fakeElement);\n command('copy');\n fakeElement.remove();\n return selectedText;\n};\n/**\n * Copy action wrapper.\n * @param {String|HTMLElement} target\n * @param {Object} options\n * @return {String}\n */\n\n\nvar ClipboardActionCopy = function ClipboardActionCopy(target) {\n var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {\n container: document.body\n };\n var selectedText = '';\n\n if (typeof target === 'string') {\n selectedText = fakeCopyAction(target, options);\n } else if (target instanceof HTMLInputElement && !['text', 'search', 'url', 'tel', 'password'].includes(target === null || target === void 0 ? void 0 : target.type)) {\n // If input type doesn't support `setSelectionRange`. Simulate it. 
https://developer.mozilla.org/en-US/docs/Web/API/HTMLInputElement/setSelectionRange\n selectedText = fakeCopyAction(target.value, options);\n } else {\n selectedText = select_default()(target);\n command('copy');\n }\n\n return selectedText;\n};\n\n/* harmony default export */ var actions_copy = (ClipboardActionCopy);\n;// CONCATENATED MODULE: ./src/actions/default.js\nfunction _typeof(obj) { \"@babel/helpers - typeof\"; if (typeof Symbol === \"function\" && typeof Symbol.iterator === \"symbol\") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === \"function\" && obj.constructor === Symbol && obj !== Symbol.prototype ? \"symbol\" : typeof obj; }; } return _typeof(obj); }\n\n\n\n/**\n * Inner function which performs selection from either `text` or `target`\n * properties and then executes copy or cut operations.\n * @param {Object} options\n */\n\nvar ClipboardActionDefault = function ClipboardActionDefault() {\n var options = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};\n // Defines base properties passed from constructor.\n var _options$action = options.action,\n action = _options$action === void 0 ? 'copy' : _options$action,\n container = options.container,\n target = options.target,\n text = options.text; // Sets the `action` to be performed which can be either 'copy' or 'cut'.\n\n if (action !== 'copy' && action !== 'cut') {\n throw new Error('Invalid \"action\" value, use either \"copy\" or \"cut\"');\n } // Sets the `target` property using an element that will be have its content copied.\n\n\n if (target !== undefined) {\n if (target && _typeof(target) === 'object' && target.nodeType === 1) {\n if (action === 'copy' && target.hasAttribute('disabled')) {\n throw new Error('Invalid \"target\" attribute. 
Please use \"readonly\" instead of \"disabled\" attribute');\n }\n\n if (action === 'cut' && (target.hasAttribute('readonly') || target.hasAttribute('disabled'))) {\n throw new Error('Invalid \"target\" attribute. You can\\'t cut text from elements with \"readonly\" or \"disabled\" attributes');\n }\n } else {\n throw new Error('Invalid \"target\" value, use a valid Element');\n }\n } // Define selection strategy based on `text` property.\n\n\n if (text) {\n return actions_copy(text, {\n container: container\n });\n } // Defines which selection strategy based on `target` property.\n\n\n if (target) {\n return action === 'cut' ? actions_cut(target) : actions_copy(target, {\n container: container\n });\n }\n};\n\n/* harmony default export */ var actions_default = (ClipboardActionDefault);\n;// CONCATENATED MODULE: ./src/clipboard.js\nfunction clipboard_typeof(obj) { \"@babel/helpers - typeof\"; if (typeof Symbol === \"function\" && typeof Symbol.iterator === \"symbol\") { clipboard_typeof = function _typeof(obj) { return typeof obj; }; } else { clipboard_typeof = function _typeof(obj) { return obj && typeof Symbol === \"function\" && obj.constructor === Symbol && obj !== Symbol.prototype ? 
\"symbol\" : typeof obj; }; } return clipboard_typeof(obj); }\n\nfunction _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError(\"Cannot call a class as a function\"); } }\n\nfunction _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if (\"value\" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }\n\nfunction _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }\n\nfunction _inherits(subClass, superClass) { if (typeof superClass !== \"function\" && superClass !== null) { throw new TypeError(\"Super expression must either be null or a function\"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); }\n\nfunction _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }\n\nfunction _createSuper(Derived) { var hasNativeReflectConstruct = _isNativeReflectConstruct(); return function _createSuperInternal() { var Super = _getPrototypeOf(Derived), result; if (hasNativeReflectConstruct) { var NewTarget = _getPrototypeOf(this).constructor; result = Reflect.construct(Super, arguments, NewTarget); } else { result = Super.apply(this, arguments); } return _possibleConstructorReturn(this, result); }; }\n\nfunction _possibleConstructorReturn(self, call) { if (call && (clipboard_typeof(call) === \"object\" || typeof call === \"function\")) { return call; } return _assertThisInitialized(self); }\n\nfunction _assertThisInitialized(self) { if 
(self === void 0) { throw new ReferenceError(\"this hasn't been initialised - super() hasn't been called\"); } return self; }\n\nfunction _isNativeReflectConstruct() { if (typeof Reflect === \"undefined\" || !Reflect.construct) return false; if (Reflect.construct.sham) return false; if (typeof Proxy === \"function\") return true; try { Date.prototype.toString.call(Reflect.construct(Date, [], function () {})); return true; } catch (e) { return false; } }\n\nfunction _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); }\n\n\n\n\n\n\n/**\n * Helper function to retrieve attribute value.\n * @param {String} suffix\n * @param {Element} element\n */\n\nfunction getAttributeValue(suffix, element) {\n var attribute = \"data-clipboard-\".concat(suffix);\n\n if (!element.hasAttribute(attribute)) {\n return;\n }\n\n return element.getAttribute(attribute);\n}\n/**\n * Base class which takes one or more elements, adds event listeners to them,\n * and instantiates a new `ClipboardAction` on each click.\n */\n\n\nvar Clipboard = /*#__PURE__*/function (_Emitter) {\n _inherits(Clipboard, _Emitter);\n\n var _super = _createSuper(Clipboard);\n\n /**\n * @param {String|HTMLElement|HTMLCollection|NodeList} trigger\n * @param {Object} options\n */\n function Clipboard(trigger, options) {\n var _this;\n\n _classCallCheck(this, Clipboard);\n\n _this = _super.call(this);\n\n _this.resolveOptions(options);\n\n _this.listenClick(trigger);\n\n return _this;\n }\n /**\n * Defines if attributes would be resolved using internal setter functions\n * or custom functions that were passed in the constructor.\n * @param {Object} options\n */\n\n\n _createClass(Clipboard, [{\n key: \"resolveOptions\",\n value: function resolveOptions() {\n var options = arguments.length > 0 && arguments[0] !== undefined ? 
arguments[0] : {};\n this.action = typeof options.action === 'function' ? options.action : this.defaultAction;\n this.target = typeof options.target === 'function' ? options.target : this.defaultTarget;\n this.text = typeof options.text === 'function' ? options.text : this.defaultText;\n this.container = clipboard_typeof(options.container) === 'object' ? options.container : document.body;\n }\n /**\n * Adds a click event listener to the passed trigger.\n * @param {String|HTMLElement|HTMLCollection|NodeList} trigger\n */\n\n }, {\n key: \"listenClick\",\n value: function listenClick(trigger) {\n var _this2 = this;\n\n this.listener = listen_default()(trigger, 'click', function (e) {\n return _this2.onClick(e);\n });\n }\n /**\n * Defines a new `ClipboardAction` on each click event.\n * @param {Event} e\n */\n\n }, {\n key: \"onClick\",\n value: function onClick(e) {\n var trigger = e.delegateTarget || e.currentTarget;\n var action = this.action(trigger) || 'copy';\n var text = actions_default({\n action: action,\n container: this.container,\n target: this.target(trigger),\n text: this.text(trigger)\n }); // Fires an event based on the copy operation result.\n\n this.emit(text ? 
'success' : 'error', {\n action: action,\n text: text,\n trigger: trigger,\n clearSelection: function clearSelection() {\n if (trigger) {\n trigger.focus();\n }\n\n window.getSelection().removeAllRanges();\n }\n });\n }\n /**\n * Default `action` lookup function.\n * @param {Element} trigger\n */\n\n }, {\n key: \"defaultAction\",\n value: function defaultAction(trigger) {\n return getAttributeValue('action', trigger);\n }\n /**\n * Default `target` lookup function.\n * @param {Element} trigger\n */\n\n }, {\n key: \"defaultTarget\",\n value: function defaultTarget(trigger) {\n var selector = getAttributeValue('target', trigger);\n\n if (selector) {\n return document.querySelector(selector);\n }\n }\n /**\n * Allow fire programmatically a copy action\n * @param {String|HTMLElement} target\n * @param {Object} options\n * @returns Text copied.\n */\n\n }, {\n key: \"defaultText\",\n\n /**\n * Default `text` lookup function.\n * @param {Element} trigger\n */\n value: function defaultText(trigger) {\n return getAttributeValue('text', trigger);\n }\n /**\n * Destroy lifecycle.\n */\n\n }, {\n key: \"destroy\",\n value: function destroy() {\n this.listener.destroy();\n }\n }], [{\n key: \"copy\",\n value: function copy(target) {\n var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {\n container: document.body\n };\n return actions_copy(target, options);\n }\n /**\n * Allow fire programmatically a cut action\n * @param {String|HTMLElement} target\n * @returns Text cutted.\n */\n\n }, {\n key: \"cut\",\n value: function cut(target) {\n return actions_cut(target);\n }\n /**\n * Returns the support of the given action, or all actions if no action is\n * given.\n * @param {String} [action]\n */\n\n }, {\n key: \"isSupported\",\n value: function isSupported() {\n var action = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : ['copy', 'cut'];\n var actions = typeof action === 'string' ? 
[action] : action;\n var support = !!document.queryCommandSupported;\n actions.forEach(function (action) {\n support = support && !!document.queryCommandSupported(action);\n });\n return support;\n }\n }]);\n\n return Clipboard;\n}((tiny_emitter_default()));\n\n/* harmony default export */ var clipboard = (Clipboard);\n\n/***/ }),\n\n/***/ 828:\n/***/ (function(module) {\n\nvar DOCUMENT_NODE_TYPE = 9;\n\n/**\n * A polyfill for Element.matches()\n */\nif (typeof Element !== 'undefined' && !Element.prototype.matches) {\n var proto = Element.prototype;\n\n proto.matches = proto.matchesSelector ||\n proto.mozMatchesSelector ||\n proto.msMatchesSelector ||\n proto.oMatchesSelector ||\n proto.webkitMatchesSelector;\n}\n\n/**\n * Finds the closest parent that matches a selector.\n *\n * @param {Element} element\n * @param {String} selector\n * @return {Function}\n */\nfunction closest (element, selector) {\n while (element && element.nodeType !== DOCUMENT_NODE_TYPE) {\n if (typeof element.matches === 'function' &&\n element.matches(selector)) {\n return element;\n }\n element = element.parentNode;\n }\n}\n\nmodule.exports = closest;\n\n\n/***/ }),\n\n/***/ 438:\n/***/ (function(module, __unused_webpack_exports, __webpack_require__) {\n\nvar closest = __webpack_require__(828);\n\n/**\n * Delegates event to a selector.\n *\n * @param {Element} element\n * @param {String} selector\n * @param {String} type\n * @param {Function} callback\n * @param {Boolean} useCapture\n * @return {Object}\n */\nfunction _delegate(element, selector, type, callback, useCapture) {\n var listenerFn = listener.apply(this, arguments);\n\n element.addEventListener(type, listenerFn, useCapture);\n\n return {\n destroy: function() {\n element.removeEventListener(type, listenerFn, useCapture);\n }\n }\n}\n\n/**\n * Delegates event to a selector.\n *\n * @param {Element|String|Array} [elements]\n * @param {String} selector\n * @param {String} type\n * @param {Function} callback\n * @param {Boolean} 
useCapture\n * @return {Object}\n */\nfunction delegate(elements, selector, type, callback, useCapture) {\n // Handle the regular Element usage\n if (typeof elements.addEventListener === 'function') {\n return _delegate.apply(null, arguments);\n }\n\n // Handle Element-less usage, it defaults to global delegation\n if (typeof type === 'function') {\n // Use `document` as the first parameter, then apply arguments\n // This is a short way to .unshift `arguments` without running into deoptimizations\n return _delegate.bind(null, document).apply(null, arguments);\n }\n\n // Handle Selector-based usage\n if (typeof elements === 'string') {\n elements = document.querySelectorAll(elements);\n }\n\n // Handle Array-like based usage\n return Array.prototype.map.call(elements, function (element) {\n return _delegate(element, selector, type, callback, useCapture);\n });\n}\n\n/**\n * Finds closest match and invokes callback.\n *\n * @param {Element} element\n * @param {String} selector\n * @param {String} type\n * @param {Function} callback\n * @return {Function}\n */\nfunction listener(element, selector, type, callback) {\n return function(e) {\n e.delegateTarget = closest(e.target, selector);\n\n if (e.delegateTarget) {\n callback.call(element, e);\n }\n }\n}\n\nmodule.exports = delegate;\n\n\n/***/ }),\n\n/***/ 879:\n/***/ (function(__unused_webpack_module, exports) {\n\n/**\n * Check if argument is a HTML element.\n *\n * @param {Object} value\n * @return {Boolean}\n */\nexports.node = function(value) {\n return value !== undefined\n && value instanceof HTMLElement\n && value.nodeType === 1;\n};\n\n/**\n * Check if argument is a list of HTML elements.\n *\n * @param {Object} value\n * @return {Boolean}\n */\nexports.nodeList = function(value) {\n var type = Object.prototype.toString.call(value);\n\n return value !== undefined\n && (type === '[object NodeList]' || type === '[object HTMLCollection]')\n && ('length' in value)\n && (value.length === 0 || 
exports.node(value[0]));\n};\n\n/**\n * Check if argument is a string.\n *\n * @param {Object} value\n * @return {Boolean}\n */\nexports.string = function(value) {\n return typeof value === 'string'\n || value instanceof String;\n};\n\n/**\n * Check if argument is a function.\n *\n * @param {Object} value\n * @return {Boolean}\n */\nexports.fn = function(value) {\n var type = Object.prototype.toString.call(value);\n\n return type === '[object Function]';\n};\n\n\n/***/ }),\n\n/***/ 370:\n/***/ (function(module, __unused_webpack_exports, __webpack_require__) {\n\nvar is = __webpack_require__(879);\nvar delegate = __webpack_require__(438);\n\n/**\n * Validates all params and calls the right\n * listener function based on its target type.\n *\n * @param {String|HTMLElement|HTMLCollection|NodeList} target\n * @param {String} type\n * @param {Function} callback\n * @return {Object}\n */\nfunction listen(target, type, callback) {\n if (!target && !type && !callback) {\n throw new Error('Missing required arguments');\n }\n\n if (!is.string(type)) {\n throw new TypeError('Second argument must be a String');\n }\n\n if (!is.fn(callback)) {\n throw new TypeError('Third argument must be a Function');\n }\n\n if (is.node(target)) {\n return listenNode(target, type, callback);\n }\n else if (is.nodeList(target)) {\n return listenNodeList(target, type, callback);\n }\n else if (is.string(target)) {\n return listenSelector(target, type, callback);\n }\n else {\n throw new TypeError('First argument must be a String, HTMLElement, HTMLCollection, or NodeList');\n }\n}\n\n/**\n * Adds an event listener to a HTML element\n * and returns a remove listener function.\n *\n * @param {HTMLElement} node\n * @param {String} type\n * @param {Function} callback\n * @return {Object}\n */\nfunction listenNode(node, type, callback) {\n node.addEventListener(type, callback);\n\n return {\n destroy: function() {\n node.removeEventListener(type, callback);\n }\n }\n}\n\n/**\n * Add an event listener 
to a list of HTML elements\n * and returns a remove listener function.\n *\n * @param {NodeList|HTMLCollection} nodeList\n * @param {String} type\n * @param {Function} callback\n * @return {Object}\n */\nfunction listenNodeList(nodeList, type, callback) {\n Array.prototype.forEach.call(nodeList, function(node) {\n node.addEventListener(type, callback);\n });\n\n return {\n destroy: function() {\n Array.prototype.forEach.call(nodeList, function(node) {\n node.removeEventListener(type, callback);\n });\n }\n }\n}\n\n/**\n * Add an event listener to a selector\n * and returns a remove listener function.\n *\n * @param {String} selector\n * @param {String} type\n * @param {Function} callback\n * @return {Object}\n */\nfunction listenSelector(selector, type, callback) {\n return delegate(document.body, selector, type, callback);\n}\n\nmodule.exports = listen;\n\n\n/***/ }),\n\n/***/ 817:\n/***/ (function(module) {\n\nfunction select(element) {\n var selectedText;\n\n if (element.nodeName === 'SELECT') {\n element.focus();\n\n selectedText = element.value;\n }\n else if (element.nodeName === 'INPUT' || element.nodeName === 'TEXTAREA') {\n var isReadOnly = element.hasAttribute('readonly');\n\n if (!isReadOnly) {\n element.setAttribute('readonly', '');\n }\n\n element.select();\n element.setSelectionRange(0, element.value.length);\n\n if (!isReadOnly) {\n element.removeAttribute('readonly');\n }\n\n selectedText = element.value;\n }\n else {\n if (element.hasAttribute('contenteditable')) {\n element.focus();\n }\n\n var selection = window.getSelection();\n var range = document.createRange();\n\n range.selectNodeContents(element);\n selection.removeAllRanges();\n selection.addRange(range);\n\n selectedText = selection.toString();\n }\n\n return selectedText;\n}\n\nmodule.exports = select;\n\n\n/***/ }),\n\n/***/ 279:\n/***/ (function(module) {\n\nfunction E () {\n // Keep this empty so it's easier to inherit from\n // (via https://github.com/lipsmack from 
https://github.com/scottcorgan/tiny-emitter/issues/3)\n}\n\nE.prototype = {\n on: function (name, callback, ctx) {\n var e = this.e || (this.e = {});\n\n (e[name] || (e[name] = [])).push({\n fn: callback,\n ctx: ctx\n });\n\n return this;\n },\n\n once: function (name, callback, ctx) {\n var self = this;\n function listener () {\n self.off(name, listener);\n callback.apply(ctx, arguments);\n };\n\n listener._ = callback\n return this.on(name, listener, ctx);\n },\n\n emit: function (name) {\n var data = [].slice.call(arguments, 1);\n var evtArr = ((this.e || (this.e = {}))[name] || []).slice();\n var i = 0;\n var len = evtArr.length;\n\n for (i; i < len; i++) {\n evtArr[i].fn.apply(evtArr[i].ctx, data);\n }\n\n return this;\n },\n\n off: function (name, callback) {\n var e = this.e || (this.e = {});\n var evts = e[name];\n var liveEvents = [];\n\n if (evts && callback) {\n for (var i = 0, len = evts.length; i < len; i++) {\n if (evts[i].fn !== callback && evts[i].fn._ !== callback)\n liveEvents.push(evts[i]);\n }\n }\n\n // Remove event from queue to prevent memory leak\n // Suggested by https://github.com/lazd\n // Ref: https://github.com/scottcorgan/tiny-emitter/commit/c6ebfaa9bc973b33d110a84a307742b7cf94c953#commitcomment-5024910\n\n (liveEvents.length)\n ? 
e[name] = liveEvents\n : delete e[name];\n\n return this;\n }\n};\n\nmodule.exports = E;\nmodule.exports.TinyEmitter = E;\n\n\n/***/ })\n\n/******/ \t});\n/************************************************************************/\n/******/ \t// The module cache\n/******/ \tvar __webpack_module_cache__ = {};\n/******/ \t\n/******/ \t// The require function\n/******/ \tfunction __webpack_require__(moduleId) {\n/******/ \t\t// Check if module is in cache\n/******/ \t\tif(__webpack_module_cache__[moduleId]) {\n/******/ \t\t\treturn __webpack_module_cache__[moduleId].exports;\n/******/ \t\t}\n/******/ \t\t// Create a new module (and put it into the cache)\n/******/ \t\tvar module = __webpack_module_cache__[moduleId] = {\n/******/ \t\t\t// no module.id needed\n/******/ \t\t\t// no module.loaded needed\n/******/ \t\t\texports: {}\n/******/ \t\t};\n/******/ \t\n/******/ \t\t// Execute the module function\n/******/ \t\t__webpack_modules__[moduleId](module, module.exports, __webpack_require__);\n/******/ \t\n/******/ \t\t// Return the exports of the module\n/******/ \t\treturn module.exports;\n/******/ \t}\n/******/ \t\n/************************************************************************/\n/******/ \t/* webpack/runtime/compat get default export */\n/******/ \t!function() {\n/******/ \t\t// getDefaultExport function for compatibility with non-harmony modules\n/******/ \t\t__webpack_require__.n = function(module) {\n/******/ \t\t\tvar getter = module && module.__esModule ?\n/******/ \t\t\t\tfunction() { return module['default']; } :\n/******/ \t\t\t\tfunction() { return module; };\n/******/ \t\t\t__webpack_require__.d(getter, { a: getter });\n/******/ \t\t\treturn getter;\n/******/ \t\t};\n/******/ \t}();\n/******/ \t\n/******/ \t/* webpack/runtime/define property getters */\n/******/ \t!function() {\n/******/ \t\t// define getter functions for harmony exports\n/******/ \t\t__webpack_require__.d = function(exports, definition) {\n/******/ \t\t\tfor(var key in definition) 
{\n/******/ \t\t\t\tif(__webpack_require__.o(definition, key) && !__webpack_require__.o(exports, key)) {\n/******/ \t\t\t\t\tObject.defineProperty(exports, key, { enumerable: true, get: definition[key] });\n/******/ \t\t\t\t}\n/******/ \t\t\t}\n/******/ \t\t};\n/******/ \t}();\n/******/ \t\n/******/ \t/* webpack/runtime/hasOwnProperty shorthand */\n/******/ \t!function() {\n/******/ \t\t__webpack_require__.o = function(obj, prop) { return Object.prototype.hasOwnProperty.call(obj, prop); }\n/******/ \t}();\n/******/ \t\n/************************************************************************/\n/******/ \t// module exports must be returned from runtime so entry inlining is disabled\n/******/ \t// startup\n/******/ \t// Load entry module and return exports\n/******/ \treturn __webpack_require__(686);\n/******/ })()\n.default;\n});", "/*!\n * escape-html\n * Copyright(c) 2012-2013 TJ Holowaychuk\n * Copyright(c) 2015 Andreas Lubbe\n * Copyright(c) 2015 Tiancheng \"Timothy\" Gu\n * MIT Licensed\n */\n\n'use strict';\n\n/**\n * Module variables.\n * @private\n */\n\nvar matchHtmlRegExp = /[\"'&<>]/;\n\n/**\n * Module exports.\n * @public\n */\n\nmodule.exports = escapeHtml;\n\n/**\n * Escape special characters in the given string of html.\n *\n * @param {string} string The string to escape for inserting into HTML\n * @return {string}\n * @public\n */\n\nfunction escapeHtml(string) {\n var str = '' + string;\n var match = matchHtmlRegExp.exec(str);\n\n if (!match) {\n return str;\n }\n\n var escape;\n var html = '';\n var index = 0;\n var lastIndex = 0;\n\n for (index = match.index; index < str.length; index++) {\n switch (str.charCodeAt(index)) {\n case 34: // \"\n escape = '"';\n break;\n case 38: // &\n escape = '&';\n break;\n case 39: // '\n escape = ''';\n break;\n case 60: // <\n escape = '<';\n break;\n case 62: // >\n escape = '>';\n break;\n default:\n continue;\n }\n\n if (lastIndex !== index) {\n html += str.substring(lastIndex, index);\n }\n\n lastIndex = 
index + 1;\n html += escape;\n }\n\n return lastIndex !== index\n ? html + str.substring(lastIndex, index)\n : html;\n}\n", "/*\n * Copyright (c) 2016-2024 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport \"focus-visible\"\n\nimport {\n EMPTY,\n NEVER,\n Observable,\n Subject,\n defer,\n delay,\n filter,\n map,\n merge,\n mergeWith,\n shareReplay,\n switchMap\n} from \"rxjs\"\n\nimport { configuration, feature } from \"./_\"\nimport {\n at,\n getActiveElement,\n getOptionalElement,\n requestJSON,\n setLocation,\n setToggle,\n watchDocument,\n watchKeyboard,\n watchLocation,\n watchLocationTarget,\n watchMedia,\n watchPrint,\n watchScript,\n watchViewport\n} from \"./browser\"\nimport {\n getComponentElement,\n getComponentElements,\n mountAnnounce,\n mountBackToTop,\n mountConsent,\n mountContent,\n mountDialog,\n mountHeader,\n mountHeaderTitle,\n mountPalette,\n mountProgress,\n mountSearch,\n 
mountSearchHiglight,\n mountSidebar,\n mountSource,\n mountTableOfContents,\n mountTabs,\n watchHeader,\n watchMain\n} from \"./components\"\nimport {\n SearchIndex,\n setupClipboardJS,\n setupInstantNavigation,\n setupVersionSelector\n} from \"./integrations\"\nimport {\n patchEllipsis,\n patchIndeterminate,\n patchScrollfix,\n patchScrolllock\n} from \"./patches\"\nimport \"./polyfills\"\n\n/* ----------------------------------------------------------------------------\n * Functions - @todo refactor\n * ------------------------------------------------------------------------- */\n\n/**\n * Fetch search index\n *\n * @returns Search index observable\n */\nfunction fetchSearchIndex(): Observable {\n if (location.protocol === \"file:\") {\n return watchScript(\n `${new URL(\"search/search_index.js\", config.base)}`\n )\n .pipe(\n // @ts-ignore - @todo fix typings\n map(() => __index),\n shareReplay(1)\n )\n } else {\n return requestJSON(\n new URL(\"search/search_index.json\", config.base)\n )\n }\n}\n\n/* ----------------------------------------------------------------------------\n * Application\n * ------------------------------------------------------------------------- */\n\n/* Yay, JavaScript is available */\ndocument.documentElement.classList.remove(\"no-js\")\ndocument.documentElement.classList.add(\"js\")\n\n/* Set up navigation observables and subjects */\nconst document$ = watchDocument()\nconst location$ = watchLocation()\nconst target$ = watchLocationTarget(location$)\nconst keyboard$ = watchKeyboard()\n\n/* Set up media observables */\nconst viewport$ = watchViewport()\nconst tablet$ = watchMedia(\"(min-width: 960px)\")\nconst screen$ = watchMedia(\"(min-width: 1220px)\")\nconst print$ = watchPrint()\n\n/* Retrieve search index, if search is enabled */\nconst config = configuration()\nconst index$ = document.forms.namedItem(\"search\")\n ? 
fetchSearchIndex()\n : NEVER\n\n/* Set up Clipboard.js integration */\nconst alert$ = new Subject()\nsetupClipboardJS({ alert$ })\n\n/* Set up progress indicator */\nconst progress$ = new Subject()\n\n/* Set up instant navigation, if enabled */\nif (feature(\"navigation.instant\"))\n setupInstantNavigation({ location$, viewport$, progress$ })\n .subscribe(document$)\n\n/* Set up version selector */\nif (config.version?.provider === \"mike\")\n setupVersionSelector({ document$ })\n\n/* Always close drawer and search on navigation */\nmerge(location$, target$)\n .pipe(\n delay(125)\n )\n .subscribe(() => {\n setToggle(\"drawer\", false)\n setToggle(\"search\", false)\n })\n\n/* Set up global keyboard handlers */\nkeyboard$\n .pipe(\n filter(({ mode }) => mode === \"global\")\n )\n .subscribe(key => {\n switch (key.type) {\n\n /* Go to previous page */\n case \"p\":\n case \",\":\n const prev = getOptionalElement(\"link[rel=prev]\")\n if (typeof prev !== \"undefined\")\n setLocation(prev)\n break\n\n /* Go to next page */\n case \"n\":\n case \".\":\n const next = getOptionalElement(\"link[rel=next]\")\n if (typeof next !== \"undefined\")\n setLocation(next)\n break\n\n /* Expand navigation, see https://bit.ly/3ZjG5io */\n case \"Enter\":\n const active = getActiveElement()\n if (active instanceof HTMLLabelElement)\n active.click()\n }\n })\n\n/* Set up patches */\npatchEllipsis({ document$ })\npatchIndeterminate({ document$, tablet$ })\npatchScrollfix({ document$ })\npatchScrolllock({ viewport$, tablet$ })\n\n/* Set up header and main area observable */\nconst header$ = watchHeader(getComponentElement(\"header\"), { viewport$ })\nconst main$ = document$\n .pipe(\n map(() => getComponentElement(\"main\")),\n switchMap(el => watchMain(el, { viewport$, header$ })),\n shareReplay(1)\n )\n\n/* Set up control component observables */\nconst control$ = merge(\n\n /* Consent */\n ...getComponentElements(\"consent\")\n .map(el => mountConsent(el, { target$ })),\n\n /* Dialog 
*/\n ...getComponentElements(\"dialog\")\n .map(el => mountDialog(el, { alert$ })),\n\n /* Header */\n ...getComponentElements(\"header\")\n .map(el => mountHeader(el, { viewport$, header$, main$ })),\n\n /* Color palette */\n ...getComponentElements(\"palette\")\n .map(el => mountPalette(el)),\n\n /* Progress bar */\n ...getComponentElements(\"progress\")\n .map(el => mountProgress(el, { progress$ })),\n\n /* Search */\n ...getComponentElements(\"search\")\n .map(el => mountSearch(el, { index$, keyboard$ })),\n\n /* Repository information */\n ...getComponentElements(\"source\")\n .map(el => mountSource(el))\n)\n\n/* Set up content component observables */\nconst content$ = defer(() => merge(\n\n /* Announcement bar */\n ...getComponentElements(\"announce\")\n .map(el => mountAnnounce(el)),\n\n /* Content */\n ...getComponentElements(\"content\")\n .map(el => mountContent(el, { viewport$, target$, print$ })),\n\n /* Search highlighting */\n ...getComponentElements(\"content\")\n .map(el => feature(\"search.highlight\")\n ? mountSearchHiglight(el, { index$, location$ })\n : EMPTY\n ),\n\n /* Header title */\n ...getComponentElements(\"header-title\")\n .map(el => mountHeaderTitle(el, { viewport$, header$ })),\n\n /* Sidebar */\n ...getComponentElements(\"sidebar\")\n .map(el => el.getAttribute(\"data-md-type\") === \"navigation\"\n ? 
at(screen$, () => mountSidebar(el, { viewport$, header$, main$ }))\n : at(tablet$, () => mountSidebar(el, { viewport$, header$, main$ }))\n ),\n\n /* Navigation tabs */\n ...getComponentElements(\"tabs\")\n .map(el => mountTabs(el, { viewport$, header$ })),\n\n /* Table of contents */\n ...getComponentElements(\"toc\")\n .map(el => mountTableOfContents(el, {\n viewport$, header$, main$, target$\n })),\n\n /* Back-to-top button */\n ...getComponentElements(\"top\")\n .map(el => mountBackToTop(el, { viewport$, header$, main$, target$ }))\n))\n\n/* Set up component observables */\nconst component$ = document$\n .pipe(\n switchMap(() => content$),\n mergeWith(control$),\n shareReplay(1)\n )\n\n/* Subscribe to all components */\ncomponent$.subscribe()\n\n/* ----------------------------------------------------------------------------\n * Exports\n * ------------------------------------------------------------------------- */\n\nwindow.document$ = document$ /* Document observable */\nwindow.location$ = location$ /* Location subject */\nwindow.target$ = target$ /* Location target observable */\nwindow.keyboard$ = keyboard$ /* Keyboard observable */\nwindow.viewport$ = viewport$ /* Viewport observable */\nwindow.tablet$ = tablet$ /* Media tablet observable */\nwindow.screen$ = screen$ /* Media screen observable */\nwindow.print$ = print$ /* Media print observable */\nwindow.alert$ = alert$ /* Alert subject */\nwindow.progress$ = progress$ /* Progress indicator subject */\nwindow.component$ = component$ /* Component observable */\n", "/*! *****************************************************************************\r\nCopyright (c) Microsoft Corporation.\r\n\r\nPermission to use, copy, modify, and/or distribute this software for any\r\npurpose with or without fee is hereby granted.\r\n\r\nTHE SOFTWARE IS PROVIDED \"AS IS\" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH\r\nREGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY\r\nAND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,\r\nINDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM\r\nLOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR\r\nOTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR\r\nPERFORMANCE OF THIS SOFTWARE.\r\n***************************************************************************** */\r\n/* global Reflect, Promise */\r\n\r\nvar extendStatics = function(d, b) {\r\n extendStatics = Object.setPrototypeOf ||\r\n ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||\r\n function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };\r\n return extendStatics(d, b);\r\n};\r\n\r\nexport function __extends(d, b) {\r\n if (typeof b !== \"function\" && b !== null)\r\n throw new TypeError(\"Class extends value \" + String(b) + \" is not a constructor or null\");\r\n extendStatics(d, b);\r\n function __() { this.constructor = d; }\r\n d.prototype = b === null ? 
Object.create(b) : (__.prototype = b.prototype, new __());\r\n}\r\n\r\nexport var __assign = function() {\r\n __assign = Object.assign || function __assign(t) {\r\n for (var s, i = 1, n = arguments.length; i < n; i++) {\r\n s = arguments[i];\r\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p];\r\n }\r\n return t;\r\n }\r\n return __assign.apply(this, arguments);\r\n}\r\n\r\nexport function __rest(s, e) {\r\n var t = {};\r\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)\r\n t[p] = s[p];\r\n if (s != null && typeof Object.getOwnPropertySymbols === \"function\")\r\n for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {\r\n if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))\r\n t[p[i]] = s[p[i]];\r\n }\r\n return t;\r\n}\r\n\r\nexport function __decorate(decorators, target, key, desc) {\r\n var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;\r\n if (typeof Reflect === \"object\" && typeof Reflect.decorate === \"function\") r = Reflect.decorate(decorators, target, key, desc);\r\n else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;\r\n return c > 3 && r && Object.defineProperty(target, key, r), r;\r\n}\r\n\r\nexport function __param(paramIndex, decorator) {\r\n return function (target, key) { decorator(target, key, paramIndex); }\r\n}\r\n\r\nexport function __metadata(metadataKey, metadataValue) {\r\n if (typeof Reflect === \"object\" && typeof Reflect.metadata === \"function\") return Reflect.metadata(metadataKey, metadataValue);\r\n}\r\n\r\nexport function __awaiter(thisArg, _arguments, P, generator) {\r\n function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); }\r\n return new (P || (P = Promise))(function (resolve, reject) {\r\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\r\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\r\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\r\n step((generator = generator.apply(thisArg, _arguments || [])).next());\r\n });\r\n}\r\n\r\nexport function __generator(thisArg, body) {\r\n var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;\r\n return g = { next: verb(0), \"throw\": verb(1), \"return\": verb(2) }, typeof Symbol === \"function\" && (g[Symbol.iterator] = function() { return this; }), g;\r\n function verb(n) { return function (v) { return step([n, v]); }; }\r\n function step(op) {\r\n if (f) throw new TypeError(\"Generator is already executing.\");\r\n while (_) try {\r\n if (f = 1, y && (t = op[0] & 2 ? y[\"return\"] : op[0] ? 
y[\"throw\"] || ((t = y[\"return\"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;\r\n if (y = 0, t) op = [op[0] & 2, t.value];\r\n switch (op[0]) {\r\n case 0: case 1: t = op; break;\r\n case 4: _.label++; return { value: op[1], done: false };\r\n case 5: _.label++; y = op[1]; op = [0]; continue;\r\n case 7: op = _.ops.pop(); _.trys.pop(); continue;\r\n default:\r\n if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }\r\n if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }\r\n if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }\r\n if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }\r\n if (t[2]) _.ops.pop();\r\n _.trys.pop(); continue;\r\n }\r\n op = body.call(thisArg, _);\r\n } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }\r\n if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };\r\n }\r\n}\r\n\r\nexport var __createBinding = Object.create ? (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\r\n}) : (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n o[k2] = m[k];\r\n});\r\n\r\nexport function __exportStar(m, o) {\r\n for (var p in m) if (p !== \"default\" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p);\r\n}\r\n\r\nexport function __values(o) {\r\n var s = typeof Symbol === \"function\" && Symbol.iterator, m = s && o[s], i = 0;\r\n if (m) return m.call(o);\r\n if (o && typeof o.length === \"number\") return {\r\n next: function () {\r\n if (o && i >= o.length) o = void 0;\r\n return { value: o && o[i++], done: !o };\r\n }\r\n };\r\n throw new TypeError(s ? 
\"Object is not iterable.\" : \"Symbol.iterator is not defined.\");\r\n}\r\n\r\nexport function __read(o, n) {\r\n var m = typeof Symbol === \"function\" && o[Symbol.iterator];\r\n if (!m) return o;\r\n var i = m.call(o), r, ar = [], e;\r\n try {\r\n while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value);\r\n }\r\n catch (error) { e = { error: error }; }\r\n finally {\r\n try {\r\n if (r && !r.done && (m = i[\"return\"])) m.call(i);\r\n }\r\n finally { if (e) throw e.error; }\r\n }\r\n return ar;\r\n}\r\n\r\n/** @deprecated */\r\nexport function __spread() {\r\n for (var ar = [], i = 0; i < arguments.length; i++)\r\n ar = ar.concat(__read(arguments[i]));\r\n return ar;\r\n}\r\n\r\n/** @deprecated */\r\nexport function __spreadArrays() {\r\n for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length;\r\n for (var r = Array(s), k = 0, i = 0; i < il; i++)\r\n for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++)\r\n r[k] = a[j];\r\n return r;\r\n}\r\n\r\nexport function __spreadArray(to, from, pack) {\r\n if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) {\r\n if (ar || !(i in from)) {\r\n if (!ar) ar = Array.prototype.slice.call(from, 0, i);\r\n ar[i] = from[i];\r\n }\r\n }\r\n return to.concat(ar || Array.prototype.slice.call(from));\r\n}\r\n\r\nexport function __await(v) {\r\n return this instanceof __await ? 
(this.v = v, this) : new __await(v);\r\n}\r\n\r\nexport function __asyncGenerator(thisArg, _arguments, generator) {\r\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\r\n var g = generator.apply(thisArg, _arguments || []), i, q = [];\r\n return i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i;\r\n function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; }\r\n function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } }\r\n function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); }\r\n function fulfill(value) { resume(\"next\", value); }\r\n function reject(value) { resume(\"throw\", value); }\r\n function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }\r\n}\r\n\r\nexport function __asyncDelegator(o) {\r\n var i, p;\r\n return i = {}, verb(\"next\"), verb(\"throw\", function (e) { throw e; }), verb(\"return\"), i[Symbol.iterator] = function () { return this; }, i;\r\n function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === \"return\" } : f ? f(v) : v; } : f; }\r\n}\r\n\r\nexport function __asyncValues(o) {\r\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\r\n var m = o[Symbol.asyncIterator], i;\r\n return m ? m.call(o) : (o = typeof __values === \"function\" ? 
__values(o) : o[Symbol.iterator](), i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i);\r\n function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }\r\n function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }\r\n}\r\n\r\nexport function __makeTemplateObject(cooked, raw) {\r\n if (Object.defineProperty) { Object.defineProperty(cooked, \"raw\", { value: raw }); } else { cooked.raw = raw; }\r\n return cooked;\r\n};\r\n\r\nvar __setModuleDefault = Object.create ? (function(o, v) {\r\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\r\n}) : function(o, v) {\r\n o[\"default\"] = v;\r\n};\r\n\r\nexport function __importStar(mod) {\r\n if (mod && mod.__esModule) return mod;\r\n var result = {};\r\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\r\n __setModuleDefault(result, mod);\r\n return result;\r\n}\r\n\r\nexport function __importDefault(mod) {\r\n return (mod && mod.__esModule) ? mod : { default: mod };\r\n}\r\n\r\nexport function __classPrivateFieldGet(receiver, state, kind, f) {\r\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a getter\");\r\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot read private member from an object whose class did not declare it\");\r\n return kind === \"m\" ? f : kind === \"a\" ? f.call(receiver) : f ? 
f.value : state.get(receiver);\r\n}\r\n\r\nexport function __classPrivateFieldSet(receiver, state, value, kind, f) {\r\n if (kind === \"m\") throw new TypeError(\"Private method is not writable\");\r\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a setter\");\r\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot write private member to an object whose class did not declare it\");\r\n return (kind === \"a\" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value;\r\n}\r\n", "/**\n * Returns true if the object is a function.\n * @param value The value to check\n */\nexport function isFunction(value: any): value is (...args: any[]) => any {\n return typeof value === 'function';\n}\n", "/**\n * Used to create Error subclasses until the community moves away from ES5.\n *\n * This is because compiling from TypeScript down to ES5 has issues with subclassing Errors\n * as well as other built-in types: https://github.com/Microsoft/TypeScript/issues/12123\n *\n * @param createImpl A factory function to create the actual constructor implementation. The returned\n * function should be a named function that calls `_super` internally.\n */\nexport function createErrorClass(createImpl: (_super: any) => any): T {\n const _super = (instance: any) => {\n Error.call(instance);\n instance.stack = new Error().stack;\n };\n\n const ctorFunc = createImpl(_super);\n ctorFunc.prototype = Object.create(Error.prototype);\n ctorFunc.prototype.constructor = ctorFunc;\n return ctorFunc;\n}\n", "import { createErrorClass } from './createErrorClass';\n\nexport interface UnsubscriptionError extends Error {\n readonly errors: any[];\n}\n\nexport interface UnsubscriptionErrorCtor {\n /**\n * @deprecated Internal implementation detail. 
Do not construct error instances.\n * Cannot be tagged as internal: https://github.com/ReactiveX/rxjs/issues/6269\n */\n new (errors: any[]): UnsubscriptionError;\n}\n\n/**\n * An error thrown when one or more errors have occurred during the\n * `unsubscribe` of a {@link Subscription}.\n */\nexport const UnsubscriptionError: UnsubscriptionErrorCtor = createErrorClass(\n (_super) =>\n function UnsubscriptionErrorImpl(this: any, errors: (Error | string)[]) {\n _super(this);\n this.message = errors\n ? `${errors.length} errors occurred during unsubscription:\n${errors.map((err, i) => `${i + 1}) ${err.toString()}`).join('\\n ')}`\n : '';\n this.name = 'UnsubscriptionError';\n this.errors = errors;\n }\n);\n", "/**\n * Removes an item from an array, mutating it.\n * @param arr The array to remove the item from\n * @param item The item to remove\n */\nexport function arrRemove(arr: T[] | undefined | null, item: T) {\n if (arr) {\n const index = arr.indexOf(item);\n 0 <= index && arr.splice(index, 1);\n }\n}\n", "import { isFunction } from './util/isFunction';\nimport { UnsubscriptionError } from './util/UnsubscriptionError';\nimport { SubscriptionLike, TeardownLogic, Unsubscribable } from './types';\nimport { arrRemove } from './util/arrRemove';\n\n/**\n * Represents a disposable resource, such as the execution of an Observable. 
A\n * Subscription has one important method, `unsubscribe`, that takes no argument\n * and just disposes the resource held by the subscription.\n *\n * Additionally, subscriptions may be grouped together through the `add()`\n * method, which will attach a child Subscription to the current Subscription.\n * When a Subscription is unsubscribed, all its children (and its grandchildren)\n * will be unsubscribed as well.\n *\n * @class Subscription\n */\nexport class Subscription implements SubscriptionLike {\n /** @nocollapse */\n public static EMPTY = (() => {\n const empty = new Subscription();\n empty.closed = true;\n return empty;\n })();\n\n /**\n * A flag to indicate whether this Subscription has already been unsubscribed.\n */\n public closed = false;\n\n private _parentage: Subscription[] | Subscription | null = null;\n\n /**\n * The list of registered finalizers to execute upon unsubscription. Adding and removing from this\n * list occurs in the {@link #add} and {@link #remove} methods.\n */\n private _finalizers: Exclude[] | null = null;\n\n /**\n * @param initialTeardown A function executed first as part of the finalization\n * process that is kicked off when {@link #unsubscribe} is called.\n */\n constructor(private initialTeardown?: () => void) {}\n\n /**\n * Disposes the resources held by the subscription. 
May, for instance, cancel\n * an ongoing Observable execution or cancel any other type of work that\n * started when the Subscription was created.\n * @return {void}\n */\n unsubscribe(): void {\n let errors: any[] | undefined;\n\n if (!this.closed) {\n this.closed = true;\n\n // Remove this from it's parents.\n const { _parentage } = this;\n if (_parentage) {\n this._parentage = null;\n if (Array.isArray(_parentage)) {\n for (const parent of _parentage) {\n parent.remove(this);\n }\n } else {\n _parentage.remove(this);\n }\n }\n\n const { initialTeardown: initialFinalizer } = this;\n if (isFunction(initialFinalizer)) {\n try {\n initialFinalizer();\n } catch (e) {\n errors = e instanceof UnsubscriptionError ? e.errors : [e];\n }\n }\n\n const { _finalizers } = this;\n if (_finalizers) {\n this._finalizers = null;\n for (const finalizer of _finalizers) {\n try {\n execFinalizer(finalizer);\n } catch (err) {\n errors = errors ?? [];\n if (err instanceof UnsubscriptionError) {\n errors = [...errors, ...err.errors];\n } else {\n errors.push(err);\n }\n }\n }\n }\n\n if (errors) {\n throw new UnsubscriptionError(errors);\n }\n }\n }\n\n /**\n * Adds a finalizer to this subscription, so that finalization will be unsubscribed/called\n * when this subscription is unsubscribed. If this subscription is already {@link #closed},\n * because it has already been unsubscribed, then whatever finalizer is passed to it\n * will automatically be executed (unless the finalizer itself is also a closed subscription).\n *\n * Closed Subscriptions cannot be added as finalizers to any subscription. Adding a closed\n * subscription to a any subscription will result in no operation. (A noop).\n *\n * Adding a subscription to itself, or adding `null` or `undefined` will not perform any\n * operation at all. (A noop).\n *\n * `Subscription` instances that are added to this instance will automatically remove themselves\n * if they are unsubscribed. 
Functions and {@link Unsubscribable} objects that you wish to remove\n * will need to be removed manually with {@link #remove}\n *\n * @param teardown The finalization logic to add to this subscription.\n */\n add(teardown: TeardownLogic): void {\n // Only add the finalizer if it's not undefined\n // and don't add a subscription to itself.\n if (teardown && teardown !== this) {\n if (this.closed) {\n // If this subscription is already closed,\n // execute whatever finalizer is handed to it automatically.\n execFinalizer(teardown);\n } else {\n if (teardown instanceof Subscription) {\n // We don't add closed subscriptions, and we don't add the same subscription\n // twice. Subscription unsubscribe is idempotent.\n if (teardown.closed || teardown._hasParent(this)) {\n return;\n }\n teardown._addParent(this);\n }\n (this._finalizers = this._finalizers ?? []).push(teardown);\n }\n }\n }\n\n /**\n * Checks to see if a this subscription already has a particular parent.\n * This will signal that this subscription has already been added to the parent in question.\n * @param parent the parent to check for\n */\n private _hasParent(parent: Subscription) {\n const { _parentage } = this;\n return _parentage === parent || (Array.isArray(_parentage) && _parentage.includes(parent));\n }\n\n /**\n * Adds a parent to this subscription so it can be removed from the parent if it\n * unsubscribes on it's own.\n *\n * NOTE: THIS ASSUMES THAT {@link _hasParent} HAS ALREADY BEEN CHECKED.\n * @param parent The parent subscription to add\n */\n private _addParent(parent: Subscription) {\n const { _parentage } = this;\n this._parentage = Array.isArray(_parentage) ? (_parentage.push(parent), _parentage) : _parentage ? 
[_parentage, parent] : parent;\n }\n\n /**\n * Called on a child when it is removed via {@link #remove}.\n * @param parent The parent to remove\n */\n private _removeParent(parent: Subscription) {\n const { _parentage } = this;\n if (_parentage === parent) {\n this._parentage = null;\n } else if (Array.isArray(_parentage)) {\n arrRemove(_parentage, parent);\n }\n }\n\n /**\n * Removes a finalizer from this subscription that was previously added with the {@link #add} method.\n *\n * Note that `Subscription` instances, when unsubscribed, will automatically remove themselves\n * from every other `Subscription` they have been added to. This means that using the `remove` method\n * is not a common thing and should be used thoughtfully.\n *\n * If you add the same finalizer instance of a function or an unsubscribable object to a `Subscription` instance\n * more than once, you will need to call `remove` the same number of times to remove all instances.\n *\n * All finalizer instances are removed to free up memory upon unsubscription.\n *\n * @param teardown The finalizer to remove from this subscription\n */\n remove(teardown: Exclude): void {\n const { _finalizers } = this;\n _finalizers && arrRemove(_finalizers, teardown);\n\n if (teardown instanceof Subscription) {\n teardown._removeParent(this);\n }\n }\n}\n\nexport const EMPTY_SUBSCRIPTION = Subscription.EMPTY;\n\nexport function isSubscription(value: any): value is Subscription {\n return (\n value instanceof Subscription ||\n (value && 'closed' in value && isFunction(value.remove) && isFunction(value.add) && isFunction(value.unsubscribe))\n );\n}\n\nfunction execFinalizer(finalizer: Unsubscribable | (() => void)) {\n if (isFunction(finalizer)) {\n finalizer();\n } else {\n finalizer.unsubscribe();\n }\n}\n", "import { Subscriber } from './Subscriber';\nimport { ObservableNotification } from './types';\n\n/**\n * The {@link GlobalConfig} object for RxJS. 
It is used to configure things\n * like how to react on unhandled errors.\n */\nexport const config: GlobalConfig = {\n onUnhandledError: null,\n onStoppedNotification: null,\n Promise: undefined,\n useDeprecatedSynchronousErrorHandling: false,\n useDeprecatedNextContext: false,\n};\n\n/**\n * The global configuration object for RxJS, used to configure things\n * like how to react on unhandled errors. Accessible via {@link config}\n * object.\n */\nexport interface GlobalConfig {\n /**\n * A registration point for unhandled errors from RxJS. These are errors that\n * cannot were not handled by consuming code in the usual subscription path. For\n * example, if you have this configured, and you subscribe to an observable without\n * providing an error handler, errors from that subscription will end up here. This\n * will _always_ be called asynchronously on another job in the runtime. This is because\n * we do not want errors thrown in this user-configured handler to interfere with the\n * behavior of the library.\n */\n onUnhandledError: ((err: any) => void) | null;\n\n /**\n * A registration point for notifications that cannot be sent to subscribers because they\n * have completed, errored or have been explicitly unsubscribed. By default, next, complete\n * and error notifications sent to stopped subscribers are noops. However, sometimes callers\n * might want a different behavior. For example, with sources that attempt to report errors\n * to stopped subscribers, a caller can configure RxJS to throw an unhandled error instead.\n * This will _always_ be called asynchronously on another job in the runtime. 
This is because\n * we do not want errors thrown in this user-configured handler to interfere with the\n * behavior of the library.\n */\n onStoppedNotification: ((notification: ObservableNotification, subscriber: Subscriber) => void) | null;\n\n /**\n * The promise constructor used by default for {@link Observable#toPromise toPromise} and {@link Observable#forEach forEach}\n * methods.\n *\n * @deprecated As of version 8, RxJS will no longer support this sort of injection of a\n * Promise constructor. If you need a Promise implementation other than native promises,\n * please polyfill/patch Promise as you see appropriate. Will be removed in v8.\n */\n Promise?: PromiseConstructorLike;\n\n /**\n * If true, turns on synchronous error rethrowing, which is a deprecated behavior\n * in v6 and higher. This behavior enables bad patterns like wrapping a subscribe\n * call in a try/catch block. It also enables producer interference, a nasty bug\n * where a multicast can be broken for all observers by a downstream consumer with\n * an unhandled error. DO NOT USE THIS FLAG UNLESS IT'S NEEDED TO BUY TIME\n * FOR MIGRATION REASONS.\n *\n * @deprecated As of version 8, RxJS will no longer support synchronous throwing\n * of unhandled errors. All errors will be thrown on a separate call stack to prevent bad\n * behaviors described above. 
Will be removed in v8.\n */\n useDeprecatedSynchronousErrorHandling: boolean;\n\n /**\n * If true, enables an as-of-yet undocumented feature from v5: The ability to access\n * `unsubscribe()` via `this` context in `next` functions created in observers passed\n * to `subscribe`.\n *\n * This is being removed because the performance was severely problematic, and it could also cause\n * issues when types other than POJOs are passed to subscribe as subscribers, as they will likely have\n * their `this` context overwritten.\n *\n * @deprecated As of version 8, RxJS will no longer support altering the\n * context of next functions provided as part of an observer to Subscribe. Instead,\n * you will have access to a subscription or a signal or token that will allow you to do things like\n * unsubscribe and test closed status. Will be removed in v8.\n */\n useDeprecatedNextContext: boolean;\n}\n", "import type { TimerHandle } from './timerHandle';\ntype SetTimeoutFunction = (handler: () => void, timeout?: number, ...args: any[]) => TimerHandle;\ntype ClearTimeoutFunction = (handle: TimerHandle) => void;\n\ninterface TimeoutProvider {\n setTimeout: SetTimeoutFunction;\n clearTimeout: ClearTimeoutFunction;\n delegate:\n | {\n setTimeout: SetTimeoutFunction;\n clearTimeout: ClearTimeoutFunction;\n }\n | undefined;\n}\n\nexport const timeoutProvider: TimeoutProvider = {\n // When accessing the delegate, use the variable rather than `this` so that\n // the functions can be called without being bound to the provider.\n setTimeout(handler: () => void, timeout?: number, ...args) {\n const { delegate } = timeoutProvider;\n if (delegate?.setTimeout) {\n return delegate.setTimeout(handler, timeout, ...args);\n }\n return setTimeout(handler, timeout, ...args);\n },\n clearTimeout(handle) {\n const { delegate } = timeoutProvider;\n return (delegate?.clearTimeout || clearTimeout)(handle as any);\n },\n delegate: undefined,\n};\n", "import { config } from '../config';\nimport { 
timeoutProvider } from '../scheduler/timeoutProvider';\n\n/**\n * Handles an error on another job either with the user-configured {@link onUnhandledError},\n * or by throwing it on that new job so it can be picked up by `window.onerror`, `process.on('error')`, etc.\n *\n * This should be called whenever there is an error that is out-of-band with the subscription\n * or when an error hits a terminal boundary of the subscription and no error handler was provided.\n *\n * @param err the error to report\n */\nexport function reportUnhandledError(err: any) {\n timeoutProvider.setTimeout(() => {\n const { onUnhandledError } = config;\n if (onUnhandledError) {\n // Execute the user-configured error handler.\n onUnhandledError(err);\n } else {\n // Throw so it is picked up by the runtime's uncaught error mechanism.\n throw err;\n }\n });\n}\n", "/* tslint:disable:no-empty */\nexport function noop() { }\n", "import { CompleteNotification, NextNotification, ErrorNotification } from './types';\n\n/**\n * A completion object optimized for memory use and created to be the\n * same \"shape\" as other notifications in v8.\n * @internal\n */\nexport const COMPLETE_NOTIFICATION = (() => createNotification('C', undefined, undefined) as CompleteNotification)();\n\n/**\n * Internal use only. Creates an optimized error notification that is the same \"shape\"\n * as other notifications.\n * @internal\n */\nexport function errorNotification(error: any): ErrorNotification {\n return createNotification('E', undefined, error) as any;\n}\n\n/**\n * Internal use only. 
Creates an optimized next notification that is the same \"shape\"\n * as other notifications.\n * @internal\n */\nexport function nextNotification(value: T) {\n return createNotification('N', value, undefined) as NextNotification;\n}\n\n/**\n * Ensures that all notifications created internally have the same \"shape\" in v8.\n *\n * TODO: This is only exported to support a crazy legacy test in `groupBy`.\n * @internal\n */\nexport function createNotification(kind: 'N' | 'E' | 'C', value: any, error: any) {\n return {\n kind,\n value,\n error,\n };\n}\n", "import { config } from '../config';\n\nlet context: { errorThrown: boolean; error: any } | null = null;\n\n/**\n * Handles dealing with errors for super-gross mode. Creates a context, in which\n * any synchronously thrown errors will be passed to {@link captureError}. Which\n * will record the error such that it will be rethrown after the call back is complete.\n * TODO: Remove in v8\n * @param cb An immediately executed function.\n */\nexport function errorContext(cb: () => void) {\n if (config.useDeprecatedSynchronousErrorHandling) {\n const isRoot = !context;\n if (isRoot) {\n context = { errorThrown: false, error: null };\n }\n cb();\n if (isRoot) {\n const { errorThrown, error } = context!;\n context = null;\n if (errorThrown) {\n throw error;\n }\n }\n } else {\n // This is the general non-deprecated path for everyone that\n // isn't crazy enough to use super-gross mode (useDeprecatedSynchronousErrorHandling)\n cb();\n }\n}\n\n/**\n * Captures errors only in super-gross mode.\n * @param err the error to capture\n */\nexport function captureError(err: any) {\n if (config.useDeprecatedSynchronousErrorHandling && context) {\n context.errorThrown = true;\n context.error = err;\n }\n}\n", "import { isFunction } from './util/isFunction';\nimport { Observer, ObservableNotification } from './types';\nimport { isSubscription, Subscription } from './Subscription';\nimport { config } from './config';\nimport { 
reportUnhandledError } from './util/reportUnhandledError';\nimport { noop } from './util/noop';\nimport { nextNotification, errorNotification, COMPLETE_NOTIFICATION } from './NotificationFactories';\nimport { timeoutProvider } from './scheduler/timeoutProvider';\nimport { captureError } from './util/errorContext';\n\n/**\n * Implements the {@link Observer} interface and extends the\n * {@link Subscription} class. While the {@link Observer} is the public API for\n * consuming the values of an {@link Observable}, all Observers get converted to\n * a Subscriber, in order to provide Subscription-like capabilities such as\n * `unsubscribe`. Subscriber is a common type in RxJS, and crucial for\n * implementing operators, but it is rarely used as a public API.\n *\n * @class Subscriber\n */\nexport class Subscriber extends Subscription implements Observer {\n /**\n * A static factory for a Subscriber, given a (potentially partial) definition\n * of an Observer.\n * @param next The `next` callback of an Observer.\n * @param error The `error` callback of an\n * Observer.\n * @param complete The `complete` callback of an\n * Observer.\n * @return A Subscriber wrapping the (partially defined)\n * Observer represented by the given arguments.\n * @nocollapse\n * @deprecated Do not use. Will be removed in v8. There is no replacement for this\n * method, and there is no reason to be creating instances of `Subscriber` directly.\n * If you have a specific use case, please file an issue.\n */\n static create(next?: (x?: T) => void, error?: (e?: any) => void, complete?: () => void): Subscriber {\n return new SafeSubscriber(next, error, complete);\n }\n\n /** @deprecated Internal implementation detail, do not use directly. Will be made internal in v8. */\n protected isStopped: boolean = false;\n /** @deprecated Internal implementation detail, do not use directly. Will be made internal in v8. 
*/\n protected destination: Subscriber | Observer; // this `any` is the escape hatch to erase extra type param (e.g. R)\n\n /**\n * @deprecated Internal implementation detail, do not use directly. Will be made internal in v8.\n * There is no reason to directly create an instance of Subscriber. This type is exported for typings reasons.\n */\n constructor(destination?: Subscriber | Observer) {\n super();\n if (destination) {\n this.destination = destination;\n // Automatically chain subscriptions together here.\n // if destination is a Subscription, then it is a Subscriber.\n if (isSubscription(destination)) {\n destination.add(this);\n }\n } else {\n this.destination = EMPTY_OBSERVER;\n }\n }\n\n /**\n * The {@link Observer} callback to receive notifications of type `next` from\n * the Observable, with a value. The Observable may call this method 0 or more\n * times.\n * @param {T} [value] The `next` value.\n * @return {void}\n */\n next(value?: T): void {\n if (this.isStopped) {\n handleStoppedNotification(nextNotification(value), this);\n } else {\n this._next(value!);\n }\n }\n\n /**\n * The {@link Observer} callback to receive notifications of type `error` from\n * the Observable, with an attached `Error`. Notifies the Observer that\n * the Observable has experienced an error condition.\n * @param {any} [err] The `error` exception.\n * @return {void}\n */\n error(err?: any): void {\n if (this.isStopped) {\n handleStoppedNotification(errorNotification(err), this);\n } else {\n this.isStopped = true;\n this._error(err);\n }\n }\n\n /**\n * The {@link Observer} callback to receive a valueless notification of type\n * `complete` from the Observable. 
Notifies the Observer that the Observable\n * has finished sending push-based notifications.\n * @return {void}\n */\n complete(): void {\n if (this.isStopped) {\n handleStoppedNotification(COMPLETE_NOTIFICATION, this);\n } else {\n this.isStopped = true;\n this._complete();\n }\n }\n\n unsubscribe(): void {\n if (!this.closed) {\n this.isStopped = true;\n super.unsubscribe();\n this.destination = null!;\n }\n }\n\n protected _next(value: T): void {\n this.destination.next(value);\n }\n\n protected _error(err: any): void {\n try {\n this.destination.error(err);\n } finally {\n this.unsubscribe();\n }\n }\n\n protected _complete(): void {\n try {\n this.destination.complete();\n } finally {\n this.unsubscribe();\n }\n }\n}\n\n/**\n * This bind is captured here because we want to be able to have\n * compatibility with monoid libraries that tend to use a method named\n * `bind`. In particular, a library called Monio requires this.\n */\nconst _bind = Function.prototype.bind;\n\nfunction bind any>(fn: Fn, thisArg: any): Fn {\n return _bind.call(fn, thisArg);\n}\n\n/**\n * Internal optimization only, DO NOT EXPOSE.\n * @internal\n */\nclass ConsumerObserver implements Observer {\n constructor(private partialObserver: Partial>) {}\n\n next(value: T): void {\n const { partialObserver } = this;\n if (partialObserver.next) {\n try {\n partialObserver.next(value);\n } catch (error) {\n handleUnhandledError(error);\n }\n }\n }\n\n error(err: any): void {\n const { partialObserver } = this;\n if (partialObserver.error) {\n try {\n partialObserver.error(err);\n } catch (error) {\n handleUnhandledError(error);\n }\n } else {\n handleUnhandledError(err);\n }\n }\n\n complete(): void {\n const { partialObserver } = this;\n if (partialObserver.complete) {\n try {\n partialObserver.complete();\n } catch (error) {\n handleUnhandledError(error);\n }\n }\n }\n}\n\nexport class SafeSubscriber extends Subscriber {\n constructor(\n observerOrNext?: Partial> | ((value: T) => void) | 
null,\n error?: ((e?: any) => void) | null,\n complete?: (() => void) | null\n ) {\n super();\n\n let partialObserver: Partial>;\n if (isFunction(observerOrNext) || !observerOrNext) {\n // The first argument is a function, not an observer. The next\n // two arguments *could* be observers, or they could be empty.\n partialObserver = {\n next: (observerOrNext ?? undefined) as (((value: T) => void) | undefined),\n error: error ?? undefined,\n complete: complete ?? undefined,\n };\n } else {\n // The first argument is a partial observer.\n let context: any;\n if (this && config.useDeprecatedNextContext) {\n // This is a deprecated path that made `this.unsubscribe()` available in\n // next handler functions passed to subscribe. This only exists behind a flag\n // now, as it is *very* slow.\n context = Object.create(observerOrNext);\n context.unsubscribe = () => this.unsubscribe();\n partialObserver = {\n next: observerOrNext.next && bind(observerOrNext.next, context),\n error: observerOrNext.error && bind(observerOrNext.error, context),\n complete: observerOrNext.complete && bind(observerOrNext.complete, context),\n };\n } else {\n // The \"normal\" path. 
Just use the partial observer directly.\n partialObserver = observerOrNext;\n }\n }\n\n // Wrap the partial observer to ensure it's a full observer, and\n // make sure proper error handling is accounted for.\n this.destination = new ConsumerObserver(partialObserver);\n }\n}\n\nfunction handleUnhandledError(error: any) {\n if (config.useDeprecatedSynchronousErrorHandling) {\n captureError(error);\n } else {\n // Ideal path, we report this as an unhandled error,\n // which is thrown on a new call stack.\n reportUnhandledError(error);\n }\n}\n\n/**\n * An error handler used when no error handler was supplied\n * to the SafeSubscriber -- meaning no error handler was supplied\n * do the `subscribe` call on our observable.\n * @param err The error to handle\n */\nfunction defaultErrorHandler(err: any) {\n throw err;\n}\n\n/**\n * A handler for notifications that cannot be sent to a stopped subscriber.\n * @param notification The notification being sent\n * @param subscriber The stopped subscriber\n */\nfunction handleStoppedNotification(notification: ObservableNotification, subscriber: Subscriber) {\n const { onStoppedNotification } = config;\n onStoppedNotification && timeoutProvider.setTimeout(() => onStoppedNotification(notification, subscriber));\n}\n\n/**\n * The observer used as a stub for subscriptions where the user did not\n * pass any arguments to `subscribe`. Comes with the default error handling\n * behavior.\n */\nexport const EMPTY_OBSERVER: Readonly> & { closed: true } = {\n closed: true,\n next: noop,\n error: defaultErrorHandler,\n complete: noop,\n};\n", "/**\n * Symbol.observable or a string \"@@observable\". 
Used for interop\n *\n * @deprecated We will no longer be exporting this symbol in upcoming versions of RxJS.\n * Instead polyfill and use Symbol.observable directly *or* use https://www.npmjs.com/package/symbol-observable\n */\nexport const observable: string | symbol = (() => (typeof Symbol === 'function' && Symbol.observable) || '@@observable')();\n", "/**\n * This function takes one parameter and just returns it. Simply put,\n * this is like `(x: T): T => x`.\n *\n * ## Examples\n *\n * This is useful in some cases when using things like `mergeMap`\n *\n * ```ts\n * import { interval, take, map, range, mergeMap, identity } from 'rxjs';\n *\n * const source$ = interval(1000).pipe(take(5));\n *\n * const result$ = source$.pipe(\n * map(i => range(i)),\n * mergeMap(identity) // same as mergeMap(x => x)\n * );\n *\n * result$.subscribe({\n * next: console.log\n * });\n * ```\n *\n * Or when you want to selectively apply an operator\n *\n * ```ts\n * import { interval, take, identity } from 'rxjs';\n *\n * const shouldLimit = () => Math.random() < 0.5;\n *\n * const source$ = interval(1000);\n *\n * const result$ = source$.pipe(shouldLimit() ? 
take(5) : identity);\n *\n * result$.subscribe({\n * next: console.log\n * });\n * ```\n *\n * @param x Any value that is returned by this function\n * @returns The value passed as the first parameter to this function\n */\nexport function identity(x: T): T {\n return x;\n}\n", "import { identity } from './identity';\nimport { UnaryFunction } from '../types';\n\nexport function pipe(): typeof identity;\nexport function pipe(fn1: UnaryFunction): UnaryFunction;\nexport function pipe(fn1: UnaryFunction, fn2: UnaryFunction): UnaryFunction;\nexport function pipe(fn1: UnaryFunction, fn2: UnaryFunction, fn3: UnaryFunction): UnaryFunction;\nexport function pipe(\n fn1: UnaryFunction,\n fn2: UnaryFunction,\n fn3: UnaryFunction,\n fn4: UnaryFunction\n): UnaryFunction;\nexport function pipe(\n fn1: UnaryFunction,\n fn2: UnaryFunction,\n fn3: UnaryFunction,\n fn4: UnaryFunction,\n fn5: UnaryFunction\n): UnaryFunction;\nexport function pipe(\n fn1: UnaryFunction,\n fn2: UnaryFunction,\n fn3: UnaryFunction,\n fn4: UnaryFunction,\n fn5: UnaryFunction,\n fn6: UnaryFunction\n): UnaryFunction;\nexport function pipe(\n fn1: UnaryFunction,\n fn2: UnaryFunction,\n fn3: UnaryFunction,\n fn4: UnaryFunction,\n fn5: UnaryFunction,\n fn6: UnaryFunction,\n fn7: UnaryFunction\n): UnaryFunction;\nexport function pipe(\n fn1: UnaryFunction,\n fn2: UnaryFunction,\n fn3: UnaryFunction,\n fn4: UnaryFunction,\n fn5: UnaryFunction,\n fn6: UnaryFunction,\n fn7: UnaryFunction,\n fn8: UnaryFunction\n): UnaryFunction;\nexport function pipe(\n fn1: UnaryFunction,\n fn2: UnaryFunction,\n fn3: UnaryFunction,\n fn4: UnaryFunction,\n fn5: UnaryFunction,\n fn6: UnaryFunction,\n fn7: UnaryFunction,\n fn8: UnaryFunction,\n fn9: UnaryFunction\n): UnaryFunction;\nexport function pipe(\n fn1: UnaryFunction,\n fn2: UnaryFunction,\n fn3: UnaryFunction,\n fn4: UnaryFunction,\n fn5: UnaryFunction,\n fn6: UnaryFunction,\n fn7: UnaryFunction,\n fn8: UnaryFunction,\n fn9: UnaryFunction,\n ...fns: UnaryFunction[]\n): 
UnaryFunction;\n\n/**\n * pipe() can be called on one or more functions, each of which can take one argument (\"UnaryFunction\")\n * and uses it to return a value.\n * It returns a function that takes one argument, passes it to the first UnaryFunction, and then\n * passes the result to the next one, passes that result to the next one, and so on. \n */\nexport function pipe(...fns: Array>): UnaryFunction {\n return pipeFromArray(fns);\n}\n\n/** @internal */\nexport function pipeFromArray(fns: Array>): UnaryFunction {\n if (fns.length === 0) {\n return identity as UnaryFunction;\n }\n\n if (fns.length === 1) {\n return fns[0];\n }\n\n return function piped(input: T): R {\n return fns.reduce((prev: any, fn: UnaryFunction) => fn(prev), input as any);\n };\n}\n", "import { Operator } from './Operator';\nimport { SafeSubscriber, Subscriber } from './Subscriber';\nimport { isSubscription, Subscription } from './Subscription';\nimport { TeardownLogic, OperatorFunction, Subscribable, Observer } from './types';\nimport { observable as Symbol_observable } from './symbol/observable';\nimport { pipeFromArray } from './util/pipe';\nimport { config } from './config';\nimport { isFunction } from './util/isFunction';\nimport { errorContext } from './util/errorContext';\n\n/**\n * A representation of any set of values over any amount of time. This is the most basic building block\n * of RxJS.\n *\n * @class Observable\n */\nexport class Observable implements Subscribable {\n /**\n * @deprecated Internal implementation detail, do not use directly. Will be made internal in v8.\n */\n source: Observable | undefined;\n\n /**\n * @deprecated Internal implementation detail, do not use directly. Will be made internal in v8.\n */\n operator: Operator | undefined;\n\n /**\n * @constructor\n * @param {Function} subscribe the function that is called when the Observable is\n * initially subscribed to. 
This function is given a Subscriber, to which new values\n * can be `next`ed, or an `error` method can be called to raise an error, or\n * `complete` can be called to notify of a successful completion.\n */\n constructor(subscribe?: (this: Observable, subscriber: Subscriber) => TeardownLogic) {\n if (subscribe) {\n this._subscribe = subscribe;\n }\n }\n\n // HACK: Since TypeScript inherits static properties too, we have to\n // fight against TypeScript here so Subject can have a different static create signature\n /**\n * Creates a new Observable by calling the Observable constructor\n * @owner Observable\n * @method create\n * @param {Function} subscribe? the subscriber function to be passed to the Observable constructor\n * @return {Observable} a new observable\n * @nocollapse\n * @deprecated Use `new Observable()` instead. Will be removed in v8.\n */\n static create: (...args: any[]) => any = (subscribe?: (subscriber: Subscriber) => TeardownLogic) => {\n return new Observable(subscribe);\n };\n\n /**\n * Creates a new Observable, with this Observable instance as the source, and the passed\n * operator defined as the new observable's operator.\n * @method lift\n * @param operator the operator defining the operation to take on the observable\n * @return a new observable with the Operator applied\n * @deprecated Internal implementation detail, do not use directly. Will be made internal in v8.\n * If you have implemented an operator using `lift`, it is recommended that you create an\n * operator by simply returning `new Observable()` directly. 
See \"Creating new operators from\n * scratch\" section here: https://rxjs.dev/guide/operators\n */\n lift(operator?: Operator): Observable {\n const observable = new Observable();\n observable.source = this;\n observable.operator = operator;\n return observable;\n }\n\n subscribe(observerOrNext?: Partial> | ((value: T) => void)): Subscription;\n /** @deprecated Instead of passing separate callback arguments, use an observer argument. Signatures taking separate callback arguments will be removed in v8. Details: https://rxjs.dev/deprecations/subscribe-arguments */\n subscribe(next?: ((value: T) => void) | null, error?: ((error: any) => void) | null, complete?: (() => void) | null): Subscription;\n /**\n * Invokes an execution of an Observable and registers Observer handlers for notifications it will emit.\n *\n * Use it when you have all these Observables, but still nothing is happening.\n *\n * `subscribe` is not a regular operator, but a method that calls Observable's internal `subscribe` function. It\n * might be for example a function that you passed to Observable's constructor, but most of the time it is\n * a library implementation, which defines what will be emitted by an Observable, and when it be will emitted. This means\n * that calling `subscribe` is actually the moment when Observable starts its work, not when it is created, as it is often\n * the thought.\n *\n * Apart from starting the execution of an Observable, this method allows you to listen for values\n * that an Observable emits, as well as for when it completes or errors. You can achieve this in two\n * of the following ways.\n *\n * The first way is creating an object that implements {@link Observer} interface. It should have methods\n * defined by that interface, but note that it should be just a regular JavaScript object, which you can create\n * yourself in any way you want (ES6 class, classic function constructor, object literal etc.). 
In particular, do\n * not attempt to use any RxJS implementation details to create Observers - you don't need them. Remember also\n * that your object does not have to implement all methods. If you find yourself creating a method that doesn't\n * do anything, you can simply omit it. Note however, if the `error` method is not provided and an error happens,\n * it will be thrown asynchronously. Errors thrown asynchronously cannot be caught using `try`/`catch`. Instead,\n * use the {@link onUnhandledError} configuration option or use a runtime handler (like `window.onerror` or\n * `process.on('error)`) to be notified of unhandled errors. Because of this, it's recommended that you provide\n * an `error` method to avoid missing thrown errors.\n *\n * The second way is to give up on Observer object altogether and simply provide callback functions in place of its methods.\n * This means you can provide three functions as arguments to `subscribe`, where the first function is equivalent\n * of a `next` method, the second of an `error` method and the third of a `complete` method. Just as in case of an Observer,\n * if you do not need to listen for something, you can omit a function by passing `undefined` or `null`,\n * since `subscribe` recognizes these functions by where they were placed in function call. When it comes\n * to the `error` function, as with an Observer, if not provided, errors emitted by an Observable will be thrown asynchronously.\n *\n * You can, however, subscribe with no parameters at all. This may be the case where you're not interested in terminal events\n * and you also handled emissions internally by using operators (e.g. using `tap`).\n *\n * Whichever style of calling `subscribe` you use, in both cases it returns a Subscription object.\n * This object allows you to call `unsubscribe` on it, which in turn will stop the work that an Observable does and will clean\n * up all resources that an Observable used. 
Note that cancelling a subscription will not call `complete` callback\n * provided to `subscribe` function, which is reserved for a regular completion signal that comes from an Observable.\n *\n * Remember that callbacks provided to `subscribe` are not guaranteed to be called asynchronously.\n * It is an Observable itself that decides when these functions will be called. For example {@link of}\n * by default emits all its values synchronously. Always check documentation for how given Observable\n * will behave when subscribed and if its default behavior can be modified with a `scheduler`.\n *\n * #### Examples\n *\n * Subscribe with an {@link guide/observer Observer}\n *\n * ```ts\n * import { of } from 'rxjs';\n *\n * const sumObserver = {\n * sum: 0,\n * next(value) {\n * console.log('Adding: ' + value);\n * this.sum = this.sum + value;\n * },\n * error() {\n * // We actually could just remove this method,\n * // since we do not really care about errors right now.\n * },\n * complete() {\n * console.log('Sum equals: ' + this.sum);\n * }\n * };\n *\n * of(1, 2, 3) // Synchronously emits 1, 2, 3 and then completes.\n * .subscribe(sumObserver);\n *\n * // Logs:\n * // 'Adding: 1'\n * // 'Adding: 2'\n * // 'Adding: 3'\n * // 'Sum equals: 6'\n * ```\n *\n * Subscribe with functions ({@link deprecations/subscribe-arguments deprecated})\n *\n * ```ts\n * import { of } from 'rxjs'\n *\n * let sum = 0;\n *\n * of(1, 2, 3).subscribe(\n * value => {\n * console.log('Adding: ' + value);\n * sum = sum + value;\n * },\n * undefined,\n * () => console.log('Sum equals: ' + sum)\n * );\n *\n * // Logs:\n * // 'Adding: 1'\n * // 'Adding: 2'\n * // 'Adding: 3'\n * // 'Sum equals: 6'\n * ```\n *\n * Cancel a subscription\n *\n * ```ts\n * import { interval } from 'rxjs';\n *\n * const subscription = interval(1000).subscribe({\n * next(num) {\n * console.log(num)\n * },\n * complete() {\n * // Will not be called, even when cancelling subscription.\n * console.log('completed!');\n * 
}\n * });\n *\n * setTimeout(() => {\n * subscription.unsubscribe();\n * console.log('unsubscribed!');\n * }, 2500);\n *\n * // Logs:\n * // 0 after 1s\n * // 1 after 2s\n * // 'unsubscribed!' after 2.5s\n * ```\n *\n * @param {Observer|Function} observerOrNext (optional) Either an observer with methods to be called,\n * or the first of three possible handlers, which is the handler for each value emitted from the subscribed\n * Observable.\n * @param {Function} error (optional) A handler for a terminal event resulting from an error. If no error handler is provided,\n * the error will be thrown asynchronously as unhandled.\n * @param {Function} complete (optional) A handler for a terminal event resulting from successful completion.\n * @return {Subscription} a subscription reference to the registered handlers\n * @method subscribe\n */\n subscribe(\n observerOrNext?: Partial> | ((value: T) => void) | null,\n error?: ((error: any) => void) | null,\n complete?: (() => void) | null\n ): Subscription {\n const subscriber = isSubscriber(observerOrNext) ? observerOrNext : new SafeSubscriber(observerOrNext, error, complete);\n\n errorContext(() => {\n const { operator, source } = this;\n subscriber.add(\n operator\n ? // We're dealing with a subscription in the\n // operator chain to one of our lifted operators.\n operator.call(subscriber, source)\n : source\n ? // If `source` has a value, but `operator` does not, something that\n // had intimate knowledge of our API, like our `Subject`, must have\n // set it. 
We're going to just call `_subscribe` directly.\n this._subscribe(subscriber)\n : // In all other cases, we're likely wrapping a user-provided initializer\n // function, so we need to catch errors and handle them appropriately.\n this._trySubscribe(subscriber)\n );\n });\n\n return subscriber;\n }\n\n /** @internal */\n protected _trySubscribe(sink: Subscriber): TeardownLogic {\n try {\n return this._subscribe(sink);\n } catch (err) {\n // We don't need to return anything in this case,\n // because it's just going to try to `add()` to a subscription\n // above.\n sink.error(err);\n }\n }\n\n /**\n * Used as a NON-CANCELLABLE means of subscribing to an observable, for use with\n * APIs that expect promises, like `async/await`. You cannot unsubscribe from this.\n *\n * **WARNING**: Only use this with observables you *know* will complete. If the source\n * observable does not complete, you will end up with a promise that is hung up, and\n * potentially all of the state of an async function hanging out in memory. 
To avoid\n * this situation, look into adding something like {@link timeout}, {@link take},\n * {@link takeWhile}, or {@link takeUntil} amongst others.\n *\n * #### Example\n *\n * ```ts\n * import { interval, take } from 'rxjs';\n *\n * const source$ = interval(1000).pipe(take(4));\n *\n * async function getTotal() {\n * let total = 0;\n *\n * await source$.forEach(value => {\n * total += value;\n * console.log('observable -> ' + value);\n * });\n *\n * return total;\n * }\n *\n * getTotal().then(\n * total => console.log('Total: ' + total)\n * );\n *\n * // Expected:\n * // 'observable -> 0'\n * // 'observable -> 1'\n * // 'observable -> 2'\n * // 'observable -> 3'\n * // 'Total: 6'\n * ```\n *\n * @param next a handler for each value emitted by the observable\n * @return a promise that either resolves on observable completion or\n * rejects with the handled error\n */\n forEach(next: (value: T) => void): Promise;\n\n /**\n * @param next a handler for each value emitted by the observable\n * @param promiseCtor a constructor function used to instantiate the Promise\n * @return a promise that either resolves on observable completion or\n * rejects with the handled error\n * @deprecated Passing a Promise constructor will no longer be available\n * in upcoming versions of RxJS. This is because it adds weight to the library, for very\n * little benefit. If you need this functionality, it is recommended that you either\n * polyfill Promise, or you create an adapter to convert the returned native promise\n * to whatever promise implementation you wanted. 
Will be removed in v8.\n */\n forEach(next: (value: T) => void, promiseCtor: PromiseConstructorLike): Promise;\n\n forEach(next: (value: T) => void, promiseCtor?: PromiseConstructorLike): Promise {\n promiseCtor = getPromiseCtor(promiseCtor);\n\n return new promiseCtor((resolve, reject) => {\n const subscriber = new SafeSubscriber({\n next: (value) => {\n try {\n next(value);\n } catch (err) {\n reject(err);\n subscriber.unsubscribe();\n }\n },\n error: reject,\n complete: resolve,\n });\n this.subscribe(subscriber);\n }) as Promise;\n }\n\n /** @internal */\n protected _subscribe(subscriber: Subscriber): TeardownLogic {\n return this.source?.subscribe(subscriber);\n }\n\n /**\n * An interop point defined by the es7-observable spec https://github.com/zenparsing/es-observable\n * @method Symbol.observable\n * @return {Observable} this instance of the observable\n */\n [Symbol_observable]() {\n return this;\n }\n\n /* tslint:disable:max-line-length */\n pipe(): Observable;\n pipe(op1: OperatorFunction): Observable;\n pipe(op1: OperatorFunction, op2: OperatorFunction): Observable;\n pipe(op1: OperatorFunction, op2: OperatorFunction, op3: OperatorFunction): Observable;\n pipe(\n op1: OperatorFunction,\n op2: OperatorFunction,\n op3: OperatorFunction,\n op4: OperatorFunction\n ): Observable;\n pipe(\n op1: OperatorFunction,\n op2: OperatorFunction,\n op3: OperatorFunction,\n op4: OperatorFunction,\n op5: OperatorFunction\n ): Observable;\n pipe(\n op1: OperatorFunction,\n op2: OperatorFunction,\n op3: OperatorFunction,\n op4: OperatorFunction,\n op5: OperatorFunction,\n op6: OperatorFunction\n ): Observable;\n pipe(\n op1: OperatorFunction,\n op2: OperatorFunction,\n op3: OperatorFunction,\n op4: OperatorFunction,\n op5: OperatorFunction,\n op6: OperatorFunction,\n op7: OperatorFunction\n ): Observable;\n pipe(\n op1: OperatorFunction,\n op2: OperatorFunction,\n op3: OperatorFunction,\n op4: OperatorFunction,\n op5: OperatorFunction,\n op6: OperatorFunction,\n op7: 
OperatorFunction,\n op8: OperatorFunction\n ): Observable;\n pipe(\n op1: OperatorFunction,\n op2: OperatorFunction,\n op3: OperatorFunction,\n op4: OperatorFunction,\n op5: OperatorFunction,\n op6: OperatorFunction,\n op7: OperatorFunction,\n op8: OperatorFunction,\n op9: OperatorFunction\n ): Observable;\n pipe(\n op1: OperatorFunction,\n op2: OperatorFunction,\n op3: OperatorFunction,\n op4: OperatorFunction,\n op5: OperatorFunction,\n op6: OperatorFunction,\n op7: OperatorFunction,\n op8: OperatorFunction,\n op9: OperatorFunction,\n ...operations: OperatorFunction[]\n ): Observable;\n /* tslint:enable:max-line-length */\n\n /**\n * Used to stitch together functional operators into a chain.\n * @method pipe\n * @return {Observable} the Observable result of all of the operators having\n * been called in the order they were passed in.\n *\n * ## Example\n *\n * ```ts\n * import { interval, filter, map, scan } from 'rxjs';\n *\n * interval(1000)\n * .pipe(\n * filter(x => x % 2 === 0),\n * map(x => x + x),\n * scan((acc, x) => acc + x)\n * )\n * .subscribe(x => console.log(x));\n * ```\n */\n pipe(...operations: OperatorFunction[]): Observable {\n return pipeFromArray(operations)(this);\n }\n\n /* tslint:disable:max-line-length */\n /** @deprecated Replaced with {@link firstValueFrom} and {@link lastValueFrom}. Will be removed in v8. Details: https://rxjs.dev/deprecations/to-promise */\n toPromise(): Promise;\n /** @deprecated Replaced with {@link firstValueFrom} and {@link lastValueFrom}. Will be removed in v8. Details: https://rxjs.dev/deprecations/to-promise */\n toPromise(PromiseCtor: typeof Promise): Promise;\n /** @deprecated Replaced with {@link firstValueFrom} and {@link lastValueFrom}. Will be removed in v8. 
Details: https://rxjs.dev/deprecations/to-promise */\n toPromise(PromiseCtor: PromiseConstructorLike): Promise;\n /* tslint:enable:max-line-length */\n\n /**\n * Subscribe to this Observable and get a Promise resolving on\n * `complete` with the last emission (if any).\n *\n * **WARNING**: Only use this with observables you *know* will complete. If the source\n * observable does not complete, you will end up with a promise that is hung up, and\n * potentially all of the state of an async function hanging out in memory. To avoid\n * this situation, look into adding something like {@link timeout}, {@link take},\n * {@link takeWhile}, or {@link takeUntil} amongst others.\n *\n * @method toPromise\n * @param [promiseCtor] a constructor function used to instantiate\n * the Promise\n * @return A Promise that resolves with the last value emit, or\n * rejects on an error. If there were no emissions, Promise\n * resolves with undefined.\n * @deprecated Replaced with {@link firstValueFrom} and {@link lastValueFrom}. Will be removed in v8. Details: https://rxjs.dev/deprecations/to-promise\n */\n toPromise(promiseCtor?: PromiseConstructorLike): Promise {\n promiseCtor = getPromiseCtor(promiseCtor);\n\n return new promiseCtor((resolve, reject) => {\n let value: T | undefined;\n this.subscribe(\n (x: T) => (value = x),\n (err: any) => reject(err),\n () => resolve(value)\n );\n }) as Promise;\n }\n}\n\n/**\n * Decides between a passed promise constructor from consuming code,\n * A default configured promise constructor, and the native promise\n * constructor and returns it. If nothing can be found, it will throw\n * an error.\n * @param promiseCtor The optional promise constructor to passed by consuming code\n */\nfunction getPromiseCtor(promiseCtor: PromiseConstructorLike | undefined) {\n return promiseCtor ?? config.Promise ?? 
Promise;\n}\n\nfunction isObserver(value: any): value is Observer {\n return value && isFunction(value.next) && isFunction(value.error) && isFunction(value.complete);\n}\n\nfunction isSubscriber(value: any): value is Subscriber {\n return (value && value instanceof Subscriber) || (isObserver(value) && isSubscription(value));\n}\n", "import { Observable } from '../Observable';\nimport { Subscriber } from '../Subscriber';\nimport { OperatorFunction } from '../types';\nimport { isFunction } from './isFunction';\n\n/**\n * Used to determine if an object is an Observable with a lift function.\n */\nexport function hasLift(source: any): source is { lift: InstanceType['lift'] } {\n return isFunction(source?.lift);\n}\n\n/**\n * Creates an `OperatorFunction`. Used to define operators throughout the library in a concise way.\n * @param init The logic to connect the liftedSource to the subscriber at the moment of subscription.\n */\nexport function operate(\n init: (liftedSource: Observable, subscriber: Subscriber) => (() => void) | void\n): OperatorFunction {\n return (source: Observable) => {\n if (hasLift(source)) {\n return source.lift(function (this: Subscriber, liftedSource: Observable) {\n try {\n return init(liftedSource, this);\n } catch (err) {\n this.error(err);\n }\n });\n }\n throw new TypeError('Unable to lift unknown Observable type');\n };\n}\n", "import { Subscriber } from '../Subscriber';\n\n/**\n * Creates an instance of an `OperatorSubscriber`.\n * @param destination The downstream subscriber.\n * @param onNext Handles next values, only called if this subscriber is not stopped or closed. Any\n * error that occurs in this function is caught and sent to the `error` method of this subscriber.\n * @param onError Handles errors from the subscription, any errors that occur in this handler are caught\n * and send to the `destination` error handler.\n * @param onComplete Handles completion notification from the subscription. 
Any errors that occur in\n * this handler are sent to the `destination` error handler.\n * @param onFinalize Additional teardown logic here. This will only be called on teardown if the\n * subscriber itself is not already closed. This is called after all other teardown logic is executed.\n */\nexport function createOperatorSubscriber(\n destination: Subscriber,\n onNext?: (value: T) => void,\n onComplete?: () => void,\n onError?: (err: any) => void,\n onFinalize?: () => void\n): Subscriber {\n return new OperatorSubscriber(destination, onNext, onComplete, onError, onFinalize);\n}\n\n/**\n * A generic helper for allowing operators to be created with a Subscriber and\n * use closures to capture necessary state from the operator function itself.\n */\nexport class OperatorSubscriber extends Subscriber {\n /**\n * Creates an instance of an `OperatorSubscriber`.\n * @param destination The downstream subscriber.\n * @param onNext Handles next values, only called if this subscriber is not stopped or closed. Any\n * error that occurs in this function is caught and sent to the `error` method of this subscriber.\n * @param onError Handles errors from the subscription, any errors that occur in this handler are caught\n * and send to the `destination` error handler.\n * @param onComplete Handles completion notification from the subscription. Any errors that occur in\n * this handler are sent to the `destination` error handler.\n * @param onFinalize Additional finalization logic here. This will only be called on finalization if the\n * subscriber itself is not already closed. 
This is called after all other finalization logic is executed.\n * @param shouldUnsubscribe An optional check to see if an unsubscribe call should truly unsubscribe.\n * NOTE: This currently **ONLY** exists to support the strange behavior of {@link groupBy}, where unsubscription\n * to the resulting observable does not actually disconnect from the source if there are active subscriptions\n * to any grouped observable. (DO NOT EXPOSE OR USE EXTERNALLY!!!)\n */\n constructor(\n destination: Subscriber,\n onNext?: (value: T) => void,\n onComplete?: () => void,\n onError?: (err: any) => void,\n private onFinalize?: () => void,\n private shouldUnsubscribe?: () => boolean\n ) {\n // It's important - for performance reasons - that all of this class's\n // members are initialized and that they are always initialized in the same\n // order. This will ensure that all OperatorSubscriber instances have the\n // same hidden class in V8. This, in turn, will help keep the number of\n // hidden classes involved in property accesses within the base class as\n // low as possible. If the number of hidden classes involved exceeds four,\n // the property accesses will become megamorphic and performance penalties\n // will be incurred - i.e. inline caches won't be used.\n //\n // The reasons for ensuring all instances have the same hidden class are\n // further discussed in this blog post from Benedikt Meurer:\n // https://benediktmeurer.de/2018/03/23/impact-of-polymorphism-on-component-based-frameworks-like-react/\n super(destination);\n this._next = onNext\n ? function (this: OperatorSubscriber, value: T) {\n try {\n onNext(value);\n } catch (err) {\n destination.error(err);\n }\n }\n : super._next;\n this._error = onError\n ? 
function (this: OperatorSubscriber, err: any) {\n try {\n onError(err);\n } catch (err) {\n // Send any errors that occur down stream.\n destination.error(err);\n } finally {\n // Ensure finalization.\n this.unsubscribe();\n }\n }\n : super._error;\n this._complete = onComplete\n ? function (this: OperatorSubscriber) {\n try {\n onComplete();\n } catch (err) {\n // Send any errors that occur down stream.\n destination.error(err);\n } finally {\n // Ensure finalization.\n this.unsubscribe();\n }\n }\n : super._complete;\n }\n\n unsubscribe() {\n if (!this.shouldUnsubscribe || this.shouldUnsubscribe()) {\n const { closed } = this;\n super.unsubscribe();\n // Execute additional teardown if we have any and we didn't already do so.\n !closed && this.onFinalize?.();\n }\n }\n}\n", "import { Subscription } from '../Subscription';\n\ninterface AnimationFrameProvider {\n schedule(callback: FrameRequestCallback): Subscription;\n requestAnimationFrame: typeof requestAnimationFrame;\n cancelAnimationFrame: typeof cancelAnimationFrame;\n delegate:\n | {\n requestAnimationFrame: typeof requestAnimationFrame;\n cancelAnimationFrame: typeof cancelAnimationFrame;\n }\n | undefined;\n}\n\nexport const animationFrameProvider: AnimationFrameProvider = {\n // When accessing the delegate, use the variable rather than `this` so that\n // the functions can be called without being bound to the provider.\n schedule(callback) {\n let request = requestAnimationFrame;\n let cancel: typeof cancelAnimationFrame | undefined = cancelAnimationFrame;\n const { delegate } = animationFrameProvider;\n if (delegate) {\n request = delegate.requestAnimationFrame;\n cancel = delegate.cancelAnimationFrame;\n }\n const handle = request((timestamp) => {\n // Clear the cancel function. 
The request has been fulfilled, so\n // attempting to cancel the request upon unsubscription would be\n // pointless.\n cancel = undefined;\n callback(timestamp);\n });\n return new Subscription(() => cancel?.(handle));\n },\n requestAnimationFrame(...args) {\n const { delegate } = animationFrameProvider;\n return (delegate?.requestAnimationFrame || requestAnimationFrame)(...args);\n },\n cancelAnimationFrame(...args) {\n const { delegate } = animationFrameProvider;\n return (delegate?.cancelAnimationFrame || cancelAnimationFrame)(...args);\n },\n delegate: undefined,\n};\n", "import { createErrorClass } from './createErrorClass';\n\nexport interface ObjectUnsubscribedError extends Error {}\n\nexport interface ObjectUnsubscribedErrorCtor {\n /**\n * @deprecated Internal implementation detail. Do not construct error instances.\n * Cannot be tagged as internal: https://github.com/ReactiveX/rxjs/issues/6269\n */\n new (): ObjectUnsubscribedError;\n}\n\n/**\n * An error thrown when an action is invalid because the object has been\n * unsubscribed.\n *\n * @see {@link Subject}\n * @see {@link BehaviorSubject}\n *\n * @class ObjectUnsubscribedError\n */\nexport const ObjectUnsubscribedError: ObjectUnsubscribedErrorCtor = createErrorClass(\n (_super) =>\n function ObjectUnsubscribedErrorImpl(this: any) {\n _super(this);\n this.name = 'ObjectUnsubscribedError';\n this.message = 'object unsubscribed';\n }\n);\n", "import { Operator } from './Operator';\nimport { Observable } from './Observable';\nimport { Subscriber } from './Subscriber';\nimport { Subscription, EMPTY_SUBSCRIPTION } from './Subscription';\nimport { Observer, SubscriptionLike, TeardownLogic } from './types';\nimport { ObjectUnsubscribedError } from './util/ObjectUnsubscribedError';\nimport { arrRemove } from './util/arrRemove';\nimport { errorContext } from './util/errorContext';\n\n/**\n * A Subject is a special type of Observable that allows values to be\n * multicasted to many Observers. 
Subjects are like EventEmitters.\n *\n * Every Subject is an Observable and an Observer. You can subscribe to a\n * Subject, and you can call next to feed values as well as error and complete.\n */\nexport class Subject extends Observable implements SubscriptionLike {\n closed = false;\n\n private currentObservers: Observer[] | null = null;\n\n /** @deprecated Internal implementation detail, do not use directly. Will be made internal in v8. */\n observers: Observer[] = [];\n /** @deprecated Internal implementation detail, do not use directly. Will be made internal in v8. */\n isStopped = false;\n /** @deprecated Internal implementation detail, do not use directly. Will be made internal in v8. */\n hasError = false;\n /** @deprecated Internal implementation detail, do not use directly. Will be made internal in v8. */\n thrownError: any = null;\n\n /**\n * Creates a \"subject\" by basically gluing an observer to an observable.\n *\n * @nocollapse\n * @deprecated Recommended you do not use. Will be removed at some point in the future. Plans for replacement still under discussion.\n */\n static create: (...args: any[]) => any = (destination: Observer, source: Observable): AnonymousSubject => {\n return new AnonymousSubject(destination, source);\n };\n\n constructor() {\n // NOTE: This must be here to obscure Observable's constructor.\n super();\n }\n\n /** @deprecated Internal implementation detail, do not use directly. Will be made internal in v8. 
*/\n lift(operator: Operator): Observable {\n const subject = new AnonymousSubject(this, this);\n subject.operator = operator as any;\n return subject as any;\n }\n\n /** @internal */\n protected _throwIfClosed() {\n if (this.closed) {\n throw new ObjectUnsubscribedError();\n }\n }\n\n next(value: T) {\n errorContext(() => {\n this._throwIfClosed();\n if (!this.isStopped) {\n if (!this.currentObservers) {\n this.currentObservers = Array.from(this.observers);\n }\n for (const observer of this.currentObservers) {\n observer.next(value);\n }\n }\n });\n }\n\n error(err: any) {\n errorContext(() => {\n this._throwIfClosed();\n if (!this.isStopped) {\n this.hasError = this.isStopped = true;\n this.thrownError = err;\n const { observers } = this;\n while (observers.length) {\n observers.shift()!.error(err);\n }\n }\n });\n }\n\n complete() {\n errorContext(() => {\n this._throwIfClosed();\n if (!this.isStopped) {\n this.isStopped = true;\n const { observers } = this;\n while (observers.length) {\n observers.shift()!.complete();\n }\n }\n });\n }\n\n unsubscribe() {\n this.isStopped = this.closed = true;\n this.observers = this.currentObservers = null!;\n }\n\n get observed() {\n return this.observers?.length > 0;\n }\n\n /** @internal */\n protected _trySubscribe(subscriber: Subscriber): TeardownLogic {\n this._throwIfClosed();\n return super._trySubscribe(subscriber);\n }\n\n /** @internal */\n protected _subscribe(subscriber: Subscriber): Subscription {\n this._throwIfClosed();\n this._checkFinalizedStatuses(subscriber);\n return this._innerSubscribe(subscriber);\n }\n\n /** @internal */\n protected _innerSubscribe(subscriber: Subscriber) {\n const { hasError, isStopped, observers } = this;\n if (hasError || isStopped) {\n return EMPTY_SUBSCRIPTION;\n }\n this.currentObservers = null;\n observers.push(subscriber);\n return new Subscription(() => {\n this.currentObservers = null;\n arrRemove(observers, subscriber);\n });\n }\n\n /** @internal */\n protected 
_checkFinalizedStatuses(subscriber: Subscriber) {\n const { hasError, thrownError, isStopped } = this;\n if (hasError) {\n subscriber.error(thrownError);\n } else if (isStopped) {\n subscriber.complete();\n }\n }\n\n /**\n * Creates a new Observable with this Subject as the source. You can do this\n * to create custom Observer-side logic of the Subject and conceal it from\n * code that uses the Observable.\n * @return {Observable} Observable that the Subject casts to\n */\n asObservable(): Observable {\n const observable: any = new Observable();\n observable.source = this;\n return observable;\n }\n}\n\n/**\n * @class AnonymousSubject\n */\nexport class AnonymousSubject extends Subject {\n constructor(\n /** @deprecated Internal implementation detail, do not use directly. Will be made internal in v8. */\n public destination?: Observer,\n source?: Observable\n ) {\n super();\n this.source = source;\n }\n\n next(value: T) {\n this.destination?.next?.(value);\n }\n\n error(err: any) {\n this.destination?.error?.(err);\n }\n\n complete() {\n this.destination?.complete?.();\n }\n\n /** @internal */\n protected _subscribe(subscriber: Subscriber): Subscription {\n return this.source?.subscribe(subscriber) ?? 
EMPTY_SUBSCRIPTION;\n }\n}\n", "import { TimestampProvider } from '../types';\n\ninterface DateTimestampProvider extends TimestampProvider {\n delegate: TimestampProvider | undefined;\n}\n\nexport const dateTimestampProvider: DateTimestampProvider = {\n now() {\n // Use the variable rather than `this` so that the function can be called\n // without being bound to the provider.\n return (dateTimestampProvider.delegate || Date).now();\n },\n delegate: undefined,\n};\n", "import { Subject } from './Subject';\nimport { TimestampProvider } from './types';\nimport { Subscriber } from './Subscriber';\nimport { Subscription } from './Subscription';\nimport { dateTimestampProvider } from './scheduler/dateTimestampProvider';\n\n/**\n * A variant of {@link Subject} that \"replays\" old values to new subscribers by emitting them when they first subscribe.\n *\n * `ReplaySubject` has an internal buffer that will store a specified number of values that it has observed. Like `Subject`,\n * `ReplaySubject` \"observes\" values by having them passed to its `next` method. When it observes a value, it will store that\n * value for a time determined by the configuration of the `ReplaySubject`, as passed to its constructor.\n *\n * When a new subscriber subscribes to the `ReplaySubject` instance, it will synchronously emit all values in its buffer in\n * a First-In-First-Out (FIFO) manner. The `ReplaySubject` will also complete, if it has observed completion; and it will\n * error if it has observed an error.\n *\n * There are two main configuration items to be concerned with:\n *\n * 1. `bufferSize` - This will determine how many items are stored in the buffer, defaults to infinite.\n * 2. `windowTime` - The amount of time to hold a value in the buffer before removing it from the buffer.\n *\n * Both configurations may exist simultaneously. 
So if you would like to buffer a maximum of 3 values, as long as the values\n * are less than 2 seconds old, you could do so with a `new ReplaySubject(3, 2000)`.\n *\n * ### Differences with BehaviorSubject\n *\n * `BehaviorSubject` is similar to `new ReplaySubject(1)`, with a couple of exceptions:\n *\n * 1. `BehaviorSubject` comes \"primed\" with a single value upon construction.\n * 2. `ReplaySubject` will replay values, even after observing an error, where `BehaviorSubject` will not.\n *\n * @see {@link Subject}\n * @see {@link BehaviorSubject}\n * @see {@link shareReplay}\n */\nexport class ReplaySubject extends Subject {\n private _buffer: (T | number)[] = [];\n private _infiniteTimeWindow = true;\n\n /**\n * @param bufferSize The size of the buffer to replay on subscription\n * @param windowTime The amount of time the buffered items will stay buffered\n * @param timestampProvider An object with a `now()` method that provides the current timestamp. This is used to\n * calculate the amount of time something has been buffered.\n */\n constructor(\n private _bufferSize = Infinity,\n private _windowTime = Infinity,\n private _timestampProvider: TimestampProvider = dateTimestampProvider\n ) {\n super();\n this._infiniteTimeWindow = _windowTime === Infinity;\n this._bufferSize = Math.max(1, _bufferSize);\n this._windowTime = Math.max(1, _windowTime);\n }\n\n next(value: T): void {\n const { isStopped, _buffer, _infiniteTimeWindow, _timestampProvider, _windowTime } = this;\n if (!isStopped) {\n _buffer.push(value);\n !_infiniteTimeWindow && _buffer.push(_timestampProvider.now() + _windowTime);\n }\n this._trimBuffer();\n super.next(value);\n }\n\n /** @internal */\n protected _subscribe(subscriber: Subscriber): Subscription {\n this._throwIfClosed();\n this._trimBuffer();\n\n const subscription = this._innerSubscribe(subscriber);\n\n const { _infiniteTimeWindow, _buffer } = this;\n // We use a copy here, so reentrant code does not mutate our array while we're\n // 
emitting it to a new subscriber.\n const copy = _buffer.slice();\n for (let i = 0; i < copy.length && !subscriber.closed; i += _infiniteTimeWindow ? 1 : 2) {\n subscriber.next(copy[i] as T);\n }\n\n this._checkFinalizedStatuses(subscriber);\n\n return subscription;\n }\n\n private _trimBuffer() {\n const { _bufferSize, _timestampProvider, _buffer, _infiniteTimeWindow } = this;\n // If we don't have an infinite buffer size, and we're over the length,\n // use splice to truncate the old buffer values off. Note that we have to\n // double the size for instances where we're not using an infinite time window\n // because we're storing the values and the timestamps in the same array.\n const adjustedBufferSize = (_infiniteTimeWindow ? 1 : 2) * _bufferSize;\n _bufferSize < Infinity && adjustedBufferSize < _buffer.length && _buffer.splice(0, _buffer.length - adjustedBufferSize);\n\n // Now, if we're not in an infinite time window, remove all values where the time is\n // older than what is allowed.\n if (!_infiniteTimeWindow) {\n const now = _timestampProvider.now();\n let last = 0;\n // Search the array for the first timestamp that isn't expired and\n // truncate the buffer up to that point.\n for (let i = 1; i < _buffer.length && (_buffer[i] as number) <= now; i += 2) {\n last = i;\n }\n last && _buffer.splice(0, last + 1);\n }\n }\n}\n", "import { Scheduler } from '../Scheduler';\nimport { Subscription } from '../Subscription';\nimport { SchedulerAction } from '../types';\n\n/**\n * A unit of work to be executed in a `scheduler`. 
An action is typically\n * created from within a {@link SchedulerLike} and an RxJS user does not need to concern\n * themselves about creating and manipulating an Action.\n *\n * ```ts\n * class Action extends Subscription {\n * new (scheduler: Scheduler, work: (state?: T) => void);\n * schedule(state?: T, delay: number = 0): Subscription;\n * }\n * ```\n *\n * @class Action\n */\nexport class Action extends Subscription {\n constructor(scheduler: Scheduler, work: (this: SchedulerAction, state?: T) => void) {\n super();\n }\n /**\n * Schedules this action on its parent {@link SchedulerLike} for execution. May be passed\n * some context object, `state`. May happen at some point in the future,\n * according to the `delay` parameter, if specified.\n * @param {T} [state] Some contextual data that the `work` function uses when\n * called by the Scheduler.\n * @param {number} [delay] Time to wait before executing the work, where the\n * time unit is implicit and defined by the Scheduler.\n * @return {void}\n */\n public schedule(state?: T, delay: number = 0): Subscription {\n return this;\n }\n}\n", "import type { TimerHandle } from './timerHandle';\ntype SetIntervalFunction = (handler: () => void, timeout?: number, ...args: any[]) => TimerHandle;\ntype ClearIntervalFunction = (handle: TimerHandle) => void;\n\ninterface IntervalProvider {\n setInterval: SetIntervalFunction;\n clearInterval: ClearIntervalFunction;\n delegate:\n | {\n setInterval: SetIntervalFunction;\n clearInterval: ClearIntervalFunction;\n }\n | undefined;\n}\n\nexport const intervalProvider: IntervalProvider = {\n // When accessing the delegate, use the variable rather than `this` so that\n // the functions can be called without being bound to the provider.\n setInterval(handler: () => void, timeout?: number, ...args) {\n const { delegate } = intervalProvider;\n if (delegate?.setInterval) {\n return delegate.setInterval(handler, timeout, ...args);\n }\n return setInterval(handler, timeout, ...args);\n 
},\n clearInterval(handle) {\n const { delegate } = intervalProvider;\n return (delegate?.clearInterval || clearInterval)(handle as any);\n },\n delegate: undefined,\n};\n", "import { Action } from './Action';\nimport { SchedulerAction } from '../types';\nimport { Subscription } from '../Subscription';\nimport { AsyncScheduler } from './AsyncScheduler';\nimport { intervalProvider } from './intervalProvider';\nimport { arrRemove } from '../util/arrRemove';\nimport { TimerHandle } from './timerHandle';\n\nexport class AsyncAction extends Action {\n public id: TimerHandle | undefined;\n public state?: T;\n // @ts-ignore: Property has no initializer and is not definitely assigned\n public delay: number;\n protected pending: boolean = false;\n\n constructor(protected scheduler: AsyncScheduler, protected work: (this: SchedulerAction, state?: T) => void) {\n super(scheduler, work);\n }\n\n public schedule(state?: T, delay: number = 0): Subscription {\n if (this.closed) {\n return this;\n }\n\n // Always replace the current state with the new state.\n this.state = state;\n\n const id = this.id;\n const scheduler = this.scheduler;\n\n //\n // Important implementation note:\n //\n // Actions only execute once by default, unless rescheduled from within the\n // scheduled callback. This allows us to implement single and repeat\n // actions via the same code path, without adding API surface area, as well\n // as mimic traditional recursion but across asynchronous boundaries.\n //\n // However, JS runtimes and timers distinguish between intervals achieved by\n // serial `setTimeout` calls vs. a single `setInterval` call. An interval of\n // serial `setTimeout` calls can be individually delayed, which delays\n // scheduling the next `setTimeout`, and so on. 
`setInterval` attempts to\n // guarantee the interval callback will be invoked more precisely to the\n // interval period, regardless of load.\n //\n // Therefore, we use `setInterval` to schedule single and repeat actions.\n // If the action reschedules itself with the same delay, the interval is not\n // canceled. If the action doesn't reschedule, or reschedules with a\n // different delay, the interval will be canceled after scheduled callback\n // execution.\n //\n if (id != null) {\n this.id = this.recycleAsyncId(scheduler, id, delay);\n }\n\n // Set the pending flag indicating that this action has been scheduled, or\n // has recursively rescheduled itself.\n this.pending = true;\n\n this.delay = delay;\n // If this action has already an async Id, don't request a new one.\n this.id = this.id ?? this.requestAsyncId(scheduler, this.id, delay);\n\n return this;\n }\n\n protected requestAsyncId(scheduler: AsyncScheduler, _id?: TimerHandle, delay: number = 0): TimerHandle {\n return intervalProvider.setInterval(scheduler.flush.bind(scheduler, this), delay);\n }\n\n protected recycleAsyncId(_scheduler: AsyncScheduler, id?: TimerHandle, delay: number | null = 0): TimerHandle | undefined {\n // If this action is rescheduled with the same delay time, don't clear the interval id.\n if (delay != null && this.delay === delay && this.pending === false) {\n return id;\n }\n // Otherwise, if the action's delay time is different from the current delay,\n // or the action has been rescheduled before it's executed, clear the interval id\n if (id != null) {\n intervalProvider.clearInterval(id);\n }\n\n return undefined;\n }\n\n /**\n * Immediately executes this action and the `work` it contains.\n * @return {any}\n */\n public execute(state: T, delay: number): any {\n if (this.closed) {\n return new Error('executing a cancelled action');\n }\n\n this.pending = false;\n const error = this._execute(state, delay);\n if (error) {\n return error;\n } else if (this.pending === false 
&& this.id != null) {\n // Dequeue if the action didn't reschedule itself. Don't call\n // unsubscribe(), because the action could reschedule later.\n // For example:\n // ```\n // scheduler.schedule(function doWork(counter) {\n // /* ... I'm a busy worker bee ... */\n // var originalAction = this;\n // /* wait 100ms before rescheduling the action */\n // setTimeout(function () {\n // originalAction.schedule(counter + 1);\n // }, 100);\n // }, 1000);\n // ```\n this.id = this.recycleAsyncId(this.scheduler, this.id, null);\n }\n }\n\n protected _execute(state: T, _delay: number): any {\n let errored: boolean = false;\n let errorValue: any;\n try {\n this.work(state);\n } catch (e) {\n errored = true;\n // HACK: Since code elsewhere is relying on the \"truthiness\" of the\n // return here, we can't have it return \"\" or 0 or false.\n // TODO: Clean this up when we refactor schedulers mid-version-8 or so.\n errorValue = e ? e : new Error('Scheduled action threw falsy error');\n }\n if (errored) {\n this.unsubscribe();\n return errorValue;\n }\n }\n\n unsubscribe() {\n if (!this.closed) {\n const { id, scheduler } = this;\n const { actions } = scheduler;\n\n this.work = this.state = this.scheduler = null!;\n this.pending = false;\n\n arrRemove(actions, this);\n if (id != null) {\n this.id = this.recycleAsyncId(scheduler, id, null);\n }\n\n this.delay = null!;\n super.unsubscribe();\n }\n }\n}\n", "import { Action } from './scheduler/Action';\nimport { Subscription } from './Subscription';\nimport { SchedulerLike, SchedulerAction } from './types';\nimport { dateTimestampProvider } from './scheduler/dateTimestampProvider';\n\n/**\n * An execution context and a data structure to order tasks and schedule their\n * execution. 
Provides a notion of (potentially virtual) time, through the\n * `now()` getter method.\n *\n * Each unit of work in a Scheduler is called an `Action`.\n *\n * ```ts\n * class Scheduler {\n * now(): number;\n * schedule(work, delay?, state?): Subscription;\n * }\n * ```\n *\n * @class Scheduler\n * @deprecated Scheduler is an internal implementation detail of RxJS, and\n * should not be used directly. Rather, create your own class and implement\n * {@link SchedulerLike}. Will be made internal in v8.\n */\nexport class Scheduler implements SchedulerLike {\n public static now: () => number = dateTimestampProvider.now;\n\n constructor(private schedulerActionCtor: typeof Action, now: () => number = Scheduler.now) {\n this.now = now;\n }\n\n /**\n * A getter method that returns a number representing the current time\n * (at the time this function was called) according to the scheduler's own\n * internal clock.\n * @return {number} A number that represents the current time. May or may not\n * have a relation to wall-clock time. May or may not refer to a time unit\n * (e.g. milliseconds).\n */\n public now: () => number;\n\n /**\n * Schedules a function, `work`, for execution. May happen at some point in\n * the future, according to the `delay` parameter, if specified. 
May be passed\n * some context object, `state`, which will be passed to the `work` function.\n *\n * The given arguments will be processed an stored as an Action object in a\n * queue of actions.\n *\n * @param {function(state: ?T): ?Subscription} work A function representing a\n * task, or some unit of work to be executed by the Scheduler.\n * @param {number} [delay] Time to wait before executing the work, where the\n * time unit is implicit and defined by the Scheduler itself.\n * @param {T} [state] Some contextual data that the `work` function uses when\n * called by the Scheduler.\n * @return {Subscription} A subscription in order to be able to unsubscribe\n * the scheduled work.\n */\n public schedule(work: (this: SchedulerAction, state?: T) => void, delay: number = 0, state?: T): Subscription {\n return new this.schedulerActionCtor(this, work).schedule(state, delay);\n }\n}\n", "import { Scheduler } from '../Scheduler';\nimport { Action } from './Action';\nimport { AsyncAction } from './AsyncAction';\nimport { TimerHandle } from './timerHandle';\n\nexport class AsyncScheduler extends Scheduler {\n public actions: Array> = [];\n /**\n * A flag to indicate whether the Scheduler is currently executing a batch of\n * queued actions.\n * @type {boolean}\n * @internal\n */\n public _active: boolean = false;\n /**\n * An internal ID used to track the latest asynchronous task such as those\n * coming from `setTimeout`, `setInterval`, `requestAnimationFrame`, and\n * others.\n * @type {any}\n * @internal\n */\n public _scheduled: TimerHandle | undefined;\n\n constructor(SchedulerAction: typeof Action, now: () => number = Scheduler.now) {\n super(SchedulerAction, now);\n }\n\n public flush(action: AsyncAction): void {\n const { actions } = this;\n\n if (this._active) {\n actions.push(action);\n return;\n }\n\n let error: any;\n this._active = true;\n\n do {\n if ((error = action.execute(action.state, action.delay))) {\n break;\n }\n } while ((action = 
actions.shift()!)); // exhaust the scheduler queue\n\n this._active = false;\n\n if (error) {\n while ((action = actions.shift()!)) {\n action.unsubscribe();\n }\n throw error;\n }\n }\n}\n", "import { AsyncAction } from './AsyncAction';\nimport { AsyncScheduler } from './AsyncScheduler';\n\n/**\n *\n * Async Scheduler\n *\n * Schedule task as if you used setTimeout(task, duration)\n *\n * `async` scheduler schedules tasks asynchronously, by putting them on the JavaScript\n * event loop queue. It is best used to delay tasks in time or to schedule tasks repeating\n * in intervals.\n *\n * If you just want to \"defer\" task, that is to perform it right after currently\n * executing synchronous code ends (commonly achieved by `setTimeout(deferredTask, 0)`),\n * better choice will be the {@link asapScheduler} scheduler.\n *\n * ## Examples\n * Use async scheduler to delay task\n * ```ts\n * import { asyncScheduler } from 'rxjs';\n *\n * const task = () => console.log('it works!');\n *\n * asyncScheduler.schedule(task, 2000);\n *\n * // After 2 seconds logs:\n * // \"it works!\"\n * ```\n *\n * Use async scheduler to repeat task in intervals\n * ```ts\n * import { asyncScheduler } from 'rxjs';\n *\n * function task(state) {\n * console.log(state);\n * this.schedule(state + 1, 1000); // `this` references currently executing Action,\n * // which we reschedule with new state and delay\n * }\n *\n * asyncScheduler.schedule(task, 3000, 0);\n *\n * // Logs:\n * // 0 after 3s\n * // 1 after 4s\n * // 2 after 5s\n * // 3 after 6s\n * ```\n */\n\nexport const asyncScheduler = new AsyncScheduler(AsyncAction);\n\n/**\n * @deprecated Renamed to {@link asyncScheduler}. 
Will be removed in v8.\n */\nexport const async = asyncScheduler;\n", "import { AsyncAction } from './AsyncAction';\nimport { AnimationFrameScheduler } from './AnimationFrameScheduler';\nimport { SchedulerAction } from '../types';\nimport { animationFrameProvider } from './animationFrameProvider';\nimport { TimerHandle } from './timerHandle';\n\nexport class AnimationFrameAction extends AsyncAction {\n constructor(protected scheduler: AnimationFrameScheduler, protected work: (this: SchedulerAction, state?: T) => void) {\n super(scheduler, work);\n }\n\n protected requestAsyncId(scheduler: AnimationFrameScheduler, id?: TimerHandle, delay: number = 0): TimerHandle {\n // If delay is greater than 0, request as an async action.\n if (delay !== null && delay > 0) {\n return super.requestAsyncId(scheduler, id, delay);\n }\n // Push the action to the end of the scheduler queue.\n scheduler.actions.push(this);\n // If an animation frame has already been requested, don't request another\n // one. If an animation frame hasn't been requested yet, request one. Return\n // the current animation frame request id.\n return scheduler._scheduled || (scheduler._scheduled = animationFrameProvider.requestAnimationFrame(() => scheduler.flush(undefined)));\n }\n\n protected recycleAsyncId(scheduler: AnimationFrameScheduler, id?: TimerHandle, delay: number = 0): TimerHandle | undefined {\n // If delay exists and is greater than 0, or if the delay is null (the\n // action wasn't rescheduled) but was originally scheduled as an async\n // action, then recycle as an async action.\n if (delay != null ? 
delay > 0 : this.delay > 0) {\n return super.recycleAsyncId(scheduler, id, delay);\n }\n // If the scheduler queue has no remaining actions with the same async id,\n // cancel the requested animation frame and set the scheduled flag to\n // undefined so the next AnimationFrameAction will request its own.\n const { actions } = scheduler;\n if (id != null && actions[actions.length - 1]?.id !== id) {\n animationFrameProvider.cancelAnimationFrame(id as number);\n scheduler._scheduled = undefined;\n }\n // Return undefined so the action knows to request a new async id if it's rescheduled.\n return undefined;\n }\n}\n", "import { AsyncAction } from './AsyncAction';\nimport { AsyncScheduler } from './AsyncScheduler';\n\nexport class AnimationFrameScheduler extends AsyncScheduler {\n public flush(action?: AsyncAction): void {\n this._active = true;\n // The async id that effects a call to flush is stored in _scheduled.\n // Before executing an action, it's necessary to check the action's async\n // id to determine whether it's supposed to be executed in the current\n // flush.\n // Previous implementations of this method used a count to determine this,\n // but that was unsound, as actions that are unsubscribed - i.e. 
cancelled -\n // are removed from the actions array and that can shift actions that are\n // scheduled to be executed in a subsequent flush into positions at which\n // they are executed within the current flush.\n const flushId = this._scheduled;\n this._scheduled = undefined;\n\n const { actions } = this;\n let error: any;\n action = action || actions.shift()!;\n\n do {\n if ((error = action.execute(action.state, action.delay))) {\n break;\n }\n } while ((action = actions[0]) && action.id === flushId && actions.shift());\n\n this._active = false;\n\n if (error) {\n while ((action = actions[0]) && action.id === flushId && actions.shift()) {\n action.unsubscribe();\n }\n throw error;\n }\n }\n}\n", "import { AnimationFrameAction } from './AnimationFrameAction';\nimport { AnimationFrameScheduler } from './AnimationFrameScheduler';\n\n/**\n *\n * Animation Frame Scheduler\n *\n * Perform task when `window.requestAnimationFrame` would fire\n *\n * When `animationFrame` scheduler is used with delay, it will fall back to {@link asyncScheduler} scheduler\n * behaviour.\n *\n * Without delay, `animationFrame` scheduler can be used to create smooth browser animations.\n * It makes sure scheduled task will happen just before next browser content repaint,\n * thus performing animations as efficiently as possible.\n *\n * ## Example\n * Schedule div height animation\n * ```ts\n * // html:
\n * import { animationFrameScheduler } from 'rxjs';\n *\n * const div = document.querySelector('div');\n *\n * animationFrameScheduler.schedule(function(height) {\n * div.style.height = height + \"px\";\n *\n * this.schedule(height + 1); // `this` references currently executing Action,\n * // which we reschedule with new state\n * }, 0, 0);\n *\n * // You will see a div element growing in height\n * ```\n */\n\nexport const animationFrameScheduler = new AnimationFrameScheduler(AnimationFrameAction);\n\n/**\n * @deprecated Renamed to {@link animationFrameScheduler}. Will be removed in v8.\n */\nexport const animationFrame = animationFrameScheduler;\n", "import { Observable } from '../Observable';\nimport { SchedulerLike } from '../types';\n\n/**\n * A simple Observable that emits no items to the Observer and immediately\n * emits a complete notification.\n *\n * Just emits 'complete', and nothing else.\n *\n * ![](empty.png)\n *\n * A simple Observable that only emits the complete notification. It can be used\n * for composing with other Observables, such as in a {@link mergeMap}.\n *\n * ## Examples\n *\n * Log complete notification\n *\n * ```ts\n * import { EMPTY } from 'rxjs';\n *\n * EMPTY.subscribe({\n * next: () => console.log('Next'),\n * complete: () => console.log('Complete!')\n * });\n *\n * // Outputs\n * // Complete!\n * ```\n *\n * Emit the number 7, then complete\n *\n * ```ts\n * import { EMPTY, startWith } from 'rxjs';\n *\n * const result = EMPTY.pipe(startWith(7));\n * result.subscribe(x => console.log(x));\n *\n * // Outputs\n * // 7\n * ```\n *\n * Map and flatten only odd numbers to the sequence `'a'`, `'b'`, `'c'`\n *\n * ```ts\n * import { interval, mergeMap, of, EMPTY } from 'rxjs';\n *\n * const interval$ = interval(1000);\n * const result = interval$.pipe(\n * mergeMap(x => x % 2 === 1 ? 
of('a', 'b', 'c') : EMPTY),\n * );\n * result.subscribe(x => console.log(x));\n *\n * // Results in the following to the console:\n * // x is equal to the count on the interval, e.g. (0, 1, 2, 3, ...)\n * // x will occur every 1000ms\n * // if x % 2 is equal to 1, print a, b, c (each on its own)\n * // if x % 2 is not equal to 1, nothing will be output\n * ```\n *\n * @see {@link Observable}\n * @see {@link NEVER}\n * @see {@link of}\n * @see {@link throwError}\n */\nexport const EMPTY = new Observable((subscriber) => subscriber.complete());\n\n/**\n * @param scheduler A {@link SchedulerLike} to use for scheduling\n * the emission of the complete notification.\n * @deprecated Replaced with the {@link EMPTY} constant or {@link scheduled} (e.g. `scheduled([], scheduler)`). Will be removed in v8.\n */\nexport function empty(scheduler?: SchedulerLike) {\n return scheduler ? emptyScheduled(scheduler) : EMPTY;\n}\n\nfunction emptyScheduled(scheduler: SchedulerLike) {\n return new Observable((subscriber) => scheduler.schedule(() => subscriber.complete()));\n}\n", "import { SchedulerLike } from '../types';\nimport { isFunction } from './isFunction';\n\nexport function isScheduler(value: any): value is SchedulerLike {\n return value && isFunction(value.schedule);\n}\n", "import { SchedulerLike } from '../types';\nimport { isFunction } from './isFunction';\nimport { isScheduler } from './isScheduler';\n\nfunction last(arr: T[]): T | undefined {\n return arr[arr.length - 1];\n}\n\nexport function popResultSelector(args: any[]): ((...args: unknown[]) => unknown) | undefined {\n return isFunction(last(args)) ? args.pop() : undefined;\n}\n\nexport function popScheduler(args: any[]): SchedulerLike | undefined {\n return isScheduler(last(args)) ? args.pop() : undefined;\n}\n\nexport function popNumber(args: any[], defaultValue: number): number {\n return typeof last(args) === 'number' ? args.pop()! 
: defaultValue;\n}\n", "export const isArrayLike = ((x: any): x is ArrayLike => x && typeof x.length === 'number' && typeof x !== 'function');", "import { isFunction } from \"./isFunction\";\n\n/**\n * Tests to see if the object is \"thennable\".\n * @param value the object to test\n */\nexport function isPromise(value: any): value is PromiseLike {\n return isFunction(value?.then);\n}\n", "import { InteropObservable } from '../types';\nimport { observable as Symbol_observable } from '../symbol/observable';\nimport { isFunction } from './isFunction';\n\n/** Identifies an input as being Observable (but not necessary an Rx Observable) */\nexport function isInteropObservable(input: any): input is InteropObservable {\n return isFunction(input[Symbol_observable]);\n}\n", "import { isFunction } from './isFunction';\n\nexport function isAsyncIterable(obj: any): obj is AsyncIterable {\n return Symbol.asyncIterator && isFunction(obj?.[Symbol.asyncIterator]);\n}\n", "/**\n * Creates the TypeError to throw if an invalid object is passed to `from` or `scheduled`.\n * @param input The object that was passed.\n */\nexport function createInvalidObservableTypeError(input: any) {\n // TODO: We should create error codes that can be looked up, so this can be less verbose.\n return new TypeError(\n `You provided ${\n input !== null && typeof input === 'object' ? 'an invalid object' : `'${input}'`\n } where a stream was expected. 
You can provide an Observable, Promise, ReadableStream, Array, AsyncIterable, or Iterable.`\n );\n}\n", "export function getSymbolIterator(): symbol {\n if (typeof Symbol !== 'function' || !Symbol.iterator) {\n return '@@iterator' as any;\n }\n\n return Symbol.iterator;\n}\n\nexport const iterator = getSymbolIterator();\n", "import { iterator as Symbol_iterator } from '../symbol/iterator';\nimport { isFunction } from './isFunction';\n\n/** Identifies an input as being an Iterable */\nexport function isIterable(input: any): input is Iterable {\n return isFunction(input?.[Symbol_iterator]);\n}\n", "import { ReadableStreamLike } from '../types';\nimport { isFunction } from './isFunction';\n\nexport async function* readableStreamLikeToAsyncGenerator(readableStream: ReadableStreamLike): AsyncGenerator {\n const reader = readableStream.getReader();\n try {\n while (true) {\n const { value, done } = await reader.read();\n if (done) {\n return;\n }\n yield value!;\n }\n } finally {\n reader.releaseLock();\n }\n}\n\nexport function isReadableStreamLike(obj: any): obj is ReadableStreamLike {\n // We don't want to use instanceof checks because they would return\n // false for instances from another Realm, like an