From e83bd9a9ccf424bb9060e045c5bb74d2bd03eba5 Mon Sep 17 00:00:00 2001 From: Benjamin Pelletier Date: Wed, 26 Jul 2023 21:48:22 +0000 Subject: [PATCH] Add JSON Schema generation (#8) --- pyproject.toml | 2 +- requirements.txt | 1 + src/implicitdict/jsonschema.py | 205 +++++++++++++++++++++++++++++++++ tests/__init__.py | 0 tests/test_containers.py | 27 +---- tests/test_inheritance.py | 26 +---- tests/test_jsonschema.py | 94 +++++++++++++++ tests/test_mutability.py | 15 +-- tests/test_normal_usage.py | 18 ++- tests/test_optional.py | 38 ++---- tests/test_properties.py | 28 +---- tests/test_types.py | 162 ++++++++++++++++++++++++++ 12 files changed, 491 insertions(+), 125 deletions(-) create mode 100644 src/implicitdict/jsonschema.py create mode 100644 tests/__init__.py create mode 100644 tests/test_jsonschema.py create mode 100644 tests/test_types.py diff --git a/pyproject.toml b/pyproject.toml index a97e52f..31eed93 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -35,7 +35,7 @@ classifiers = [ "License :: OSI Approved :: Apache Software License", "Operating System :: OS Independent", ] -dependencies = ["arrow", "pytimeparse"] +dependencies = ["arrow", "jsonschema", "pytimeparse"] [project.optional-dependencies] dev = ["pytest==5.0.0", "pytest-cov[all]", "black==21.10b0"] [project.urls] diff --git a/requirements.txt b/requirements.txt index a37d915..f140e6e 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,3 @@ arrow==1.2.3 +jsonschema==4.17.3 pytimeparse==1.1.8 diff --git a/src/implicitdict/jsonschema.py b/src/implicitdict/jsonschema.py new file mode 100644 index 0000000..422183a --- /dev/null +++ b/src/implicitdict/jsonschema.py @@ -0,0 +1,205 @@ +import inspect +from dataclasses import dataclass +from datetime import datetime +import enum +import json +import re +from typing import get_args, get_origin, get_type_hints, Dict, Literal, Optional, Type, Union, Tuple, Callable + +from . import ImplicitDict, _fullname, _get_fields, StringBasedDateTime, StringBasedTimeDelta + + +@dataclass +class SchemaVars(object): + name: str + """Unique name that can be used to reference this type/schema.""" + + path_to: Optional[Callable[[Type, Type], str]] = None + """Function to compute $ref path to schema describing the first type from the schema describing the second type""" + + schema_id: Optional[str] = None + """ID of the schema describing this type. Will be used to populate $schema.""" + + description: Optional[str] = None + """Description of this type/schema.""" + + +SchemaVarsResolver = Callable[[Type], SchemaVars] +"""Function producing the characteristics of a schema (SchemaVars) for a given Type.""" + +_implicitdict_doc = inspect.getdoc(ImplicitDict) + + +def make_json_schema( + schema_type: Type[ImplicitDict], + schema_vars_resolver: SchemaVarsResolver, + schema_repository: Dict[str, dict], +) -> None: + """Create JSON Schema for the specified schema type and all dependencies. + + Args: + schema_type: ImplicitDict subclass to produce JSON Schema for. + schema_vars_resolver: Mapping between Python Type and characteristics of the schema for that type. + schema_repository: Mapping from reference path (see reference_resolver) to JSON Schema for the corresponding + type. The schema for schema_type will be populated in this repository, along with all other nested types. 
+ """ + schema_vars = schema_vars_resolver(schema_type) + if schema_vars.name in schema_repository: + return + + # Add placeholder to avoid recursive definition attempts while we're making this schema + schema_repository[schema_vars.name] = {"$generating": True} + + properties = {"$ref": {"type": "string", "description": "Path to content that replaces the $ref"}} + all_fields, optional_fields = _get_fields(schema_type) + required_fields = [] + hints = get_type_hints(schema_type) + field_docs = _field_docs_for(schema_type) + for field in all_fields: + if field in hints: + value_type = hints[field] + else: + # See if this field has a default + if hasattr(schema_type, field): + value_type = type(getattr(schema_type, field)) + else: + raise ValueError(f"Could not make JSON Schema for {_fullname(schema_type)} because field `{field}` does not have type hints nor default values") + + try: + properties[field], is_optional = _schema_for(value_type, schema_vars_resolver, schema_repository, schema_type) + if not is_optional and not hasattr(schema_type, field): + required_fields.append(field) + except NotImplementedError as e: + # Simply omit fields with types that we can't describe with jsonschema + print(f"Warning: Omitting {schema_type.__name__}.{field} from definition because: {e}") + continue + + if field in field_docs: + properties[field]["description"] = field_docs[field] + + schema = { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "type": "object", + "properties": properties + } + if schema_vars.schema_id is not None: + schema["$id"] = schema_vars.schema_id + + docs = inspect.getdoc(schema_type) + if docs != _implicitdict_doc: + if schema_vars.description is not None: + schema["description"] = docs + "\n\n" + schema_vars.description + else: + schema["description"] = docs + elif schema_vars.description is not None: + schema["description"] = schema_vars.description + + if required_fields: + required_fields.sort() + schema["required"] = required_fields + + schema_repository[schema_vars.name] = schema + + +def _schema_for(value_type: Type, schema_vars_resolver: SchemaVarsResolver, schema_repository: Dict[str, dict], context: Type) -> Tuple[dict, bool]: + """Get the JSON Schema representation of the value_type. + + Args: + value_type: Data type for which to return a JSON Schema. + schema_vars_resolver: Mapping from data type to information about the schema for that data type. + schema_repository: If the schema for this data type needs to refer to schemas for other data types, those data + types must already be present in this repository or else they will be added to it during this function. + context: The parent/top-level JSON Schema in which the schema for value_type is being included (affects the $ref + paths used to refer to external schemas). + + Returns: + * JSON Schema representation of the value_type + * Boolean indication of whether the value_type is optional as a field in an ImplicitDict. + E.g., _schema_for(Optional[float], ...) would indicate True because an Optional[float] field within an + ImplicitDict would be an optional field in that object. 
+ """ + generic_type = get_origin(value_type) + if generic_type: + # Type is generic + arg_types = get_args(value_type) + if generic_type is list: + items_schema, _ = _schema_for(arg_types[0], schema_vars_resolver, schema_repository, context) + return {"type": "array", "items": items_schema}, False + + elif generic_type is dict: + schema = { + "type": "object", + "properties": { + "$ref": {"type": "string", "description": "Path to content that replaces the $ref"} + } + } + if len(arg_types) >= 2: + value_schema, _ = _schema_for(arg_types[1], schema_vars_resolver, schema_repository, context) + schema["additionalProperties"] = value_schema + return schema, False + + elif generic_type is Union and len(arg_types) == 2 and arg_types[1] is type(None): + # Type is an Optional declaration + subschema, _ = _schema_for(arg_types[0], schema_vars_resolver, schema_repository, context) + schema = json.loads(json.dumps(subschema)) + if "type" in schema: + if "null" not in schema["type"]: + schema["type"] = [schema["type"], "null"] + else: + schema = {"oneOf": [{"type": "null"}, schema]} + return schema, True + + elif generic_type is Literal and len(arg_types) == 1: + # Type is a Literal (parsed value must match specified value) + return {"type": "string", "enum": [arg_types[0]]}, False + + else: + raise NotImplementedError(f"Automatic JSON schema generation for {value_type} generic type is not yet implemented") + + schema_vars = schema_vars_resolver(value_type) + + if issubclass(value_type, ImplicitDict): + make_json_schema(value_type, schema_vars_resolver, schema_repository) + return {"$ref": schema_vars.path_to(value_type, context)}, False + + if value_type == float or issubclass(value_type, float): + return {"type": "number"}, False + + if value_type == int or issubclass(value_type, int): + return {"type": "integer"}, False + + if value_type == str or issubclass(value_type, str): + schema = {"type": "string"} + if issubclass(value_type, StringBasedDateTime): + schema["format"] = "date-time" + elif issubclass(value_type, StringBasedTimeDelta): + schema["format"] = "duration" + if issubclass(value_type, enum.Enum): + schema["enum"] = [v.value for v in value_type] + return schema, False + + if value_type == datetime or issubclass(value_type, datetime): + return {"type": "string", "format": "date-time"}, False + + if value_type == dict or issubclass(value_type, dict): + return {"type": "object"}, False + + raise NotImplementedError(f"Automatic JSON schema generation for {value_type} type is not yet implemented") + + +def _field_docs_for(t: Type[ImplicitDict]) -> Dict[str, str]: + # Curse Guido for rejecting PEP224! Fine, we'll do it ourselves. 
+ result = {} + src = inspect.getsource(t) + doc_pattern = r"\n\s+([_a-zA-Z][_a-zA-Z0-9]*)(?:: [^\n]+)?\n(\s+)(?:\"\"\"|''')((?:.|\s)*?)(?:\"\"\"|''')" + for m in re.finditer(doc_pattern, src): + indent = m.group(2) + lines = m.group(3).split("\n") + for i in range(1, len(lines)): + if lines[i].startswith(indent): + lines[i] = lines[i][len(indent):] + while not lines[-1]: + lines = lines[0:-1] + docstring = "\n".join(lines) + result[m.group(1)] = docstring + return result diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/test_containers.py b/tests/test_containers.py index 2df4065..b73733c 100644 --- a/tests/test_containers.py +++ b/tests/test_containers.py @@ -1,31 +1,8 @@ -from typing import List, Optional - -from implicitdict import ImplicitDict - - -class MySpecialClass(str): - @property - def is_special(self) -> bool: - return True - - -class MyContainers(ImplicitDict): - single_value: MySpecialClass - value_list: List[MySpecialClass] - optional_list: Optional[List[MySpecialClass]] - optional_value_list: List[Optional[MySpecialClass]] - list_of_lists: List[List[MySpecialClass]] +from .test_types import ContainerData def test_container_item_value_casting(): - containers: MyContainers = ImplicitDict.parse( - { - "single_value": "foo", - "value_list": ["value1", "value2"], - "optional_list": ["bar"], - "optional_value_list": ["baz", None], - "list_of_lists": [["list1v1", "list1v2"], ["list2v1"]] - }, MyContainers) + containers: ContainerData = ContainerData.example_value() assert containers.single_value.is_special diff --git a/tests/test_inheritance.py b/tests/test_inheritance.py index f588cec..a655d34 100644 --- a/tests/test_inheritance.py +++ b/tests/test_inheritance.py @@ -1,32 +1,12 @@ import json -from typing import Optional from implicitdict import ImplicitDict - -class MyData(ImplicitDict): - foo: str - bar: int = 0 - baz: Optional[float] - has_default_baseclass: str = "In MyData" - - def hello(self) -> str: - return "MyData" - - def base_method(self) -> int: - return 123 - - -class MySubclass(MyData): - buzz: Optional[str] - has_default_subclass: str = "In MySubclass" - - def hello(self) -> str: - return "MySubclass" +from .test_types import InheritanceData, MySubclass def test_inheritance(): - data: MyData = ImplicitDict.parse({'foo': 'asdf', 'bar': 1}, MyData) + data: InheritanceData = InheritanceData.example_value() assert json.loads(json.dumps(data)) == {"foo": "asdf", "bar": 1, "has_default_baseclass": "In MyData"} assert data.hello() == "MyData" assert data.has_default_baseclass == "In MyData" @@ -56,7 +36,7 @@ def test_inheritance(): subclass.has_default_baseclass = "In MyData 3" subclass.has_default_subclass = "In MySubclass 3" - data2 = MyData(subclass) + data2 = InheritanceData(subclass) assert data2.foo == "asdf" assert data2.bar == 1 assert "baz" not in data2 diff --git a/tests/test_jsonschema.py b/tests/test_jsonschema.py new file mode 100644 index 0000000..b094d71 --- /dev/null +++ b/tests/test_jsonschema.py @@ -0,0 +1,94 @@ +import json +import os.path +from typing import Type + +import implicitdict.jsonschema +from implicitdict.jsonschema import SchemaVars +from implicitdict import ImplicitDict +import jsonschema + +from .test_types import ContainerData, InheritanceData, NestedDefinitionsData, NormalUsageData, OptionalData, PropertiesData, SpecialTypesData + + +def _resolver(t: Type) -> SchemaVars: + def path_to(t_dest: Type, t_src: Type) -> str: + return "#/definitions/" + t_dest.__module__ + 
t_dest.__qualname__ + + full_name = t.__module__ + t.__qualname__ + + return SchemaVars(name=full_name, path_to=path_to) + + +def _verify_schema_validation(obj, obj_type: Type[ImplicitDict]) -> None: + repo = {} + implicitdict.jsonschema.make_json_schema(obj_type, _resolver, repo) + + name = _resolver(obj_type).name + schema = repo[name] + del repo[name] + if repo: + schema["definitions"] = repo + + jsonschema.Draft202012Validator.check_schema(schema) + validator = jsonschema.Draft202012Validator(schema) + + pure_json = json.loads(json.dumps(obj)) + + error_messages = [] + for e in validator.iter_errors(pure_json): + errors = [e] + while errors: + this_error = errors.pop(0) + if this_error.context: + for child in this_error.context: + errors.append(child) + else: + error_messages.append(f"[{this_error.json_path}] {this_error.message}") + assert not error_messages, "\n".join(error_messages) + + +def test_basic_usage(): + data: NormalUsageData = ImplicitDict.parse({'foo': 'asdf', 'bar': 1}, NormalUsageData) + _verify_schema_validation(data, NormalUsageData) + + +def test_field_docstrings(): + repo = {} + implicitdict.jsonschema.make_json_schema(NormalUsageData, _resolver, repo) + name = _resolver(NormalUsageData).name + schema = repo[name] + props = schema["properties"] + + assert props["foo"]["description"] == "The foo characterizing the data." + assert props["bar"]["description"] == "The bar of the data.\n\nIndents should not be included in docstrings." + assert props["baz"]["description"] == "If this baz is specified, it provides additional information.\n\nFinal docstring newlines should be omitted." + + +def test_containers(): + containers: ContainerData = ContainerData.example_value() + _verify_schema_validation(containers, ContainerData) + + +def test_inheritance(): + data = InheritanceData.example_value() + _verify_schema_validation(data, InheritanceData) + + +def test_optional(): + for data in OptionalData.example_values().values(): + _verify_schema_validation(data, OptionalData) + + +def test_properties(): + data = PropertiesData.example_value() + _verify_schema_validation(data, PropertiesData) + + +def test_special_types(): + data = SpecialTypesData.example_value() + _verify_schema_validation(data, SpecialTypesData) + + +def test_nested_definitions(): + data = NestedDefinitionsData.example_value() + _verify_schema_validation(data, NestedDefinitionsData) diff --git a/tests/test_mutability.py b/tests/test_mutability.py index 31801cf..e48c3d0 100644 --- a/tests/test_mutability.py +++ b/tests/test_mutability.py @@ -1,20 +1,13 @@ -from typing import Optional, List - from implicitdict import ImplicitDict - -class MyData(ImplicitDict): - primitive: str - list_of_primitives: List[str] - generic_dict: dict - subtype: Optional["MyData"] +from .test_types import MutabilityData def test_mutability_from_constructor(): primitive = 'foobar' primitive_list = ['one', 'two'] generic_dict = {'level1': 'foo', 'level2': {'bar': 'baz'}} - data = MyData(primitive=primitive, list_of_primitives=primitive_list, generic_dict=generic_dict) + data = MutabilityData(primitive=primitive, list_of_primitives=primitive_list, generic_dict=generic_dict) assert data.primitive == primitive assert data.list_of_primitives == primitive_list assert data.generic_dict == generic_dict @@ -36,8 +29,8 @@ def test_mutability_from_parse(): primitive = 'foobar' primitive_list = ['one', 'two'] generic_dict = {'level1': 'foo', 'level2': {'bar': 'baz'}} - data_source = MyData(primitive=primitive, list_of_primitives=primitive_list, 
generic_dict=generic_dict) - data: MyData = ImplicitDict.parse(data_source, MyData) + data_source = MutabilityData(primitive=primitive, list_of_primitives=primitive_list, generic_dict=generic_dict) + data: MutabilityData = ImplicitDict.parse(data_source, MutabilityData) assert data.primitive == primitive assert data.list_of_primitives == primitive_list assert data.generic_dict == generic_dict diff --git a/tests/test_normal_usage.py b/tests/test_normal_usage.py index 4eb12b7..b6a0f29 100644 --- a/tests/test_normal_usage.py +++ b/tests/test_normal_usage.py @@ -5,16 +5,12 @@ from implicitdict import ImplicitDict, StringBasedDateTime, StringBasedTimeDelta - -class MyData(ImplicitDict): - foo: str - bar: int = 0 - baz: Optional[float] +from .test_types import NormalUsageData def test_basic_usage(): # Most basic usage is to parse a plain dict into an ImplicitDict... - data: MyData = ImplicitDict.parse({'foo': 'asdf', 'bar': 1}, MyData) + data: NormalUsageData = ImplicitDict.parse({'foo': 'asdf', 'bar': 1}, NormalUsageData) # ...and implicitly serialize the ImplicitDict to a plain dict assert json.dumps(data) == '{"foo": "asdf", "bar": 1}' @@ -31,18 +27,18 @@ def test_basic_usage(): assert data.baz == 0 # Optional fields can be omitted (fields with defaults are optional) - data = MyData(foo='asdf') + data = NormalUsageData(foo='asdf') assert json.loads(json.dumps(data)) == {'foo': 'asdf', 'bar': 0} # Optional fields can be specified - data = ImplicitDict.parse({'foo': 'asdf', 'baz': 1.23}, MyData) + data = ImplicitDict.parse({'foo': 'asdf', 'baz': 1.23}, NormalUsageData) assert json.loads(json.dumps(data)) == {'foo': 'asdf', 'bar': 0, 'baz': 1.23} assert 'baz' in data assert data.baz == 1.23 # Failing to specify a required field ("foo") raises a ValueError with pytest.raises(ValueError): - MyData(bar=1) + NormalUsageData(bar=1) class MyIntEnum(int, Enum): @@ -63,7 +59,7 @@ class Features(ImplicitDict): t_start: StringBasedDateTime my_duration: StringBasedTimeDelta my_literal: Literal['Must be this string'] - nested: Optional[MyData] + nested: Optional[NormalUsageData] def test_features(): @@ -106,7 +102,7 @@ def test_features(): class NestedStructures(ImplicitDict): - my_list: List[MyData] + my_list: List[NormalUsageData] my_list_2: List[List[int]] my_list_3: List[List[List[int]]] my_dict: Dict[str, List[float]] diff --git a/tests/test_optional.py b/tests/test_optional.py index efcf5b6..53663aa 100644 --- a/tests/test_optional.py +++ b/tests/test_optional.py @@ -1,27 +1,14 @@ import json -from typing import Optional import pytest from implicitdict import ImplicitDict - -class MyData(ImplicitDict): - required_field: str - optional_field1: Optional[str] - field_with_default: str = "default value" - optional_field2_with_none_default: Optional[str] = None - optional_field3_with_default: Optional[str] = "concrete default" +from .test_types import OptionalData def test_fully_defined(): - data = MyData( - required_field="foo1", - optional_field1="foo2", - field_with_default="foo3", - optional_field2_with_none_default="foo4", - optional_field3_with_default="foo5", - ) + data = OptionalData.example_values()["fully_defined"] assert "required_field" in data assert "optional_field1" in data assert "field_with_default" in data @@ -41,22 +28,15 @@ def test_fully_defined(): def test_over_defined(): - data = MyData( - required_field="foo1", - optional_field1="foo2", - field_with_default="foo3", - optional_field2_with_none_default="foo4", - optional_field3_with_default="foo5", - unknown_field="foo6", - ) + data = 
OptionalData.example_values()["over_defined"] d = json.loads(json.dumps(data)) d["another_unknown_field"] = {"third_unknown_field": "foo7"} - _ = ImplicitDict.parse(d, MyData) + _ = ImplicitDict.parse(d, OptionalData) def test_minimally_defined(): # An unspecified optional field will not be present in the object at all - data = MyData(required_field="foo1") + data = OptionalData.example_values()["minimally_defined"] assert "required_field" in data assert "optional_field1" not in data assert "field_with_default" in data @@ -77,7 +57,7 @@ def test_minimally_defined(): def test_provide_optional_field(): - data = MyData(required_field="foo1", optional_field1="foo2") + data = OptionalData.example_values()["provide_optional_field"] assert "required_field" in data assert "optional_field1" in data assert "field_with_default" in data @@ -95,7 +75,7 @@ def test_provide_optional_field(): def test_provide_optional_field_as_none(): # If an optional field with no default is explicitly provided as None, then that field will not be included in the object - data = MyData(required_field="foo1", optional_field1=None) + data = OptionalData.example_values()["provide_optional_field_as_none"] assert "required_field" in data assert "optional_field1" not in data # <-- assert "field_with_default" in data @@ -112,7 +92,7 @@ def test_provide_optional_field_as_none(): def test_provide_optional_field_with_none_default_as_none(): # If a field has a default value, the field will always be present in the object, even if that default value is None and the field is Optional - data = MyData(required_field="foo1", optional_field2_with_none_default=None) + data = OptionalData.example_values()["provide_optional_field_with_none_default_as_none"] assert "required_field" in data assert "optional_field1" not in data assert "field_with_default" in data @@ -129,7 +109,7 @@ def test_provide_optional_field_with_none_default_as_none(): def test_provide_optional_field_with_default_as_none(): # If a field has a default value, the field will always be present in the object - data = MyData(required_field="foo1", optional_field3_with_default=None) + data = OptionalData.example_values()["provide_optional_field_with_default_as_none"] assert "required_field" in data assert "optional_field1" not in data assert "field_with_default" in data diff --git a/tests/test_properties.py b/tests/test_properties.py index 515484e..add95e4 100644 --- a/tests/test_properties.py +++ b/tests/test_properties.py @@ -2,29 +2,7 @@ from implicitdict import ImplicitDict - -class MyData(ImplicitDict): - foo: str - - @property - def bar(self) -> str: - return self.foo + 'bar' - - def get_baz(self) -> str: - return self.foo + 'baz' - - def set_baz(self, value: str) -> None: - self.foo = value - - baz = property(get_baz, set_baz) - - @property - def booz(self) -> str: - return self.foo + 'booz' - - @booz.setter - def booz(self, value: str) -> None: - self.foo = value +from .test_types import PropertiesData def test_property_exclusion(): @@ -37,7 +15,7 @@ def test_property_exclusion(): setter. 
""" # Create class instance and ensure it works as expected - data = MyData(foo='foo') + data = PropertiesData.example_value() assert data.bar == 'foobar' assert data.baz == 'foobaz' assert data.booz == 'foobooz' @@ -49,7 +27,7 @@ def test_property_exclusion(): assert 'booz' not in obj # Ensure serialization can be deserialized to class instance - data: MyData = ImplicitDict.parse(obj, MyData) + data: PropertiesData = ImplicitDict.parse(obj, PropertiesData) # Ensure deserialized instance works as expected assert data.bar == 'foobar' diff --git a/tests/test_types.py b/tests/test_types.py new file mode 100644 index 0000000..a5b6a19 --- /dev/null +++ b/tests/test_types.py @@ -0,0 +1,162 @@ +import enum +from datetime import datetime +from typing import Optional, List + +from implicitdict import ImplicitDict, StringBasedDateTime, StringBasedTimeDelta + + +class MySpecialClass(str): + @property + def is_special(self) -> bool: + return True + + +class ContainerData(ImplicitDict): + single_value: MySpecialClass + value_list: List[MySpecialClass] + optional_list: Optional[List[MySpecialClass]] + optional_value_list: List[Optional[MySpecialClass]] + list_of_lists: List[List[MySpecialClass]] + + @staticmethod + def example_value(): + return ImplicitDict.parse( + { + "single_value": "foo", + "value_list": ["value1", "value2"], + "optional_list": ["bar"], + "optional_value_list": ["baz", None], + "list_of_lists": [["list1v1", "list1v2"], ["list2v1"]] + }, ContainerData) + + +class InheritanceData(ImplicitDict): + foo: str + bar: int = 0 + baz: Optional[float] + has_default_baseclass: str = "In MyData" + + def hello(self) -> str: + return "MyData" + + def base_method(self) -> int: + return 123 + + @staticmethod + def example_value(): + return ImplicitDict.parse({'foo': 'asdf', 'bar': 1}, InheritanceData) + + +class MySubclass(InheritanceData): + buzz: Optional[str] + has_default_subclass: str = "In MySubclass" + + def hello(self) -> str: + return "MySubclass" + + +class MutabilityData(ImplicitDict): + primitive: str + list_of_primitives: List[str] + generic_dict: dict + subtype: Optional["MutabilityData"] + + +class NormalUsageData(ImplicitDict): + foo: str + """The foo characterizing the data.""" + + bar: int = 0 + """The bar of the data. + + Indents should not be included in docstrings.""" + + baz: Optional[float] + """If this baz is specified, it provides additional information. + + Final docstring newlines should be omitted. 
+ """ + + +class OptionalData(ImplicitDict): + required_field: str + optional_field1: Optional[str] + field_with_default: str = "default value" + optional_field2_with_none_default: Optional[str] = None + optional_field3_with_default: Optional[str] = "concrete default" + + @staticmethod + def example_values(): + return { + "fully_defined": OptionalData( + required_field="foo1", + optional_field1="foo2", + field_with_default="foo3", + optional_field2_with_none_default="foo4", + optional_field3_with_default="foo5", + ), + "over_defined": OptionalData( + required_field="foo1", + optional_field1="foo2", + field_with_default="foo3", + optional_field2_with_none_default="foo4", + optional_field3_with_default="foo5", + unknown_field="foo6", + ), + "minimally_defined": OptionalData(required_field="foo1"), + "provide_optional_field": OptionalData(required_field="foo1", optional_field1="foo2"), + "provide_optional_field_as_none": OptionalData(required_field="foo1", optional_field1=None), + "provide_optional_field_with_none_default_as_none": OptionalData(required_field="foo1", optional_field2_with_none_default=None), + "provide_optional_field_with_default_as_none": OptionalData(required_field="foo1", optional_field3_with_default=None) + } + + +class PropertiesData(ImplicitDict): + foo: str + + @property + def bar(self) -> str: + return self.foo + 'bar' + + def get_baz(self) -> str: + return self.foo + 'baz' + + def set_baz(self, value: str) -> None: + self.foo = value + + baz = property(get_baz, set_baz) + + @property + def booz(self) -> str: + return self.foo + 'booz' + + @booz.setter + def booz(self, value: str) -> None: + self.foo = value + + @staticmethod + def example_value(): + return PropertiesData(foo='foo') + + +class YesNo(str, enum.Enum): + YesOption = "Yes" + NoOption = "No" + + +class SpecialTypesData(ImplicitDict): + datetime: StringBasedDateTime + timedelta: StringBasedTimeDelta + yesno: YesNo + + @staticmethod + def example_value(): + return ImplicitDict.parse({"datetime": datetime.utcnow().isoformat(), "timedelta": "12h", "yesno": "Yes"}, SpecialTypesData) + + +class NestedDefinitionsData(ImplicitDict): + special_types: SpecialTypesData + + @staticmethod + def example_value(): + return ImplicitDict.parse({"special_types": {"datetime": datetime.utcnow().isoformat(), "timedelta": "12h", "yesno": "Yes"}}, NestedDefinitionsData)
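
Usage sketch (reviewer note, not part of the patch). The Point class, the resolver, and the repo variable below are illustrative only; make_json_schema, SchemaVars, and SchemaVarsResolver are the API this patch adds, and the pattern mirrors tests/test_jsonschema.py above.

from typing import Optional, Type

import implicitdict.jsonschema
from implicitdict import ImplicitDict
from implicitdict.jsonschema import SchemaVars


class Point(ImplicitDict):
    x: float
    y: float
    label: Optional[str]
    """Optional display label for this point."""


def resolver(t: Type) -> SchemaVars:
    # Name every schema after its fully-qualified type and point $refs at a
    # shared "definitions" section, as the tests above do.
    def path_to(t_dest: Type, t_src: Type) -> str:
        return "#/definitions/" + t_dest.__module__ + t_dest.__qualname__
    return SchemaVars(name=t.__module__ + t.__qualname__, path_to=path_to)


repo = {}
implicitdict.jsonschema.make_json_schema(Point, resolver, repo)
# repo now maps the resolver-produced name for Point (and for any nested
# ImplicitDict types, had there been any) to a draft 2020-12 schema in which
# "x" and "y" are required numbers and "label" is a nullable string carrying
# the field docstring as its "description".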
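
To turn the repository into one self-contained document, hoist every entry except the root schema into "definitions" (that is where the path_to function above points its $refs), the same way _verify_schema_validation does in tests/test_jsonschema.py. For the lone Point type this is a no-op because nothing is nested.

name = resolver(Point).name
schema = repo.pop(name)
if repo:
    schema["definitions"] = repo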
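
An ImplicitDict instance serializes as a plain dict, so a round trip through json yields something the generated schema can validate directly (again following the test harness; the jsonschema calls below are the standard draft 2020-12 validator API):

import json
import jsonschema

point = Point(x=1.0, y=2.5, label="origin")
pure_json = json.loads(json.dumps(point))

jsonschema.Draft202012Validator.check_schema(schema)
jsonschema.Draft202012Validator(schema).validate(pure_json)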