From 7c013966612fce810e454b14d6d07980a9ad6aa2 Mon Sep 17 00:00:00 2001 From: Jakub Frejlach <jfrejlac@redhat.com> Date: Wed, 18 Dec 2024 18:15:17 +0100 Subject: [PATCH] Regenerate bindings --- osidb_bindings/bindings/.gitignore | 2 +- osidb_bindings/bindings/README.md | 55 +- osidb_bindings/bindings/pyproject.toml | 32 +- .../bindings/python_client/__init__.py | 8 +- .../bindings/python_client/api/__init__.py | 2 +- .../api/auth/auth_token_create.py | 185 +- .../api/auth/auth_token_refresh_create.py | 185 +- .../api/auth/auth_token_retrieve.py | 80 +- .../api/auth/auth_token_verify_create.py | 185 +- .../collectors_api_v1_status_retrieve.py | 84 +- .../collectors/collectors_healthy_retrieve.py | 80 +- .../api/collectors/collectors_retrieve.py | 80 +- .../exploits_api_v1_collect_update.py | 88 +- .../exploits_api_v1_cve_map_retrieve.py | 116 +- .../api/exploits/exploits_api_v1_epss_list.py | 122 +- .../exploits_api_v1_flaw_data_list.py | 122 +- .../exploits_api_v1_report_data_list.py | 126 +- .../exploits_api_v1_report_date_retrieve.py | 133 +- ...its_api_v1_report_explanations_retrieve.py | 106 +- ...exploits_api_v1_report_pending_retrieve.py | 114 +- .../exploits_api_v1_status_retrieve.py | 88 +- ...exploits_api_v1_supported_products_list.py | 126 +- .../osidb/osidb_api_v1_affects_bulk_create.py | 190 +- .../osidb_api_v1_affects_bulk_destroy.py | 97 +- .../osidb/osidb_api_v1_affects_bulk_update.py | 194 +- .../api/osidb/osidb_api_v1_affects_create.py | 177 +- ...osidb_api_v1_affects_cvss_scores_create.py | 195 +- ...sidb_api_v1_affects_cvss_scores_destroy.py | 121 +- .../osidb_api_v1_affects_cvss_scores_list.py | 655 ++- ...idb_api_v1_affects_cvss_scores_retrieve.py | 166 +- ...osidb_api_v1_affects_cvss_scores_update.py | 201 +- .../api/osidb/osidb_api_v1_affects_destroy.py | 111 +- .../api/osidb/osidb_api_v1_affects_list.py | 2458 +++++---- .../osidb/osidb_api_v1_affects_retrieve.py | 182 +- .../api/osidb/osidb_api_v1_affects_update.py | 195 +- .../api/osidb/osidb_api_v1_alerts_list.py | 288 +- .../api/osidb/osidb_api_v1_alerts_retrieve.py | 156 +- .../api/osidb/osidb_api_v1_audit_list.py | 196 +- .../api/osidb/osidb_api_v1_audit_retrieve.py | 95 +- .../api/osidb/osidb_api_v1_audit_update.py | 180 +- ...idb_api_v1_flaws_acknowledgments_create.py | 195 +- ...db_api_v1_flaws_acknowledgments_destroy.py | 121 +- ...osidb_api_v1_flaws_acknowledgments_list.py | 616 ++- ...b_api_v1_flaws_acknowledgments_retrieve.py | 166 +- ...idb_api_v1_flaws_acknowledgments_update.py | 201 +- .../osidb_api_v1_flaws_comments_create.py | 192 +- .../osidb/osidb_api_v1_flaws_comments_list.py | 266 +- .../osidb_api_v1_flaws_comments_retrieve.py | 163 +- .../api/osidb/osidb_api_v1_flaws_create.py | 177 +- .../osidb_api_v1_flaws_cvss_scores_create.py | 195 +- .../osidb_api_v1_flaws_cvss_scores_destroy.py | 121 +- .../osidb_api_v1_flaws_cvss_scores_list.py | 655 ++- ...osidb_api_v1_flaws_cvss_scores_retrieve.py | 166 +- .../osidb_api_v1_flaws_cvss_scores_update.py | 201 +- .../api/osidb/osidb_api_v1_flaws_list.py | 4518 +++++++++-------- ...db_api_v1_flaws_package_versions_create.py | 195 +- ...b_api_v1_flaws_package_versions_destroy.py | 121 +- ...sidb_api_v1_flaws_package_versions_list.py | 599 ++- ..._api_v1_flaws_package_versions_retrieve.py | 166 +- ...db_api_v1_flaws_package_versions_update.py | 201 +- .../osidb_api_v1_flaws_promote_create.py | 114 +- .../osidb_api_v1_flaws_references_create.py | 195 +- .../osidb_api_v1_flaws_references_destroy.py | 121 +- .../osidb_api_v1_flaws_references_list.py | 637 ++- 
.../osidb_api_v1_flaws_references_retrieve.py | 166 +- .../osidb_api_v1_flaws_references_update.py | 201 +- .../osidb/osidb_api_v1_flaws_reject_create.py | 196 +- .../api/osidb/osidb_api_v1_flaws_retrieve.py | 216 +- .../api/osidb/osidb_api_v1_flaws_update.py | 205 +- .../osidb/osidb_api_v1_manifest_retrieve.py | 84 +- .../api/osidb/osidb_api_v1_schema_retrieve.py | 138 +- .../api/osidb/osidb_api_v1_status_retrieve.py | 80 +- .../api/osidb/osidb_api_v1_trackers_create.py | 177 +- .../api/osidb/osidb_api_v1_trackers_list.py | 2047 ++++---- .../osidb/osidb_api_v1_trackers_retrieve.py | 186 +- .../api/osidb/osidb_api_v1_trackers_update.py | 195 +- .../api/osidb/osidb_healthy_retrieve.py | 80 +- .../api/osidb/osidb_whoami_retrieve.py | 80 +- .../trackers/trackers_api_v1_file_create.py | 173 +- ...orkflows_api_v1_workflows_adjust_create.py | 111 +- .../workflows_api_v1_workflows_retrieve.py | 84 +- .../workflows_api_v1_workflows_retrieve_2.py | 130 +- .../workflows/workflows_healthy_retrieve.py | 80 +- .../api/workflows/workflows_retrieve.py | 80 +- .../bindings/python_client/client.py | 12 +- .../bindings/python_client/errors.py | 16 + .../bindings/python_client/models/__init__.py | 216 +- .../bindings/python_client/models/affect.py | 422 +- .../models/affect_bulk_post_put_response.py | 57 +- .../python_client/models/affect_bulk_put.py | 273 +- .../python_client/models/affect_cvss.py | 137 +- .../python_client/models/affect_cvss_post.py | 186 +- .../python_client/models/affect_cvss_put.py | 193 +- .../python_client/models/affect_post.py | 415 +- .../models/affect_report_data.py | 83 +- .../python_client/models/affectedness_enum.py | 2 +- .../bindings/python_client/models/alert.py | 71 +- .../python_client/models/alert_type_enum.py | 2 +- .../bindings/python_client/models/audit.py | 119 +- .../models/auth_token_create_response_200.py | 36 +- .../auth_token_refresh_create_response_200.py | 32 +- .../auth_token_retrieve_response_200.py | 32 +- .../auth_token_verify_create_response_200.py | 30 +- ...ors_api_v1_status_retrieve_response_200.py | 80 +- ...s_retrieve_response_200_collectors_item.py | 152 +- ...rieve_response_200_collectors_item_data.py | 2 +- ...ponse_200_collectors_item_error_type_0.py} | 30 +- ...ieve_response_200_collectors_item_state.py | 2 +- ...ollectors_healthy_retrieve_response_200.py | 28 +- .../collectors_retrieve_response_200.py | 37 +- .../bindings/python_client/models/comment.py | 116 +- .../bindings/python_client/models/epss.py | 24 +- .../bindings/python_client/models/erratum.py | 85 +- .../models/exploit_only_report_data.py | 84 +- .../exploit_only_report_data_source_enum.py | 2 +- ...oits_api_v1_collect_update_response_200.py | 30 +- ...ts_api_v1_cve_map_retrieve_response_200.py | 51 +- ...i_v1_cve_map_retrieve_response_200_cves.py | 18 +- .../exploits_api_v1_epss_list_response_200.py | 116 +- ...oits_api_v1_flaw_data_list_response_200.py | 116 +- ...ts_api_v1_report_data_list_response_200.py | 116 +- ...pi_v1_report_date_retrieve_response_200.py | 176 +- ...rieve_response_200_action_required_item.py | 22 +- ...te_retrieve_response_200_no_action_item.py | 22 +- ...retrieve_response_200_not_relevant_item.py | 22 +- ...port_explanations_retrieve_response_200.py | 86 +- ...retrieve_response_200_explanations_item.py | 34 +- ...v1_report_pending_retrieve_response_200.py | 82 +- ...rieve_response_200_pending_actions_item.py | 30 +- ...its_api_v1_status_retrieve_response_200.py | 34 +- ...v1_supported_products_list_response_200.py | 116 +- 
.../bindings/python_client/models/flaw.py | 898 ++-- .../models/flaw_acknowledgment.py | 113 +- .../models/flaw_acknowledgment_post.py | 144 +- .../models/flaw_acknowledgment_put.py | 151 +- .../models/flaw_classification.py | 26 +- .../models/flaw_classification_state.py | 8 +- .../python_client/models/flaw_comment.py | 118 +- .../python_client/models/flaw_comment_post.py | 140 +- .../python_client/models/flaw_cvss.py | 137 +- .../python_client/models/flaw_cvss_post.py | 186 +- .../python_client/models/flaw_cvss_put.py | 193 +- .../models/flaw_package_version.py | 107 +- .../models/flaw_package_version_post.py | 128 +- .../models/flaw_package_version_put.py | 135 +- .../python_client/models/flaw_post.py | 891 ++-- .../models/flaw_post_classification.py | 26 +- .../models/flaw_post_classification_state.py | 8 +- .../python_client/models/flaw_reference.py | 138 +- .../models/flaw_reference_post.py | 168 +- .../models/flaw_reference_put.py | 175 +- .../python_client/models/flaw_report_data.py | 81 +- .../python_client/models/flaw_uuid_list.py | 77 +- .../python_client/models/flaw_version.py | 23 +- .../python_client/models/impact_enum.py | 4 +- .../python_client/models/issuer_enum.py | 2 +- .../models/major_incident_state_enum.py | 6 +- .../python_client/models/module_component.py | 72 +- .../models/nist_cvss_validation_enum.py | 2 +- ...api_v1_affects_bulk_create_response_200.py | 70 +- ...pi_v1_affects_bulk_destroy_response_200.py | 28 +- ...api_v1_affects_bulk_update_response_200.py | 70 +- ...sidb_api_v1_affects_create_response_201.py | 280 +- ...affects_cvss_scores_create_response_201.py | 144 +- ...ffects_cvss_scores_destroy_response_200.py | 28 +- ...1_affects_cvss_scores_list_response_200.py | 116 +- ...fects_cvss_scores_retrieve_response_200.py | 144 +- ...affects_cvss_scores_update_response_200.py | 144 +- ...idb_api_v1_affects_destroy_response_200.py | 28 +- .../osidb_api_v1_affects_list_affectedness.py | 2 +- .../osidb_api_v1_affects_list_flaw_impact.py | 2 +- .../osidb_api_v1_affects_list_flaw_source.py | 2 +- .../osidb_api_v1_affects_list_impact.py | 2 +- .../osidb_api_v1_affects_list_order_item.py | 74 +- .../osidb_api_v1_affects_list_resolution.py | 2 +- .../osidb_api_v1_affects_list_response_200.py | 116 +- ...db_api_v1_affects_retrieve_response_200.py | 280 +- ...sidb_api_v1_affects_update_response_200.py | 280 +- .../osidb_api_v1_alerts_list_response_200.py | 116 +- ...idb_api_v1_alerts_retrieve_response_200.py | 78 +- .../osidb_api_v1_audit_list_response_200.py | 116 +- ...sidb_api_v1_audit_retrieve_response_200.py | 61 +- .../osidb_api_v1_audit_update_response_200.py | 61 +- ...aws_acknowledgments_create_response_201.py | 119 +- ...ws_acknowledgments_destroy_response_200.py | 32 +- ...flaws_acknowledgments_list_response_200.py | 116 +- ...s_acknowledgments_retrieve_response_200.py | 123 +- ...aws_acknowledgments_update_response_200.py | 119 +- ...i_v1_flaws_comments_create_response_201.py | 125 +- ...api_v1_flaws_comments_list_response_200.py | 116 +- ...v1_flaws_comments_retrieve_response_200.py | 125 +- .../osidb_api_v1_flaws_create_response_201.py | 610 ++- ...1_flaws_cvss_scores_create_response_201.py | 144 +- ..._flaws_cvss_scores_destroy_response_200.py | 28 +- ..._v1_flaws_cvss_scores_list_response_200.py | 116 +- ...flaws_cvss_scores_retrieve_response_200.py | 144 +- ...1_flaws_cvss_scores_update_response_200.py | 144 +- ..._api_v1_flaws_list_affects_affectedness.py | 2 +- .../osidb_api_v1_flaws_list_affects_impact.py | 2 +- 
...db_api_v1_flaws_list_affects_resolution.py | 2 +- .../models/osidb_api_v1_flaws_list_impact.py | 2 +- ..._api_v1_flaws_list_major_incident_state.py | 2 +- ..._api_v1_flaws_list_nist_cvss_validation.py | 2 +- .../osidb_api_v1_flaws_list_order_item.py | 122 +- ..._v1_flaws_list_requires_cve_description.py | 2 +- .../osidb_api_v1_flaws_list_response_200.py | 116 +- .../models/osidb_api_v1_flaws_list_source.py | 2 +- ...b_api_v1_flaws_list_workflow_state_item.py | 2 +- ...ws_package_versions_create_response_201.py | 117 +- ...s_package_versions_destroy_response_200.py | 32 +- ...laws_package_versions_list_response_200.py | 116 +- ..._package_versions_retrieve_response_200.py | 117 +- ...ws_package_versions_update_response_200.py | 117 +- ...pi_v1_flaws_promote_create_response_200.py | 28 +- ...v1_flaws_references_create_response_201.py | 144 +- ...1_flaws_references_destroy_response_200.py | 28 +- ...i_v1_flaws_references_list_response_200.py | 116 +- ..._flaws_references_retrieve_response_200.py | 144 +- ...v1_flaws_references_update_response_200.py | 144 +- ...api_v1_flaws_reject_create_response_200.py | 28 +- ...sidb_api_v1_flaws_retrieve_response_200.py | 610 ++- .../osidb_api_v1_flaws_update_response_200.py | 610 ++- ...b_api_v1_manifest_retrieve_response_200.py | 28 +- ...idb_api_v1_schema_retrieve_response_200.py | 68 +- ...idb_api_v1_status_retrieve_response_200.py | 70 +- ...status_retrieve_response_200_osidb_data.py | 22 +- ...tus_retrieve_response_200_osidb_service.py | 22 +- ...idb_api_v1_trackers_create_response_201.py | 192 +- ...i_v1_trackers_list_affects_affectedness.py | 2 +- ...pi_v1_trackers_list_affects_flaw_impact.py | 2 +- ...pi_v1_trackers_list_affects_flaw_source.py | 2 +- ...idb_api_v1_trackers_list_affects_impact.py | 2 +- ...api_v1_trackers_list_affects_resolution.py | 2 +- .../osidb_api_v1_trackers_list_order_item.py | 58 +- ...osidb_api_v1_trackers_list_response_200.py | 116 +- ...b_api_v1_trackers_retrieve_response_200.py | 194 +- ...idb_api_v1_trackers_update_response_200.py | 194 +- .../osidb_healthy_retrieve_response_200.py | 28 +- .../osidb_whoami_retrieve_response_200.py | 60 +- ...db_whoami_retrieve_response_200_profile.py | 24 +- .../bindings/python_client/models/package.py | 92 +- .../python_client/models/package_ver.py | 25 +- .../models/paginated_affect_cvss_list.py | 106 +- .../models/paginated_affect_list.py | 106 +- .../models/paginated_alert_list.py | 106 +- .../models/paginated_audit_list.py | 106 +- .../models/paginated_epss_list.py | 106 +- ...paginated_exploit_only_report_data_list.py | 106 +- .../paginated_flaw_acknowledgment_list.py | 106 +- .../models/paginated_flaw_comment_list.py | 106 +- .../models/paginated_flaw_cvss_list.py | 106 +- .../models/paginated_flaw_list.py | 106 +- .../paginated_flaw_package_version_list.py | 106 +- .../models/paginated_flaw_reference_list.py | 106 +- .../models/paginated_flaw_report_data_list.py | 106 +- .../paginated_supported_products_list.py | 106 +- .../models/paginated_tracker_list.py | 106 +- .../models/ps_stream_selection.py | 30 +- .../bindings/python_client/models/reject.py | 42 +- .../models/requires_cve_description_enum.py | 2 +- .../python_client/models/resolution_enum.py | 6 +- .../models/supported_products.py | 22 +- .../python_client/models/token_obtain_pair.py | 68 +- .../python_client/models/token_refresh.py | 50 +- .../python_client/models/token_verify.py | 39 +- .../bindings/python_client/models/tracker.py | 284 +- .../python_client/models/tracker_post.py | 277 +- 
.../models/tracker_report_data.py | 55 +- .../models/tracker_suggestion.py | 95 +- .../python_client/models/tracker_type.py | 2 +- ...rackers_api_v1_file_create_response_200.py | 104 +- ...v1_workflows_adjust_create_response_200.py | 28 +- ...pi_v1_workflows_retrieve_2_response_200.py | 28 +- ..._api_v1_workflows_retrieve_response_200.py | 28 +- ...workflows_healthy_retrieve_response_200.py | 28 +- .../models/workflows_retrieve_response_200.py | 28 +- .../bindings/python_client/types.py | 38 +- 277 files changed, 26413 insertions(+), 15737 deletions(-) create mode 100644 osidb_bindings/bindings/python_client/errors.py rename osidb_bindings/bindings/python_client/models/{collectors_api_v1_status_retrieve_response_200_collectors_item_error.py => collectors_api_v1_status_retrieve_response_200_collectors_item_error_type_0.py} (64%) diff --git a/osidb_bindings/bindings/.gitignore b/osidb_bindings/bindings/.gitignore index ed29cb9..79a2c3d 100644 --- a/osidb_bindings/bindings/.gitignore +++ b/osidb_bindings/bindings/.gitignore @@ -20,4 +20,4 @@ dmypy.json .idea/ /coverage.xml -/.coverage \ No newline at end of file +/.coverage diff --git a/osidb_bindings/bindings/README.md b/osidb_bindings/bindings/README.md index 8bd9c92..b052d04 100644 --- a/osidb_bindings/bindings/README.md +++ b/osidb_bindings/bindings/README.md @@ -25,9 +25,10 @@ from python_client.models import MyDataModel from python_client.api.my_tag import get_my_data_model from python_client.types import Response -my_data: MyDataModel = get_my_data_model.sync(client=client) -# or if you need more info (e.g. status_code) -response: Response[MyDataModel] = get_my_data_model.sync_detailed(client=client) +with client as client: + my_data: MyDataModel = get_my_data_model.sync(client=client) + # or if you need more info (e.g. status_code) + response: Response[MyDataModel] = get_my_data_model.sync_detailed(client=client) ``` Or do the same thing with an async version: @@ -37,8 +38,9 @@ from python_client.models import MyDataModel from python_client.api.my_tag import get_my_data_model from python_client.types import Response -my_data: MyDataModel = await get_my_data_model.asyncio(client=client) -response: Response[MyDataModel] = await get_my_data_model.asyncio_detailed(client=client) +async with client as client: + my_data: MyDataModel = await get_my_data_model.asyncio(client=client) + response: Response[MyDataModel] = await get_my_data_model.asyncio_detailed(client=client) ``` By default, when you're calling an HTTPS API it will attempt to verify that SSL is working correctly. Using certificate verification is highly recommended most of the time, but sometimes you may need to authenticate to a server (especially an internal server) using a custom certificate bundle. @@ -65,14 +67,49 @@ Things to know: 1. Every path/method combo becomes a Python module with four functions: 1. `sync`: Blocking request that returns parsed data (if successful) or `None` 1. `sync_detailed`: Blocking request that always returns a `Request`, optionally with `parsed` set if the request was successful. - 1. `asyncio`: Like `sync` but the async instead of blocking - 1. `asyncio_detailed`: Like `sync_detailed` by async instead of blocking + 1. `asyncio`: Like `sync` but async instead of blocking + 1. `asyncio_detailed`: Like `sync_detailed` but async instead of blocking 1. All path/query params, and bodies become method arguments. 1. If your endpoint had any tags on it, the first tag will be used as a module name for the function (my_tag above) 1. 
Any endpoint which did not have a tag will be in `python_client.api.default` -## Building / publishing this Client +## Advanced customizations + +There are more settings on the generated `Client` class which let you control more runtime behavior, check out the docstring on that class for more info. You can also customize the underlying `httpx.Client` or `httpx.AsyncClient` (depending on your use-case): + +```python +from python_client import Client + +def log_request(request): + print(f"Request event hook: {request.method} {request.url} - Waiting for response") + +def log_response(response): + request = response.request + print(f"Response event hook: {request.method} {request.url} - Status {response.status_code}") + +client = Client( + base_url="https://api.example.com", + httpx_args={"event_hooks": {"request": [log_request], "response": [log_response]}}, +) + +# Or get the underlying httpx client to modify directly with client.get_httpx_client() or client.get_async_httpx_client() +``` + +You can even set the httpx client directly, but beware that this will override any existing settings (e.g., base_url): + +```python +import httpx +from python_client import Client + +client = Client( + base_url="https://api.example.com", +) +# Note that base_url needs to be re-set, as would any shared cookies, headers, etc. +client.set_httpx_client(httpx.Client(base_url="https://api.example.com", proxies="http://localhost:8030")) +``` + +## Building / publishing this package This project uses [Poetry](https://python-poetry.org/) to manage dependencies and packaging. Here are the basics: 1. Update the metadata in pyproject.toml (e.g. authors, version) 1. If you're using a private repository, configure it with Poetry @@ -84,4 +121,4 @@ If you want to install this client into another project without publishing it (e 1. If that project **is using Poetry**, you can simply do `poetry add <path-to-this-client>` from that project 1. If that project is not using Poetry: 1. Build a wheel with `poetry build -f wheel` - 1. Install that wheel from the other project `pip install <path-to-wheel>` \ No newline at end of file + 1. 
Install that wheel from the other project `pip install <path-to-wheel>` diff --git a/osidb_bindings/bindings/pyproject.toml b/osidb_bindings/bindings/pyproject.toml index d4df99b..88de38b 100644 --- a/osidb_bindings/bindings/pyproject.toml +++ b/osidb_bindings/bindings/pyproject.toml @@ -1,39 +1,27 @@ [tool.poetry] name = "bindings" -version = "4.6.0" +version = "4.6.1" description = "A client library for accessing OSIDB API" - authors = [] - readme = "README.md" packages = [ {include = "python_client"}, ] include = ["CHANGELOG.md", "python_client/py.typed"] + [tool.poetry.dependencies] -python = "^3.6" -httpx = ">=0.15.4,<0.21.0" -attrs = ">=20.1.0,<22.0.0" +python = "^3.9" +httpx = ">=0.20.0,<0.28.0" +attrs = ">=21.3.0" python-dateutil = "^2.8.0" [build-system] -requires = ["poetry>=1.0"] -build-backend = "poetry.masonry.api" +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" -[tool.black] +[tool.ruff] line-length = 120 -target_version = ['py36', 'py37', 'py38'] -exclude = ''' -( - /( - | \.git - | \.venv - | \.mypy_cache - )/ -) -''' -[tool.isort] -line_length = 120 -profile = "black" \ No newline at end of file +[tool.ruff.lint] +select = ["F", "I", "UP"] diff --git a/osidb_bindings/bindings/python_client/__init__.py b/osidb_bindings/bindings/python_client/__init__.py index 005b186..15ef6db 100644 --- a/osidb_bindings/bindings/python_client/__init__.py +++ b/osidb_bindings/bindings/python_client/__init__.py @@ -1,2 +1,8 @@ -""" A client library for accessing OSIDB API """ +"""A client library for accessing OSIDB API""" + from .client import AuthenticatedClient, Client + +__all__ = ( + "AuthenticatedClient", + "Client", +) diff --git a/osidb_bindings/bindings/python_client/api/__init__.py b/osidb_bindings/bindings/python_client/api/__init__.py index dc035f4..81f9fa2 100644 --- a/osidb_bindings/bindings/python_client/api/__init__.py +++ b/osidb_bindings/bindings/python_client/api/__init__.py @@ -1 +1 @@ -""" Contains methods for accessing the API """ +"""Contains methods for accessing the API""" diff --git a/osidb_bindings/bindings/python_client/api/auth/auth_token_create.py b/osidb_bindings/bindings/python_client/api/auth/auth_token_create.py index aa9f9d2..a311b6a 100644 --- a/osidb_bindings/bindings/python_client/api/auth/auth_token_create.py +++ b/osidb_bindings/bindings/python_client/api/auth/auth_token_create.py @@ -1,48 +1,50 @@ -from typing import Any, Dict, Optional +from http import HTTPStatus +from typing import Any, Optional, Union import requests -from ...client import Client +from ...client import AuthenticatedClient, Client from ...models.auth_token_create_response_200 import AuthTokenCreateResponse200 from ...models.token_obtain_pair import TokenObtainPair from ...types import UNSET, Response, Unset QUERY_PARAMS = {} + REQUEST_BODY_TYPE = TokenObtainPair def _get_kwargs( *, - client: Client, - form_data: TokenObtainPair, - multipart_data: TokenObtainPair, - json_body: TokenObtainPair, -) -> Dict[str, Any]: - url = "{}/auth/token".format( - client.base_url, - ) - - headers: Dict[str, Any] = client.get_headers() + client: Union[AuthenticatedClient, Client], + body: Union[ + TokenObtainPair, + TokenObtainPair, + TokenObtainPair, + ], +) -> dict[str, Any]: + headers: dict[str, Any] = client.get_headers() + + _kwargs: dict[str, Any] = { + "url": f"{client.base_url}/auth/token", + } - json_json_body: Dict[str, Any] = UNSET - if not isinstance(json_body, Unset): - json_body.to_dict() + if isinstance(body, TokenObtainPair): + _json_body: dict[str, Any] 
= UNSET + if not isinstance(body, Unset): + _json_body = body.to_dict() - multipart_multipart_data: Dict[str, Any] = UNSET - if not isinstance(multipart_data, Unset): - multipart_data.to_multipart() + _kwargs["json"] = _json_body + headers["Content-Type"] = "application/json" - return { - "url": url, - "headers": headers, - "json": form_data.to_dict(), - } + _kwargs["headers"] = headers + return _kwargs def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[AuthTokenCreateResponse200]: if response.status_code == 200: + # } _response_200 = response.json() response_200: AuthTokenCreateResponse200 if isinstance(_response_200, Unset): @@ -51,32 +53,47 @@ def _parse_response( response_200 = AuthTokenCreateResponse200.from_dict(_response_200) return response_200 - return None def _build_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[AuthTokenCreateResponse200]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) def sync_detailed( *, - client: Client, - form_data: TokenObtainPair, - multipart_data: TokenObtainPair, - json_body: TokenObtainPair, + client: Union[AuthenticatedClient, Client], + body: Union[ + TokenObtainPair, + TokenObtainPair, + TokenObtainPair, + ], ) -> Response[AuthTokenCreateResponse200]: + """Takes a set of user credentials and returns an access and refresh JSON web + token pair to prove the authentication of those credentials. + + Args: + body (TokenObtainPair): + body (TokenObtainPair): + body (TokenObtainPair): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[AuthTokenCreateResponse200] + """ + kwargs = _get_kwargs( client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) response = requests.post( @@ -87,39 +104,68 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( *, - client: Client, - form_data: TokenObtainPair, - multipart_data: TokenObtainPair, - json_body: TokenObtainPair, + client: Union[AuthenticatedClient, Client], + body: Union[ + TokenObtainPair, + TokenObtainPair, + TokenObtainPair, + ], ) -> Optional[AuthTokenCreateResponse200]: """Takes a set of user credentials and returns an access and refresh JSON web - token pair to prove the authentication of those credentials.""" + token pair to prove the authentication of those credentials. + + Args: + body (TokenObtainPair): + body (TokenObtainPair): + body (TokenObtainPair): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + AuthTokenCreateResponse200 + """ return sync_detailed( client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ).parsed -async def async_detailed( +async def asyncio_detailed( *, - client: Client, - form_data: TokenObtainPair, - multipart_data: TokenObtainPair, - json_body: TokenObtainPair, + client: Union[AuthenticatedClient, Client], + body: Union[ + TokenObtainPair, + TokenObtainPair, + TokenObtainPair, + ], ) -> Response[AuthTokenCreateResponse200]: + """Takes a set of user credentials and returns an access and refresh JSON web + token pair to prove the authentication of those credentials. + + Args: + body (TokenObtainPair): + body (TokenObtainPair): + body (TokenObtainPair): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[AuthTokenCreateResponse200] + """ + kwargs = _get_kwargs( client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) async with client.get_async_session().post( @@ -130,24 +176,37 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( +async def asyncio( *, - client: Client, - form_data: TokenObtainPair, - multipart_data: TokenObtainPair, - json_body: TokenObtainPair, + client: Union[AuthenticatedClient, Client], + body: Union[ + TokenObtainPair, + TokenObtainPair, + TokenObtainPair, + ], ) -> Optional[AuthTokenCreateResponse200]: """Takes a set of user credentials and returns an access and refresh JSON web - token pair to prove the authentication of those credentials.""" + token pair to prove the authentication of those credentials. + + Args: + body (TokenObtainPair): + body (TokenObtainPair): + body (TokenObtainPair): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + AuthTokenCreateResponse200 + """ return ( - await async_detailed( + await asyncio_detailed( client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) ).parsed diff --git a/osidb_bindings/bindings/python_client/api/auth/auth_token_refresh_create.py b/osidb_bindings/bindings/python_client/api/auth/auth_token_refresh_create.py index e32a37d..58baf8b 100644 --- a/osidb_bindings/bindings/python_client/api/auth/auth_token_refresh_create.py +++ b/osidb_bindings/bindings/python_client/api/auth/auth_token_refresh_create.py @@ -1,8 +1,9 @@ -from typing import Any, Dict, Optional +from http import HTTPStatus +from typing import Any, Optional, Union import requests -from ...client import Client +from ...client import AuthenticatedClient, Client from ...models.auth_token_refresh_create_response_200 import ( AuthTokenRefreshCreateResponse200, ) @@ -10,41 +11,42 @@ from ...types import UNSET, Response, Unset QUERY_PARAMS = {} + REQUEST_BODY_TYPE = TokenRefresh def _get_kwargs( *, - client: Client, - form_data: TokenRefresh, - multipart_data: TokenRefresh, - json_body: TokenRefresh, -) -> Dict[str, Any]: - url = "{}/auth/token/refresh".format( - client.base_url, - ) - - headers: Dict[str, Any] = client.get_headers() + client: Union[AuthenticatedClient, Client], + body: Union[ + TokenRefresh, + TokenRefresh, + TokenRefresh, + ], +) -> dict[str, Any]: + headers: dict[str, Any] = client.get_headers() + + _kwargs: dict[str, Any] = { + "url": f"{client.base_url}/auth/token/refresh", + } - json_json_body: Dict[str, Any] = UNSET - if not isinstance(json_body, Unset): - json_body.to_dict() + if isinstance(body, TokenRefresh): + _json_body: dict[str, Any] = UNSET + if not isinstance(body, Unset): + _json_body = body.to_dict() - multipart_multipart_data: Dict[str, Any] = UNSET - if not isinstance(multipart_data, Unset): - multipart_data.to_multipart() + _kwargs["json"] = _json_body + headers["Content-Type"] = "application/json" - return { - "url": url, - "headers": headers, - "json": form_data.to_dict(), - } + _kwargs["headers"] = headers + return _kwargs def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[AuthTokenRefreshCreateResponse200]: if response.status_code == 200: + # } _response_200 = response.json() response_200: AuthTokenRefreshCreateResponse200 if isinstance(_response_200, Unset): @@ -53,32 +55,47 @@ def _parse_response( response_200 = AuthTokenRefreshCreateResponse200.from_dict(_response_200) return response_200 - return None def _build_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[AuthTokenRefreshCreateResponse200]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) def sync_detailed( *, - client: Client, - form_data: TokenRefresh, - multipart_data: TokenRefresh, - json_body: TokenRefresh, + client: Union[AuthenticatedClient, Client], + body: Union[ + TokenRefresh, + TokenRefresh, + TokenRefresh, + ], ) -> Response[AuthTokenRefreshCreateResponse200]: + """Takes a refresh type JSON web token and returns an access type JSON web + token if the refresh token is valid. 
+ + Args: + body (TokenRefresh): + body (TokenRefresh): + body (TokenRefresh): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[AuthTokenRefreshCreateResponse200] + """ + kwargs = _get_kwargs( client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) response = requests.post( @@ -89,39 +106,68 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( *, - client: Client, - form_data: TokenRefresh, - multipart_data: TokenRefresh, - json_body: TokenRefresh, + client: Union[AuthenticatedClient, Client], + body: Union[ + TokenRefresh, + TokenRefresh, + TokenRefresh, + ], ) -> Optional[AuthTokenRefreshCreateResponse200]: """Takes a refresh type JSON web token and returns an access type JSON web - token if the refresh token is valid.""" + token if the refresh token is valid. + + Args: + body (TokenRefresh): + body (TokenRefresh): + body (TokenRefresh): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + AuthTokenRefreshCreateResponse200 + """ return sync_detailed( client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ).parsed -async def async_detailed( +async def asyncio_detailed( *, - client: Client, - form_data: TokenRefresh, - multipart_data: TokenRefresh, - json_body: TokenRefresh, + client: Union[AuthenticatedClient, Client], + body: Union[ + TokenRefresh, + TokenRefresh, + TokenRefresh, + ], ) -> Response[AuthTokenRefreshCreateResponse200]: + """Takes a refresh type JSON web token and returns an access type JSON web + token if the refresh token is valid. + + Args: + body (TokenRefresh): + body (TokenRefresh): + body (TokenRefresh): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[AuthTokenRefreshCreateResponse200] + """ + kwargs = _get_kwargs( client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) async with client.get_async_session().post( @@ -132,24 +178,37 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( +async def asyncio( *, - client: Client, - form_data: TokenRefresh, - multipart_data: TokenRefresh, - json_body: TokenRefresh, + client: Union[AuthenticatedClient, Client], + body: Union[ + TokenRefresh, + TokenRefresh, + TokenRefresh, + ], ) -> Optional[AuthTokenRefreshCreateResponse200]: """Takes a refresh type JSON web token and returns an access type JSON web - token if the refresh token is valid.""" + token if the refresh token is valid. + + Args: + body (TokenRefresh): + body (TokenRefresh): + body (TokenRefresh): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + AuthTokenRefreshCreateResponse200 + """ return ( - await async_detailed( + await asyncio_detailed( client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) ).parsed diff --git a/osidb_bindings/bindings/python_client/api/auth/auth_token_retrieve.py b/osidb_bindings/bindings/python_client/api/auth/auth_token_retrieve.py index 3d2fcdf..4d3a5e4 100644 --- a/osidb_bindings/bindings/python_client/api/auth/auth_token_retrieve.py +++ b/osidb_bindings/bindings/python_client/api/auth/auth_token_retrieve.py @@ -1,8 +1,9 @@ -from typing import Any, Dict, Optional +from http import HTTPStatus +from typing import Any, Optional, Union import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.auth_token_retrieve_response_200 import AuthTokenRetrieveResponse200 from ...types import UNSET, Response, Unset @@ -12,23 +13,19 @@ def _get_kwargs( *, client: AuthenticatedClient, -) -> Dict[str, Any]: - url = "{}/auth/token".format( - client.base_url, - ) - - headers: Dict[str, Any] = client.get_headers() - - return { - "url": url, - "headers": headers, +) -> dict[str, Any]: + _kwargs: dict[str, Any] = { + "url": f"{client.base_url}/auth/token", } + return _kwargs + def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[AuthTokenRetrieveResponse200]: if response.status_code == 200: + # } _response_200 = response.json() response_200: AuthTokenRetrieveResponse200 if isinstance(_response_200, Unset): @@ -37,17 +34,16 @@ def _parse_response( response_200 = AuthTokenRetrieveResponse200.from_dict(_response_200) return response_200 - return None def _build_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[AuthTokenRetrieveResponse200]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) @@ -55,6 +51,16 @@ def sync_detailed( *, client: AuthenticatedClient, ) -> Response[AuthTokenRetrieveResponse200]: + """Takes a kerberos ticket and returns an access and refresh JWT pair. + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[AuthTokenRetrieveResponse200] + """ + kwargs = _get_kwargs( client=client, ) @@ -67,24 +73,42 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( *, client: AuthenticatedClient, ) -> Optional[AuthTokenRetrieveResponse200]: - """Takes a kerberos ticket and returns an access and refresh JWT pair.""" + """Takes a kerberos ticket and returns an access and refresh JWT pair. + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + AuthTokenRetrieveResponse200 + """ return sync_detailed( client=client, ).parsed -async def async_detailed( +async def asyncio_detailed( *, client: AuthenticatedClient, ) -> Response[AuthTokenRetrieveResponse200]: + """Takes a kerberos ticket and returns an access and refresh JWT pair. + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[AuthTokenRetrieveResponse200] + """ + kwargs = _get_kwargs( client=client, ) @@ -97,17 +121,25 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( +async def asyncio( *, client: AuthenticatedClient, ) -> Optional[AuthTokenRetrieveResponse200]: - """Takes a kerberos ticket and returns an access and refresh JWT pair.""" + """Takes a kerberos ticket and returns an access and refresh JWT pair. + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + AuthTokenRetrieveResponse200 + """ return ( - await async_detailed( + await asyncio_detailed( client=client, ) ).parsed diff --git a/osidb_bindings/bindings/python_client/api/auth/auth_token_verify_create.py b/osidb_bindings/bindings/python_client/api/auth/auth_token_verify_create.py index f201e57..d76f018 100644 --- a/osidb_bindings/bindings/python_client/api/auth/auth_token_verify_create.py +++ b/osidb_bindings/bindings/python_client/api/auth/auth_token_verify_create.py @@ -1,8 +1,9 @@ -from typing import Any, Dict, Optional +from http import HTTPStatus +from typing import Any, Optional, Union import requests -from ...client import Client +from ...client import AuthenticatedClient, Client from ...models.auth_token_verify_create_response_200 import ( AuthTokenVerifyCreateResponse200, ) @@ -10,41 +11,42 @@ from ...types import UNSET, Response, Unset QUERY_PARAMS = {} + REQUEST_BODY_TYPE = TokenVerify def _get_kwargs( *, - client: Client, - form_data: TokenVerify, - multipart_data: TokenVerify, - json_body: TokenVerify, -) -> Dict[str, Any]: - url = "{}/auth/token/verify".format( - client.base_url, - ) - - headers: Dict[str, Any] = client.get_headers() + client: Union[AuthenticatedClient, Client], + body: Union[ + TokenVerify, + TokenVerify, + TokenVerify, + ], +) -> dict[str, Any]: + headers: dict[str, Any] = client.get_headers() + + _kwargs: dict[str, Any] = { + "url": f"{client.base_url}/auth/token/verify", + } - json_json_body: Dict[str, Any] = UNSET - if not isinstance(json_body, Unset): - json_body.to_dict() + if isinstance(body, TokenVerify): + _json_body: dict[str, Any] = UNSET + if not isinstance(body, Unset): + _json_body = body.to_dict() - multipart_multipart_data: Dict[str, Any] = UNSET - if not isinstance(multipart_data, Unset): - multipart_data.to_multipart() + _kwargs["json"] = _json_body + headers["Content-Type"] = "application/json" - return { - "url": url, - "headers": headers, - "json": form_data.to_dict(), - } + _kwargs["headers"] = headers + return _kwargs def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[AuthTokenVerifyCreateResponse200]: if response.status_code == 200: + # } _response_200 = 
response.json() response_200: AuthTokenVerifyCreateResponse200 if isinstance(_response_200, Unset): @@ -53,32 +55,47 @@ def _parse_response( response_200 = AuthTokenVerifyCreateResponse200.from_dict(_response_200) return response_200 - return None def _build_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[AuthTokenVerifyCreateResponse200]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) def sync_detailed( *, - client: Client, - form_data: TokenVerify, - multipart_data: TokenVerify, - json_body: TokenVerify, + client: Union[AuthenticatedClient, Client], + body: Union[ + TokenVerify, + TokenVerify, + TokenVerify, + ], ) -> Response[AuthTokenVerifyCreateResponse200]: + """Takes a token and indicates if it is valid. This view provides no + information about a token's fitness for a particular use. + + Args: + body (TokenVerify): + body (TokenVerify): + body (TokenVerify): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[AuthTokenVerifyCreateResponse200] + """ + kwargs = _get_kwargs( client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) response = requests.post( @@ -89,39 +106,68 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( *, - client: Client, - form_data: TokenVerify, - multipart_data: TokenVerify, - json_body: TokenVerify, + client: Union[AuthenticatedClient, Client], + body: Union[ + TokenVerify, + TokenVerify, + TokenVerify, + ], ) -> Optional[AuthTokenVerifyCreateResponse200]: """Takes a token and indicates if it is valid. This view provides no - information about a token's fitness for a particular use.""" + information about a token's fitness for a particular use. + + Args: + body (TokenVerify): + body (TokenVerify): + body (TokenVerify): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + AuthTokenVerifyCreateResponse200 + """ return sync_detailed( client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ).parsed -async def async_detailed( +async def asyncio_detailed( *, - client: Client, - form_data: TokenVerify, - multipart_data: TokenVerify, - json_body: TokenVerify, + client: Union[AuthenticatedClient, Client], + body: Union[ + TokenVerify, + TokenVerify, + TokenVerify, + ], ) -> Response[AuthTokenVerifyCreateResponse200]: + """Takes a token and indicates if it is valid. This view provides no + information about a token's fitness for a particular use. + + Args: + body (TokenVerify): + body (TokenVerify): + body (TokenVerify): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[AuthTokenVerifyCreateResponse200] + """ + kwargs = _get_kwargs( client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) async with client.get_async_session().post( @@ -132,24 +178,37 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( +async def asyncio( *, - client: Client, - form_data: TokenVerify, - multipart_data: TokenVerify, - json_body: TokenVerify, + client: Union[AuthenticatedClient, Client], + body: Union[ + TokenVerify, + TokenVerify, + TokenVerify, + ], ) -> Optional[AuthTokenVerifyCreateResponse200]: """Takes a token and indicates if it is valid. This view provides no - information about a token's fitness for a particular use.""" + information about a token's fitness for a particular use. + + Args: + body (TokenVerify): + body (TokenVerify): + body (TokenVerify): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + AuthTokenVerifyCreateResponse200 + """ return ( - await async_detailed( + await asyncio_detailed( client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) ).parsed diff --git a/osidb_bindings/bindings/python_client/api/collectors/collectors_api_v1_status_retrieve.py b/osidb_bindings/bindings/python_client/api/collectors/collectors_api_v1_status_retrieve.py index dec951d..44654c6 100644 --- a/osidb_bindings/bindings/python_client/api/collectors/collectors_api_v1_status_retrieve.py +++ b/osidb_bindings/bindings/python_client/api/collectors/collectors_api_v1_status_retrieve.py @@ -1,8 +1,9 @@ -from typing import Any, Dict, Optional +from http import HTTPStatus +from typing import Any, Optional, Union import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.collectors_api_v1_status_retrieve_response_200 import ( CollectorsApiV1StatusRetrieveResponse200, ) @@ -14,44 +15,37 @@ def _get_kwargs( *, client: AuthenticatedClient, -) -> Dict[str, Any]: - url = "{}/collectors/api/v1/status".format( - client.base_url, - ) - - headers: Dict[str, Any] = client.get_headers() - - return { - "url": url, - "headers": headers, +) -> dict[str, Any]: + _kwargs: dict[str, Any] = { + "url": f"{client.base_url}/collectors/api/v1/status", } + return _kwargs + def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[CollectorsApiV1StatusRetrieveResponse200]: if response.status_code == 200: + # } _response_200 = response.json() response_200: CollectorsApiV1StatusRetrieveResponse200 if isinstance(_response_200, Unset): response_200 = UNSET else: - response_200 = CollectorsApiV1StatusRetrieveResponse200.from_dict( - _response_200 - ) + response_200 = CollectorsApiV1StatusRetrieveResponse200.from_dict(_response_200) return response_200 - return None def _build_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[CollectorsApiV1StatusRetrieveResponse200]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - 
parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) @@ -59,6 +53,16 @@ def sync_detailed( *, client: AuthenticatedClient, ) -> Response[CollectorsApiV1StatusRetrieveResponse200]: + """get the overall status of all collectors and the collected data + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[CollectorsApiV1StatusRetrieveResponse200] + """ + kwargs = _get_kwargs( client=client, ) @@ -71,24 +75,42 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( *, client: AuthenticatedClient, ) -> Optional[CollectorsApiV1StatusRetrieveResponse200]: - """get the overall status of all collectors and the collected data""" + """get the overall status of all collectors and the collected data + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + CollectorsApiV1StatusRetrieveResponse200 + """ return sync_detailed( client=client, ).parsed -async def async_detailed( +async def asyncio_detailed( *, client: AuthenticatedClient, ) -> Response[CollectorsApiV1StatusRetrieveResponse200]: + """get the overall status of all collectors and the collected data + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[CollectorsApiV1StatusRetrieveResponse200] + """ + kwargs = _get_kwargs( client=client, ) @@ -101,17 +123,25 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( +async def asyncio( *, client: AuthenticatedClient, ) -> Optional[CollectorsApiV1StatusRetrieveResponse200]: - """get the overall status of all collectors and the collected data""" + """get the overall status of all collectors and the collected data + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + CollectorsApiV1StatusRetrieveResponse200 + """ return ( - await async_detailed( + await asyncio_detailed( client=client, ) ).parsed diff --git a/osidb_bindings/bindings/python_client/api/collectors/collectors_healthy_retrieve.py b/osidb_bindings/bindings/python_client/api/collectors/collectors_healthy_retrieve.py index 5938942..bf03291 100644 --- a/osidb_bindings/bindings/python_client/api/collectors/collectors_healthy_retrieve.py +++ b/osidb_bindings/bindings/python_client/api/collectors/collectors_healthy_retrieve.py @@ -1,8 +1,9 @@ -from typing import Any, Dict, Optional +from http import HTTPStatus +from typing import Any, Optional, Union import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.collectors_healthy_retrieve_response_200 import ( CollectorsHealthyRetrieveResponse200, ) @@ -14,23 +15,19 @@ def _get_kwargs( *, client: AuthenticatedClient, -) -> Dict[str, Any]: - url = "{}/collectors/healthy".format( - client.base_url, - ) - - headers: Dict[str, Any] = client.get_headers() - - return { - "url": url, - "headers": headers, +) -> dict[str, Any]: + _kwargs: dict[str, Any] = { + "url": f"{client.base_url}/collectors/healthy", } + return _kwargs + def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[CollectorsHealthyRetrieveResponse200]: if response.status_code == 200: + # } _response_200 = response.json() response_200: CollectorsHealthyRetrieveResponse200 if isinstance(_response_200, Unset): @@ -39,17 +36,16 @@ def _parse_response( response_200 = CollectorsHealthyRetrieveResponse200.from_dict(_response_200) return response_200 - return None def _build_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[CollectorsHealthyRetrieveResponse200]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) @@ -57,6 +53,16 @@ def sync_detailed( *, client: AuthenticatedClient, ) -> Response[CollectorsHealthyRetrieveResponse200]: + """unauthenticated health check API endpoint + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[CollectorsHealthyRetrieveResponse200] + """ + kwargs = _get_kwargs( client=client, ) @@ -69,24 +75,42 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( *, client: AuthenticatedClient, ) -> Optional[CollectorsHealthyRetrieveResponse200]: - """unauthenticated health check API endpoint""" + """unauthenticated health check API endpoint + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + CollectorsHealthyRetrieveResponse200 + """ return sync_detailed( client=client, ).parsed -async def async_detailed( +async def asyncio_detailed( *, client: AuthenticatedClient, ) -> Response[CollectorsHealthyRetrieveResponse200]: + """unauthenticated health check API endpoint + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[CollectorsHealthyRetrieveResponse200] + """ + kwargs = _get_kwargs( client=client, ) @@ -99,17 +123,25 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( +async def asyncio( *, client: AuthenticatedClient, ) -> Optional[CollectorsHealthyRetrieveResponse200]: - """unauthenticated health check API endpoint""" + """unauthenticated health check API endpoint + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + CollectorsHealthyRetrieveResponse200 + """ return ( - await async_detailed( + await asyncio_detailed( client=client, ) ).parsed diff --git a/osidb_bindings/bindings/python_client/api/collectors/collectors_retrieve.py b/osidb_bindings/bindings/python_client/api/collectors/collectors_retrieve.py index bdde580..7f957ae 100644 --- a/osidb_bindings/bindings/python_client/api/collectors/collectors_retrieve.py +++ b/osidb_bindings/bindings/python_client/api/collectors/collectors_retrieve.py @@ -1,8 +1,9 @@ -from typing import Any, Dict, Optional +from http import HTTPStatus +from typing import Any, Optional, Union import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.collectors_retrieve_response_200 import CollectorsRetrieveResponse200 from ...types import UNSET, Response, Unset @@ -12,23 +13,19 @@ def _get_kwargs( *, client: AuthenticatedClient, -) -> Dict[str, Any]: - url = "{}/collectors/".format( - client.base_url, - ) - - headers: Dict[str, Any] = client.get_headers() - - return { - "url": url, - "headers": headers, +) -> dict[str, Any]: + _kwargs: dict[str, Any] = { + "url": f"{client.base_url}/collectors/", } + return _kwargs + def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[CollectorsRetrieveResponse200]: if response.status_code == 200: + # } _response_200 = response.json() response_200: CollectorsRetrieveResponse200 if isinstance(_response_200, Unset): @@ -37,17 +34,16 @@ def _parse_response( response_200 = CollectorsRetrieveResponse200.from_dict(_response_200) return response_200 - return None def _build_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[CollectorsRetrieveResponse200]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) @@ -55,6 +51,16 @@ def sync_detailed( *, client: AuthenticatedClient, ) -> Response[CollectorsRetrieveResponse200]: + """index API endpoint listing available collector 
API endpoints + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[CollectorsRetrieveResponse200] + """ + kwargs = _get_kwargs( client=client, ) @@ -67,24 +73,42 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( *, client: AuthenticatedClient, ) -> Optional[CollectorsRetrieveResponse200]: - """index API endpoint listing available collector API endpoints""" + """index API endpoint listing available collector API endpoints + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + CollectorsRetrieveResponse200 + """ return sync_detailed( client=client, ).parsed -async def async_detailed( +async def asyncio_detailed( *, client: AuthenticatedClient, ) -> Response[CollectorsRetrieveResponse200]: + """index API endpoint listing available collector API endpoints + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[CollectorsRetrieveResponse200] + """ + kwargs = _get_kwargs( client=client, ) @@ -97,17 +121,25 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( +async def asyncio( *, client: AuthenticatedClient, ) -> Optional[CollectorsRetrieveResponse200]: - """index API endpoint listing available collector API endpoints""" + """index API endpoint listing available collector API endpoints + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + CollectorsRetrieveResponse200 + """ return ( - await async_detailed( + await asyncio_detailed( client=client, ) ).parsed diff --git a/osidb_bindings/bindings/python_client/api/exploits/exploits_api_v1_collect_update.py b/osidb_bindings/bindings/python_client/api/exploits/exploits_api_v1_collect_update.py index 96f5ea6..9e78098 100644 --- a/osidb_bindings/bindings/python_client/api/exploits/exploits_api_v1_collect_update.py +++ b/osidb_bindings/bindings/python_client/api/exploits/exploits_api_v1_collect_update.py @@ -1,8 +1,9 @@ -from typing import Any, Dict, Optional +from http import HTTPStatus +from typing import Any, Optional, Union import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.exploits_api_v1_collect_update_response_200 import ( ExploitsApiV1CollectUpdateResponse200, ) @@ -14,44 +15,37 @@ def _get_kwargs( *, client: AuthenticatedClient, -) -> Dict[str, Any]: - url = "{}/exploits/api/v1/collect".format( - client.base_url, - ) - - headers: Dict[str, Any] = client.get_headers() - - return { - "url": url, - "headers": headers, +) -> dict[str, Any]: + _kwargs: dict[str, Any] = { + "url": f"{client.base_url}/exploits/api/v1/collect", } + return _kwargs + def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[ExploitsApiV1CollectUpdateResponse200]: if response.status_code == 200: + # } _response_200 = response.json() response_200: ExploitsApiV1CollectUpdateResponse200 if isinstance(_response_200, Unset): response_200 = UNSET else: - response_200 = ExploitsApiV1CollectUpdateResponse200.from_dict( - _response_200 - ) + response_200 = ExploitsApiV1CollectUpdateResponse200.from_dict(_response_200) return response_200 - return None def _build_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[ExploitsApiV1CollectUpdateResponse200]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) @@ -59,6 +53,18 @@ def sync_detailed( *, client: AuthenticatedClient, ) -> Response[ExploitsApiV1CollectUpdateResponse200]: + """API endpoint for re-collecting exploit data. + + **NOTE:** Currently for CISA data only, which is very small and collection is fast. + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[ExploitsApiV1CollectUpdateResponse200] + """ + kwargs = _get_kwargs( client=client, ) @@ -71,7 +77,7 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( @@ -80,17 +86,37 @@ def sync( ) -> Optional[ExploitsApiV1CollectUpdateResponse200]: """API endpoint for re-collecting exploit data. - **NOTE:** Currently for CISA data only, which is very small and collection is fast.""" + **NOTE:** Currently for CISA data only, which is very small and collection is fast. + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. 
+ httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + ExploitsApiV1CollectUpdateResponse200 + """ return sync_detailed( client=client, ).parsed -async def async_detailed( +async def asyncio_detailed( *, client: AuthenticatedClient, ) -> Response[ExploitsApiV1CollectUpdateResponse200]: + """API endpoint for re-collecting exploit data. + + **NOTE:** Currently for CISA data only, which is very small and collection is fast. + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[ExploitsApiV1CollectUpdateResponse200] + """ + kwargs = _get_kwargs( client=client, ) @@ -103,19 +129,27 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( +async def asyncio( *, client: AuthenticatedClient, ) -> Optional[ExploitsApiV1CollectUpdateResponse200]: """API endpoint for re-collecting exploit data. - **NOTE:** Currently for CISA data only, which is very small and collection is fast.""" + **NOTE:** Currently for CISA data only, which is very small and collection is fast. + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + ExploitsApiV1CollectUpdateResponse200 + """ return ( - await async_detailed( + await asyncio_detailed( client=client, ) ).parsed diff --git a/osidb_bindings/bindings/python_client/api/exploits/exploits_api_v1_cve_map_retrieve.py b/osidb_bindings/bindings/python_client/api/exploits/exploits_api_v1_cve_map_retrieve.py index 05e68c9..1baa68e 100644 --- a/osidb_bindings/bindings/python_client/api/exploits/exploits_api_v1_cve_map_retrieve.py +++ b/osidb_bindings/bindings/python_client/api/exploits/exploits_api_v1_cve_map_retrieve.py @@ -1,8 +1,9 @@ -from typing import Any, Dict, Optional +from http import HTTPStatus +from typing import Any, Optional, Union import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.exploits_api_v1_cve_map_retrieve_response_200 import ( ExploitsApiV1CveMapRetrieveResponse200, ) @@ -14,44 +15,37 @@ def _get_kwargs( *, client: AuthenticatedClient, -) -> Dict[str, Any]: - url = "{}/exploits/api/v1/cve_map".format( - client.base_url, - ) - - headers: Dict[str, Any] = client.get_headers() - - return { - "url": url, - "headers": headers, +) -> dict[str, Any]: + _kwargs: dict[str, Any] = { + "url": f"{client.base_url}/exploits/api/v1/cve_map", } + return _kwargs + def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[ExploitsApiV1CveMapRetrieveResponse200]: if response.status_code == 200: + # } _response_200 = response.json() response_200: ExploitsApiV1CveMapRetrieveResponse200 if isinstance(_response_200, Unset): response_200 = UNSET else: - response_200 = ExploitsApiV1CveMapRetrieveResponse200.from_dict( - _response_200 - ) + response_200 = ExploitsApiV1CveMapRetrieveResponse200.from_dict(_response_200) return response_200 - return None def _build_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> 
Response[ExploitsApiV1CveMapRetrieveResponse200]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) @@ -59,6 +53,30 @@ def sync_detailed( *, client: AuthenticatedClient, ) -> Response[ExploitsApiV1CveMapRetrieveResponse200]: + r"""API endpoint for getting simple exploits information mapped to impacted CVEs. + + The Insights Vulnerability application needs this format. + + Format of results: + ``` + { + \"page_size\": <Number of CVEs on the page>, + \"cves\": { + \"CVE-2222-0001\": [<List of exploits>], + \"CVE-2222-0002\": [<List of exploits>], + ... + }, + } + ``` + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[ExploitsApiV1CveMapRetrieveResponse200] + """ + kwargs = _get_kwargs( client=client, ) @@ -71,14 +89,14 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( *, client: AuthenticatedClient, ) -> Optional[ExploitsApiV1CveMapRetrieveResponse200]: - """API endpoint for getting simple exploits information mapped to impacted CVEs. + r"""API endpoint for getting simple exploits information mapped to impacted CVEs. The Insights Vulnerability application needs this format. @@ -92,17 +110,49 @@ def sync( ... }, } - ```""" + ``` + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + ExploitsApiV1CveMapRetrieveResponse200 + """ return sync_detailed( client=client, ).parsed -async def async_detailed( +async def asyncio_detailed( *, client: AuthenticatedClient, ) -> Response[ExploitsApiV1CveMapRetrieveResponse200]: + r"""API endpoint for getting simple exploits information mapped to impacted CVEs. + + The Insights Vulnerability application needs this format. + + Format of results: + ``` + { + \"page_size\": <Number of CVEs on the page>, + \"cves\": { + \"CVE-2222-0001\": [<List of exploits>], + \"CVE-2222-0002\": [<List of exploits>], + ... + }, + } + ``` + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[ExploitsApiV1CveMapRetrieveResponse200] + """ + kwargs = _get_kwargs( client=client, ) @@ -115,14 +165,14 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( +async def asyncio( *, client: AuthenticatedClient, ) -> Optional[ExploitsApiV1CveMapRetrieveResponse200]: - """API endpoint for getting simple exploits information mapped to impacted CVEs. + r"""API endpoint for getting simple exploits information mapped to impacted CVEs. The Insights Vulnerability application needs this format. @@ -136,10 +186,18 @@ async def async_( ... }, } - ```""" + ``` + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. 
+ httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + ExploitsApiV1CveMapRetrieveResponse200 + """ return ( - await async_detailed( + await asyncio_detailed( client=client, ) ).parsed diff --git a/osidb_bindings/bindings/python_client/api/exploits/exploits_api_v1_epss_list.py b/osidb_bindings/bindings/python_client/api/exploits/exploits_api_v1_epss_list.py index 869b209..3ebd316 100644 --- a/osidb_bindings/bindings/python_client/api/exploits/exploits_api_v1_epss_list.py +++ b/osidb_bindings/bindings/python_client/api/exploits/exploits_api_v1_epss_list.py @@ -1,8 +1,9 @@ -from typing import Any, Dict, Optional, Union +from http import HTTPStatus +from typing import Any, Optional, Union import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.exploits_api_v1_epss_list_response_200 import ( ExploitsApiV1EpssListResponse200, ) @@ -17,32 +18,30 @@ def _get_kwargs( *, client: AuthenticatedClient, - limit: Union[Unset, None, int] = UNSET, - offset: Union[Unset, None, int] = UNSET, -) -> Dict[str, Any]: - url = "{}/exploits/api/v1/epss".format( - client.base_url, - ) + limit: Union[Unset, int] = UNSET, + offset: Union[Unset, int] = UNSET, +) -> dict[str, Any]: + params: dict[str, Any] = {} - headers: Dict[str, Any] = client.get_headers() + params["limit"] = limit + + params["offset"] = offset - params: Dict[str, Any] = { - "limit": limit, - "offset": offset, - } params = {k: v for k, v in params.items() if v is not UNSET and v is not None} - return { - "url": url, - "headers": headers, + _kwargs: dict[str, Any] = { + "url": f"{client.base_url}/exploits/api/v1/epss", "params": params, } + return _kwargs + def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[ExploitsApiV1EpssListResponse200]: if response.status_code == 200: + # } _response_200 = response.json() response_200: ExploitsApiV1EpssListResponse200 if isinstance(_response_200, Unset): @@ -51,26 +50,39 @@ def _parse_response( response_200 = ExploitsApiV1EpssListResponse200.from_dict(_response_200) return response_200 - return None def _build_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[ExploitsApiV1EpssListResponse200]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) def sync_detailed( *, client: AuthenticatedClient, - limit: Union[Unset, None, int] = UNSET, - offset: Union[Unset, None, int] = UNSET, + limit: Union[Unset, int] = UNSET, + offset: Union[Unset, int] = UNSET, ) -> Response[ExploitsApiV1EpssListResponse200]: + """API endpoint for getting list of Red Hat relevant CVEs with their EPSS score. + + Args: + limit (Union[Unset, int]): + offset (Union[Unset, int]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[ExploitsApiV1EpssListResponse200] + """ + kwargs = _get_kwargs( client=client, limit=limit, @@ -85,16 +97,28 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( *, client: AuthenticatedClient, - limit: Union[Unset, None, int] = UNSET, - offset: Union[Unset, None, int] = UNSET, + limit: Union[Unset, int] = UNSET, + offset: Union[Unset, int] = UNSET, ) -> Optional[ExploitsApiV1EpssListResponse200]: - """API endpoint for getting list of Red Hat relevant CVEs with their EPSS score.""" + """API endpoint for getting list of Red Hat relevant CVEs with their EPSS score. + + Args: + limit (Union[Unset, int]): + offset (Union[Unset, int]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + ExploitsApiV1EpssListResponse200 + """ return sync_detailed( client=client, @@ -103,12 +127,26 @@ def sync( ).parsed -async def async_detailed( +async def asyncio_detailed( *, client: AuthenticatedClient, - limit: Union[Unset, None, int] = UNSET, - offset: Union[Unset, None, int] = UNSET, + limit: Union[Unset, int] = UNSET, + offset: Union[Unset, int] = UNSET, ) -> Response[ExploitsApiV1EpssListResponse200]: + """API endpoint for getting list of Red Hat relevant CVEs with their EPSS score. + + Args: + limit (Union[Unset, int]): + offset (Union[Unset, int]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[ExploitsApiV1EpssListResponse200] + """ + kwargs = _get_kwargs( client=client, limit=limit, @@ -123,19 +161,31 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( +async def asyncio( *, client: AuthenticatedClient, - limit: Union[Unset, None, int] = UNSET, - offset: Union[Unset, None, int] = UNSET, + limit: Union[Unset, int] = UNSET, + offset: Union[Unset, int] = UNSET, ) -> Optional[ExploitsApiV1EpssListResponse200]: - """API endpoint for getting list of Red Hat relevant CVEs with their EPSS score.""" + """API endpoint for getting list of Red Hat relevant CVEs with their EPSS score. + + Args: + limit (Union[Unset, int]): + offset (Union[Unset, int]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + ExploitsApiV1EpssListResponse200 + """ return ( - await async_detailed( + await asyncio_detailed( client=client, limit=limit, offset=offset, diff --git a/osidb_bindings/bindings/python_client/api/exploits/exploits_api_v1_flaw_data_list.py b/osidb_bindings/bindings/python_client/api/exploits/exploits_api_v1_flaw_data_list.py index c1b40dc..7d849ff 100644 --- a/osidb_bindings/bindings/python_client/api/exploits/exploits_api_v1_flaw_data_list.py +++ b/osidb_bindings/bindings/python_client/api/exploits/exploits_api_v1_flaw_data_list.py @@ -1,8 +1,9 @@ -from typing import Any, Dict, Optional, Union +from http import HTTPStatus +from typing import Any, Optional, Union import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.exploits_api_v1_flaw_data_list_response_200 import ( ExploitsApiV1FlawDataListResponse200, ) @@ -17,32 +18,30 @@ def _get_kwargs( *, client: AuthenticatedClient, - limit: Union[Unset, None, int] = UNSET, - offset: Union[Unset, None, int] = UNSET, -) -> Dict[str, Any]: - url = "{}/exploits/api/v1/flaw_data".format( - client.base_url, - ) + limit: Union[Unset, int] = UNSET, + offset: Union[Unset, int] = UNSET, +) -> dict[str, Any]: + params: dict[str, Any] = {} - headers: Dict[str, Any] = client.get_headers() + params["limit"] = limit + + params["offset"] = offset - params: Dict[str, Any] = { - "limit": limit, - "offset": offset, - } params = {k: v for k, v in params.items() if v is not UNSET and v is not None} - return { - "url": url, - "headers": headers, + _kwargs: dict[str, Any] = { + "url": f"{client.base_url}/exploits/api/v1/flaw_data", "params": params, } + return _kwargs + def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[ExploitsApiV1FlawDataListResponse200]: if response.status_code == 200: + # } _response_200 = response.json() response_200: ExploitsApiV1FlawDataListResponse200 if isinstance(_response_200, Unset): @@ -51,26 +50,39 @@ def _parse_response( response_200 = ExploitsApiV1FlawDataListResponse200.from_dict(_response_200) return response_200 - return None def _build_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[ExploitsApiV1FlawDataListResponse200]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) def sync_detailed( *, client: AuthenticatedClient, - limit: Union[Unset, None, int] = UNSET, - offset: Union[Unset, None, int] = UNSET, + limit: Union[Unset, int] = UNSET, + offset: Union[Unset, int] = UNSET, ) -> Response[ExploitsApiV1FlawDataListResponse200]: + """Flaw, affect, and tracker data for Exploits + + Args: + limit (Union[Unset, int]): + offset (Union[Unset, int]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[ExploitsApiV1FlawDataListResponse200] + """ + kwargs = _get_kwargs( client=client, limit=limit, @@ -85,16 +97,28 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( *, client: AuthenticatedClient, - limit: Union[Unset, None, int] = UNSET, - offset: Union[Unset, None, int] = UNSET, + limit: Union[Unset, int] = UNSET, + offset: Union[Unset, int] = UNSET, ) -> Optional[ExploitsApiV1FlawDataListResponse200]: - """Flaw, affect, and tracker data for Exploits""" + """Flaw, affect, and tracker data for Exploits + + Args: + limit (Union[Unset, int]): + offset (Union[Unset, int]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + ExploitsApiV1FlawDataListResponse200 + """ return sync_detailed( client=client, @@ -103,12 +127,26 @@ def sync( ).parsed -async def async_detailed( +async def asyncio_detailed( *, client: AuthenticatedClient, - limit: Union[Unset, None, int] = UNSET, - offset: Union[Unset, None, int] = UNSET, + limit: Union[Unset, int] = UNSET, + offset: Union[Unset, int] = UNSET, ) -> Response[ExploitsApiV1FlawDataListResponse200]: + """Flaw, affect, and tracker data for Exploits + + Args: + limit (Union[Unset, int]): + offset (Union[Unset, int]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[ExploitsApiV1FlawDataListResponse200] + """ + kwargs = _get_kwargs( client=client, limit=limit, @@ -123,19 +161,31 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( +async def asyncio( *, client: AuthenticatedClient, - limit: Union[Unset, None, int] = UNSET, - offset: Union[Unset, None, int] = UNSET, + limit: Union[Unset, int] = UNSET, + offset: Union[Unset, int] = UNSET, ) -> Optional[ExploitsApiV1FlawDataListResponse200]: - """Flaw, affect, and tracker data for Exploits""" + """Flaw, affect, and tracker data for Exploits + + Args: + limit (Union[Unset, int]): + offset (Union[Unset, int]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + ExploitsApiV1FlawDataListResponse200 + """ return ( - await async_detailed( + await asyncio_detailed( client=client, limit=limit, offset=offset, diff --git a/osidb_bindings/bindings/python_client/api/exploits/exploits_api_v1_report_data_list.py b/osidb_bindings/bindings/python_client/api/exploits/exploits_api_v1_report_data_list.py index f116327..00ed86c 100644 --- a/osidb_bindings/bindings/python_client/api/exploits/exploits_api_v1_report_data_list.py +++ b/osidb_bindings/bindings/python_client/api/exploits/exploits_api_v1_report_data_list.py @@ -1,8 +1,9 @@ -from typing import Any, Dict, Optional, Union +from http import HTTPStatus +from typing import Any, Optional, Union import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.exploits_api_v1_report_data_list_response_200 import ( ExploitsApiV1ReportDataListResponse200, ) @@ -17,62 +18,71 @@ def _get_kwargs( *, client: AuthenticatedClient, - limit: Union[Unset, None, int] = UNSET, - offset: Union[Unset, None, int] = UNSET, -) -> Dict[str, Any]: - url = "{}/exploits/api/v1/report_data".format( - client.base_url, - ) + limit: Union[Unset, int] = UNSET, + offset: Union[Unset, int] = UNSET, +) -> dict[str, Any]: + params: dict[str, Any] = {} - headers: Dict[str, Any] = client.get_headers() + params["limit"] = limit + + params["offset"] = offset - params: Dict[str, Any] = { - "limit": limit, - "offset": offset, - } params = {k: v for k, v in params.items() if v is not UNSET and v is not None} - return { - "url": url, - "headers": headers, + _kwargs: dict[str, Any] = { + "url": f"{client.base_url}/exploits/api/v1/report_data", "params": params, } + return _kwargs + def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[ExploitsApiV1ReportDataListResponse200]: if response.status_code == 200: + # } _response_200 = response.json() response_200: ExploitsApiV1ReportDataListResponse200 if isinstance(_response_200, Unset): response_200 = UNSET else: - response_200 = ExploitsApiV1ReportDataListResponse200.from_dict( - _response_200 - ) + response_200 = ExploitsApiV1ReportDataListResponse200.from_dict(_response_200) return response_200 - return None def _build_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[ExploitsApiV1ReportDataListResponse200]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) def sync_detailed( *, client: AuthenticatedClient, - limit: Union[Unset, None, int] = UNSET, - offset: Union[Unset, None, int] = UNSET, + limit: Union[Unset, int] = UNSET, + offset: Union[Unset, int] = UNSET, ) -> Response[ExploitsApiV1ReportDataListResponse200]: + """Export only the data required to generate the exploits report + + Args: + limit (Union[Unset, int]): + offset (Union[Unset, int]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[ExploitsApiV1ReportDataListResponse200] + """ + kwargs = _get_kwargs( client=client, limit=limit, @@ -87,16 +97,28 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( *, client: AuthenticatedClient, - limit: Union[Unset, None, int] = UNSET, - offset: Union[Unset, None, int] = UNSET, + limit: Union[Unset, int] = UNSET, + offset: Union[Unset, int] = UNSET, ) -> Optional[ExploitsApiV1ReportDataListResponse200]: - """Export only the data required to generate the exploits report""" + """Export only the data required to generate the exploits report + + Args: + limit (Union[Unset, int]): + offset (Union[Unset, int]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + ExploitsApiV1ReportDataListResponse200 + """ return sync_detailed( client=client, @@ -105,12 +127,26 @@ def sync( ).parsed -async def async_detailed( +async def asyncio_detailed( *, client: AuthenticatedClient, - limit: Union[Unset, None, int] = UNSET, - offset: Union[Unset, None, int] = UNSET, + limit: Union[Unset, int] = UNSET, + offset: Union[Unset, int] = UNSET, ) -> Response[ExploitsApiV1ReportDataListResponse200]: + """Export only the data required to generate the exploits report + + Args: + limit (Union[Unset, int]): + offset (Union[Unset, int]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[ExploitsApiV1ReportDataListResponse200] + """ + kwargs = _get_kwargs( client=client, limit=limit, @@ -125,19 +161,31 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( +async def asyncio( *, client: AuthenticatedClient, - limit: Union[Unset, None, int] = UNSET, - offset: Union[Unset, None, int] = UNSET, + limit: Union[Unset, int] = UNSET, + offset: Union[Unset, int] = UNSET, ) -> Optional[ExploitsApiV1ReportDataListResponse200]: - """Export only the data required to generate the exploits report""" + """Export only the data required to generate the exploits report + + Args: + limit (Union[Unset, int]): + offset (Union[Unset, int]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + ExploitsApiV1ReportDataListResponse200 + """ return ( - await async_detailed( + await asyncio_detailed( client=client, limit=limit, offset=offset, diff --git a/osidb_bindings/bindings/python_client/api/exploits/exploits_api_v1_report_date_retrieve.py b/osidb_bindings/bindings/python_client/api/exploits/exploits_api_v1_report_date_retrieve.py index 887b2e7..9e71abf 100644 --- a/osidb_bindings/bindings/python_client/api/exploits/exploits_api_v1_report_date_retrieve.py +++ b/osidb_bindings/bindings/python_client/api/exploits/exploits_api_v1_report_date_retrieve.py @@ -1,9 +1,10 @@ import datetime -from typing import Any, Dict, Optional +from http import HTTPStatus +from typing import Any, Optional, Union import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.exploits_api_v1_report_date_retrieve_response_200 import ( ExploitsApiV1ReportDateRetrieveResponse200, ) @@ -16,45 +17,39 @@ def _get_kwargs( date: datetime.date, *, client: AuthenticatedClient, -) -> Dict[str, Any]: - url = "{}/exploits/api/v1/report/date/{date}".format( - client.base_url, - date=date, - ) - - headers: Dict[str, Any] = client.get_headers() - - return { - "url": url, - "headers": headers, +) -> dict[str, Any]: + _kwargs: dict[str, Any] = { + "url": f"{client.base_url}//exploits/api/v1/report/date/{date}".format( + date=date, + ), } + return _kwargs + def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[ExploitsApiV1ReportDateRetrieveResponse200]: if response.status_code == 200: + # } _response_200 = response.json() response_200: ExploitsApiV1ReportDateRetrieveResponse200 if isinstance(_response_200, Unset): response_200 = UNSET else: - response_200 = ExploitsApiV1ReportDateRetrieveResponse200.from_dict( - _response_200 - ) + response_200 = ExploitsApiV1ReportDateRetrieveResponse200.from_dict(_response_200) return response_200 - return None def _build_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[ExploitsApiV1ReportDateRetrieveResponse200]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) @@ -63,6 +58,34 @@ def sync_detailed( *, client: AuthenticatedClient, ) -> Response[ExploitsApiV1ReportDateRetrieveResponse200]: + r"""API endpoint for getting date based report for Incident Response. + + Format of results: + ``` + { + \"cutoff_date\": <Date>, + \"evaluated_cves\": <Number of new CVEs with exploits>, + \"action_required\": [<List of affects requiring action>], + \"no_action\": [<List of CVEs not requiring action with reason>], + \"not_relevant\": [<List of CVEs which are not in the database with reason>], + } + ``` + + **NOTE:** No pagination is performed on this endpoint as data is limited by date and is expected + to be fairly small. Also, because data is broken into three categories it is not + exactly obvious how to create pages. + + Args: + date (datetime.date): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[ExploitsApiV1ReportDateRetrieveResponse200] + """ + kwargs = _get_kwargs( date=date, client=client, @@ -76,7 +99,7 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( @@ -84,7 +107,7 @@ def sync( *, client: AuthenticatedClient, ) -> Optional[ExploitsApiV1ReportDateRetrieveResponse200]: - """API endpoint for getting date based report for Incident Response. + r"""API endpoint for getting date based report for Incident Response. Format of results: ``` @@ -99,7 +122,18 @@ def sync( **NOTE:** No pagination is performed on this endpoint as data is limited by date and is expected to be fairly small. Also, because data is broken into three categories it is not - exactly obvious how to create pages.""" + exactly obvious how to create pages. + + Args: + date (datetime.date): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + ExploitsApiV1ReportDateRetrieveResponse200 + """ return sync_detailed( date=date, @@ -107,11 +141,39 @@ def sync( ).parsed -async def async_detailed( +async def asyncio_detailed( date: datetime.date, *, client: AuthenticatedClient, ) -> Response[ExploitsApiV1ReportDateRetrieveResponse200]: + r"""API endpoint for getting date based report for Incident Response. + + Format of results: + ``` + { + \"cutoff_date\": <Date>, + \"evaluated_cves\": <Number of new CVEs with exploits>, + \"action_required\": [<List of affects requiring action>], + \"no_action\": [<List of CVEs not requiring action with reason>], + \"not_relevant\": [<List of CVEs which are not in the database with reason>], + } + ``` + + **NOTE:** No pagination is performed on this endpoint as data is limited by date and is expected + to be fairly small. Also, because data is broken into three categories it is not + exactly obvious how to create pages. + + Args: + date (datetime.date): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[ExploitsApiV1ReportDateRetrieveResponse200] + """ + kwargs = _get_kwargs( date=date, client=client, @@ -125,15 +187,15 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( +async def asyncio( date: datetime.date, *, client: AuthenticatedClient, ) -> Optional[ExploitsApiV1ReportDateRetrieveResponse200]: - """API endpoint for getting date based report for Incident Response. + r"""API endpoint for getting date based report for Incident Response. Format of results: ``` @@ -148,10 +210,21 @@ async def async_( **NOTE:** No pagination is performed on this endpoint as data is limited by date and is expected to be fairly small. Also, because data is broken into three categories it is not - exactly obvious how to create pages.""" + exactly obvious how to create pages. + + Args: + date (datetime.date): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + ExploitsApiV1ReportDateRetrieveResponse200 + """ return ( - await async_detailed( + await asyncio_detailed( date=date, client=client, ) diff --git a/osidb_bindings/bindings/python_client/api/exploits/exploits_api_v1_report_explanations_retrieve.py b/osidb_bindings/bindings/python_client/api/exploits/exploits_api_v1_report_explanations_retrieve.py index 0d1023e..9270795 100644 --- a/osidb_bindings/bindings/python_client/api/exploits/exploits_api_v1_report_explanations_retrieve.py +++ b/osidb_bindings/bindings/python_client/api/exploits/exploits_api_v1_report_explanations_retrieve.py @@ -1,8 +1,9 @@ -from typing import Any, Dict, Optional +from http import HTTPStatus +from typing import Any, Optional, Union import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.exploits_api_v1_report_explanations_retrieve_response_200 import ( ExploitsApiV1ReportExplanationsRetrieveResponse200, ) @@ -14,44 +15,37 @@ def _get_kwargs( *, client: AuthenticatedClient, -) -> Dict[str, Any]: - url = "{}/exploits/api/v1/report/explanations".format( - client.base_url, - ) - - headers: Dict[str, Any] = client.get_headers() - - return { - "url": url, - "headers": headers, +) -> dict[str, Any]: + _kwargs: dict[str, Any] = { + "url": f"{client.base_url}/exploits/api/v1/report/explanations", } + return _kwargs + def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[ExploitsApiV1ReportExplanationsRetrieveResponse200]: if response.status_code == 200: + # } _response_200 = response.json() response_200: ExploitsApiV1ReportExplanationsRetrieveResponse200 if isinstance(_response_200, Unset): response_200 = UNSET else: - response_200 = ExploitsApiV1ReportExplanationsRetrieveResponse200.from_dict( - _response_200 - ) + response_200 = ExploitsApiV1ReportExplanationsRetrieveResponse200.from_dict(_response_200) return response_200 - return None def _build_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[ExploitsApiV1ReportExplanationsRetrieveResponse200]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) @@ -59,6 +53,25 @@ def sync_detailed( *, client: AuthenticatedClient, ) -> Response[ExploitsApiV1ReportExplanationsRetrieveResponse200]: + r"""API endpoint for getting a report of all CVEs with exploit and their status + for Incident Response. + + Format of results: + ``` + { + \"page_size\": <Number of CVEs on the page>, + \"explanations\": [<List of CVEs with exploit together with current status explanation>], + } + ``` + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[ExploitsApiV1ReportExplanationsRetrieveResponse200] + """ + kwargs = _get_kwargs( client=client, ) @@ -71,14 +84,14 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( *, client: AuthenticatedClient, ) -> Optional[ExploitsApiV1ReportExplanationsRetrieveResponse200]: - """API endpoint for getting a report of all CVEs with exploit and their status + r"""API endpoint for getting a report of all CVEs with exploit and their status for Incident Response. Format of results: @@ -87,17 +100,44 @@ def sync( \"page_size\": <Number of CVEs on the page>, \"explanations\": [<List of CVEs with exploit together with current status explanation>], } - ```""" + ``` + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + ExploitsApiV1ReportExplanationsRetrieveResponse200 + """ return sync_detailed( client=client, ).parsed -async def async_detailed( +async def asyncio_detailed( *, client: AuthenticatedClient, ) -> Response[ExploitsApiV1ReportExplanationsRetrieveResponse200]: + r"""API endpoint for getting a report of all CVEs with exploit and their status + for Incident Response. + + Format of results: + ``` + { + \"page_size\": <Number of CVEs on the page>, + \"explanations\": [<List of CVEs with exploit together with current status explanation>], + } + ``` + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[ExploitsApiV1ReportExplanationsRetrieveResponse200] + """ + kwargs = _get_kwargs( client=client, ) @@ -110,14 +150,14 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( +async def asyncio( *, client: AuthenticatedClient, ) -> Optional[ExploitsApiV1ReportExplanationsRetrieveResponse200]: - """API endpoint for getting a report of all CVEs with exploit and their status + r"""API endpoint for getting a report of all CVEs with exploit and their status for Incident Response. Format of results: @@ -126,10 +166,18 @@ async def async_( \"page_size\": <Number of CVEs on the page>, \"explanations\": [<List of CVEs with exploit together with current status explanation>], } - ```""" + ``` + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + ExploitsApiV1ReportExplanationsRetrieveResponse200 + """ return ( - await async_detailed( + await asyncio_detailed( client=client, ) ).parsed diff --git a/osidb_bindings/bindings/python_client/api/exploits/exploits_api_v1_report_pending_retrieve.py b/osidb_bindings/bindings/python_client/api/exploits/exploits_api_v1_report_pending_retrieve.py index 6acc1a7..a660609 100644 --- a/osidb_bindings/bindings/python_client/api/exploits/exploits_api_v1_report_pending_retrieve.py +++ b/osidb_bindings/bindings/python_client/api/exploits/exploits_api_v1_report_pending_retrieve.py @@ -1,8 +1,9 @@ -from typing import Any, Dict, Optional +from http import HTTPStatus +from typing import Any, Optional, Union import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.exploits_api_v1_report_pending_retrieve_response_200 import ( ExploitsApiV1ReportPendingRetrieveResponse200, ) @@ -14,44 +15,37 @@ def _get_kwargs( *, client: AuthenticatedClient, -) -> Dict[str, Any]: - url = "{}/exploits/api/v1/report/pending".format( - client.base_url, - ) - - headers: Dict[str, Any] = client.get_headers() - - return { - "url": url, - "headers": headers, +) -> dict[str, Any]: + _kwargs: dict[str, Any] = { + "url": f"{client.base_url}/exploits/api/v1/report/pending", } + return _kwargs + def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[ExploitsApiV1ReportPendingRetrieveResponse200]: if response.status_code == 200: + # } _response_200 = response.json() response_200: ExploitsApiV1ReportPendingRetrieveResponse200 if isinstance(_response_200, Unset): response_200 = UNSET else: - response_200 = ExploitsApiV1ReportPendingRetrieveResponse200.from_dict( - _response_200 - ) + response_200 = ExploitsApiV1ReportPendingRetrieveResponse200.from_dict(_response_200) return response_200 - return None def _build_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[ExploitsApiV1ReportPendingRetrieveResponse200]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) @@ -59,6 +53,29 @@ def sync_detailed( *, client: AuthenticatedClient, ) -> Response[ExploitsApiV1ReportPendingRetrieveResponse200]: + r"""API endpoint for getting a report of pending actions for Incident Response. + + Format of results: + ``` + { + \"pending_actions\": [<List of affects requiring action>], + \"pending_actions_count\": <Number of affects requiring action>, + } + ``` + + **NOTE:** No pagination is performed on this endpoint as it is expected that the size of + the list of pending actions will be mostly stable. Also, the paging cannot be done on + the query level, as additional analysis of every exploit is required before a decision + to include it in this report is done. + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[ExploitsApiV1ReportPendingRetrieveResponse200] + """ + kwargs = _get_kwargs( client=client, ) @@ -71,14 +88,14 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( *, client: AuthenticatedClient, ) -> Optional[ExploitsApiV1ReportPendingRetrieveResponse200]: - """API endpoint for getting a report of pending actions for Incident Response. + r"""API endpoint for getting a report of pending actions for Incident Response. Format of results: ``` @@ -91,17 +108,48 @@ def sync( **NOTE:** No pagination is performed on this endpoint as it is expected that the size of the list of pending actions will be mostly stable. Also, the paging cannot be done on the query level, as additional analysis of every exploit is required before a decision - to include it in this report is done.""" + to include it in this report is done. + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + ExploitsApiV1ReportPendingRetrieveResponse200 + """ return sync_detailed( client=client, ).parsed -async def async_detailed( +async def asyncio_detailed( *, client: AuthenticatedClient, ) -> Response[ExploitsApiV1ReportPendingRetrieveResponse200]: + r"""API endpoint for getting a report of pending actions for Incident Response. + + Format of results: + ``` + { + \"pending_actions\": [<List of affects requiring action>], + \"pending_actions_count\": <Number of affects requiring action>, + } + ``` + + **NOTE:** No pagination is performed on this endpoint as it is expected that the size of + the list of pending actions will be mostly stable. Also, the paging cannot be done on + the query level, as additional analysis of every exploit is required before a decision + to include it in this report is done. + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[ExploitsApiV1ReportPendingRetrieveResponse200] + """ + kwargs = _get_kwargs( client=client, ) @@ -114,14 +162,14 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( +async def asyncio( *, client: AuthenticatedClient, ) -> Optional[ExploitsApiV1ReportPendingRetrieveResponse200]: - """API endpoint for getting a report of pending actions for Incident Response. + r"""API endpoint for getting a report of pending actions for Incident Response. Format of results: ``` @@ -134,10 +182,18 @@ async def async_( **NOTE:** No pagination is performed on this endpoint as it is expected that the size of the list of pending actions will be mostly stable. Also, the paging cannot be done on the query level, as additional analysis of every exploit is required before a decision - to include it in this report is done.""" + to include it in this report is done. + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + ExploitsApiV1ReportPendingRetrieveResponse200 + """ return ( - await async_detailed( + await asyncio_detailed( client=client, ) ).parsed diff --git a/osidb_bindings/bindings/python_client/api/exploits/exploits_api_v1_status_retrieve.py b/osidb_bindings/bindings/python_client/api/exploits/exploits_api_v1_status_retrieve.py index 0d7b4b2..2b83ad2 100644 --- a/osidb_bindings/bindings/python_client/api/exploits/exploits_api_v1_status_retrieve.py +++ b/osidb_bindings/bindings/python_client/api/exploits/exploits_api_v1_status_retrieve.py @@ -1,8 +1,9 @@ -from typing import Any, Dict, Optional +from http import HTTPStatus +from typing import Any, Optional, Union import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.exploits_api_v1_status_retrieve_response_200 import ( ExploitsApiV1StatusRetrieveResponse200, ) @@ -14,44 +15,37 @@ def _get_kwargs( *, client: AuthenticatedClient, -) -> Dict[str, Any]: - url = "{}/exploits/api/v1/status".format( - client.base_url, - ) - - headers: Dict[str, Any] = client.get_headers() - - return { - "url": url, - "headers": headers, +) -> dict[str, Any]: + _kwargs: dict[str, Any] = { + "url": f"{client.base_url}/exploits/api/v1/status", } + return _kwargs + def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[ExploitsApiV1StatusRetrieveResponse200]: if response.status_code == 200: + # } _response_200 = response.json() response_200: ExploitsApiV1StatusRetrieveResponse200 if isinstance(_response_200, Unset): response_200 = UNSET else: - response_200 = ExploitsApiV1StatusRetrieveResponse200.from_dict( - _response_200 - ) + response_200 = ExploitsApiV1StatusRetrieveResponse200.from_dict(_response_200) return response_200 - return None def _build_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[ExploitsApiV1StatusRetrieveResponse200]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) @@ -59,6 +53,18 @@ def sync_detailed( *, client: AuthenticatedClient, ) -> Response[ExploitsApiV1StatusRetrieveResponse200]: + """API endpoint for getting basic information about exploits in the database. + + **NOTE:** Everyone is allowed to see basic information. + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[ExploitsApiV1StatusRetrieveResponse200] + """ + kwargs = _get_kwargs( client=client, ) @@ -71,7 +77,7 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( @@ -80,17 +86,37 @@ def sync( ) -> Optional[ExploitsApiV1StatusRetrieveResponse200]: """API endpoint for getting basic information about exploits in the database. - **NOTE:** Everyone is allowed to see basic information.""" + **NOTE:** Everyone is allowed to see basic information. + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. 
+ httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + ExploitsApiV1StatusRetrieveResponse200 + """ return sync_detailed( client=client, ).parsed -async def async_detailed( +async def asyncio_detailed( *, client: AuthenticatedClient, ) -> Response[ExploitsApiV1StatusRetrieveResponse200]: + """API endpoint for getting basic information about exploits in the database. + + **NOTE:** Everyone is allowed to see basic information. + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[ExploitsApiV1StatusRetrieveResponse200] + """ + kwargs = _get_kwargs( client=client, ) @@ -103,19 +129,27 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( +async def asyncio( *, client: AuthenticatedClient, ) -> Optional[ExploitsApiV1StatusRetrieveResponse200]: """API endpoint for getting basic information about exploits in the database. - **NOTE:** Everyone is allowed to see basic information.""" + **NOTE:** Everyone is allowed to see basic information. + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + ExploitsApiV1StatusRetrieveResponse200 + """ return ( - await async_detailed( + await asyncio_detailed( client=client, ) ).parsed diff --git a/osidb_bindings/bindings/python_client/api/exploits/exploits_api_v1_supported_products_list.py b/osidb_bindings/bindings/python_client/api/exploits/exploits_api_v1_supported_products_list.py index a11f2f0..742fefe 100644 --- a/osidb_bindings/bindings/python_client/api/exploits/exploits_api_v1_supported_products_list.py +++ b/osidb_bindings/bindings/python_client/api/exploits/exploits_api_v1_supported_products_list.py @@ -1,8 +1,9 @@ -from typing import Any, Dict, Optional, Union +from http import HTTPStatus +from typing import Any, Optional, Union import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.exploits_api_v1_supported_products_list_response_200 import ( ExploitsApiV1SupportedProductsListResponse200, ) @@ -17,62 +18,71 @@ def _get_kwargs( *, client: AuthenticatedClient, - limit: Union[Unset, None, int] = UNSET, - offset: Union[Unset, None, int] = UNSET, -) -> Dict[str, Any]: - url = "{}/exploits/api/v1/supported-products".format( - client.base_url, - ) + limit: Union[Unset, int] = UNSET, + offset: Union[Unset, int] = UNSET, +) -> dict[str, Any]: + params: dict[str, Any] = {} - headers: Dict[str, Any] = client.get_headers() + params["limit"] = limit + + params["offset"] = offset - params: Dict[str, Any] = { - "limit": limit, - "offset": offset, - } params = {k: v for k, v in params.items() if v is not UNSET and v is not None} - return { - "url": url, - "headers": headers, + _kwargs: dict[str, Any] = { + "url": f"{client.base_url}/exploits/api/v1/supported-products", "params": params, } + return _kwargs + def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[ExploitsApiV1SupportedProductsListResponse200]: if response.status_code == 200: + # } _response_200 = response.json() 
response_200: ExploitsApiV1SupportedProductsListResponse200 if isinstance(_response_200, Unset): response_200 = UNSET else: - response_200 = ExploitsApiV1SupportedProductsListResponse200.from_dict( - _response_200 - ) + response_200 = ExploitsApiV1SupportedProductsListResponse200.from_dict(_response_200) return response_200 - return None def _build_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[ExploitsApiV1SupportedProductsListResponse200]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) def sync_detailed( *, client: AuthenticatedClient, - limit: Union[Unset, None, int] = UNSET, - offset: Union[Unset, None, int] = UNSET, + limit: Union[Unset, int] = UNSET, + offset: Union[Unset, int] = UNSET, ) -> Response[ExploitsApiV1SupportedProductsListResponse200]: + """API endpoint for getting a list of all supported products. + + Args: + limit (Union[Unset, int]): + offset (Union[Unset, int]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[ExploitsApiV1SupportedProductsListResponse200] + """ + kwargs = _get_kwargs( client=client, limit=limit, @@ -87,16 +97,28 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( *, client: AuthenticatedClient, - limit: Union[Unset, None, int] = UNSET, - offset: Union[Unset, None, int] = UNSET, + limit: Union[Unset, int] = UNSET, + offset: Union[Unset, int] = UNSET, ) -> Optional[ExploitsApiV1SupportedProductsListResponse200]: - """API endpoint for getting a list of all supported products.""" + """API endpoint for getting a list of all supported products. + + Args: + limit (Union[Unset, int]): + offset (Union[Unset, int]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + ExploitsApiV1SupportedProductsListResponse200 + """ return sync_detailed( client=client, @@ -105,12 +127,26 @@ def sync( ).parsed -async def async_detailed( +async def asyncio_detailed( *, client: AuthenticatedClient, - limit: Union[Unset, None, int] = UNSET, - offset: Union[Unset, None, int] = UNSET, + limit: Union[Unset, int] = UNSET, + offset: Union[Unset, int] = UNSET, ) -> Response[ExploitsApiV1SupportedProductsListResponse200]: + """API endpoint for getting a list of all supported products. + + Args: + limit (Union[Unset, int]): + offset (Union[Unset, int]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[ExploitsApiV1SupportedProductsListResponse200] + """ + kwargs = _get_kwargs( client=client, limit=limit, @@ -125,19 +161,31 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( +async def asyncio( *, client: AuthenticatedClient, - limit: Union[Unset, None, int] = UNSET, - offset: Union[Unset, None, int] = UNSET, + limit: Union[Unset, int] = UNSET, + offset: Union[Unset, int] = UNSET, ) -> Optional[ExploitsApiV1SupportedProductsListResponse200]: - """API endpoint for getting a list of all supported products.""" + """API endpoint for getting a list of all supported products. + + Args: + limit (Union[Unset, int]): + offset (Union[Unset, int]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + ExploitsApiV1SupportedProductsListResponse200 + """ return ( - await async_detailed( + await asyncio_detailed( client=client, limit=limit, offset=offset, diff --git a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_affects_bulk_create.py b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_affects_bulk_create.py index 9c0bf7d..a0e50c1 100644 --- a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_affects_bulk_create.py +++ b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_affects_bulk_create.py @@ -1,8 +1,9 @@ -from typing import Any, Dict, List, Optional +from http import HTTPStatus +from typing import Any, Optional, Union import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.affect_post import AffectPost from ...models.osidb_api_v1_affects_bulk_create_response_200 import ( OsidbApiV1AffectsBulkCreateResponse200, @@ -10,86 +11,97 @@ from ...types import UNSET, Response, Unset QUERY_PARAMS = {} -REQUEST_BODY_TYPE = List[AffectPost] + +REQUEST_BODY_TYPE = list["AffectPost"] def _get_kwargs( *, client: AuthenticatedClient, - multipart_data: List[AffectPost], - json_body: List[AffectPost], -) -> Dict[str, Any]: - url = "{}/osidb/api/v1/affects/bulk".format( - client.base_url, - ) - - headers: Dict[str, Any] = client.get_headers() - - json_json_body: List[Dict[str, Any]] = UNSET - if not isinstance(json_body, Unset): - json_json_body = [] - for json_body_item_data in json_body: - json_body_item: Dict[str, Any] = UNSET - if not isinstance(json_body_item_data, Unset): - json_body_item = json_body_item_data.to_dict() + body: Union[ + list["AffectPost"], + list["AffectPost"], + list["AffectPost"], + ], +) -> dict[str, Any]: + headers: dict[str, Any] = client.get_headers() + + _kwargs: dict[str, Any] = { + "url": f"{client.base_url}/osidb/api/v1/affects/bulk", + } - json_json_body.append(json_body_item) + if isinstance(body, list["AffectPost"]): + _json_body: list[dict[str, Any]] = UNSET + if not isinstance(body, Unset): + _json_body = [] + for body_item_data in body: + body_item: dict[str, Any] = UNSET + if not isinstance(body_item_data, Unset): + body_item = body_item_data.to_dict() - multipart_multipart_data: List[Dict[str, Any]] = UNSET - if not isinstance(multipart_data, Unset): - multipart_multipart_data = [] - for multipart_data_item_data in multipart_data: - multipart_data_item: Dict[str, Any] = UNSET - if not isinstance(multipart_data_item_data, 
Unset): - multipart_data_item = multipart_data_item_data.to_dict() + _json_body.append(body_item) - multipart_multipart_data.append(multipart_data_item) + _kwargs["json"] = _json_body + headers["Content-Type"] = "application/json" - return { - "url": url, - "headers": headers, - "json": json_json_body, - } + _kwargs["headers"] = headers + return _kwargs def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[OsidbApiV1AffectsBulkCreateResponse200]: if response.status_code == 200: + # } _response_200 = response.json() response_200: OsidbApiV1AffectsBulkCreateResponse200 if isinstance(_response_200, Unset): response_200 = UNSET else: - response_200 = OsidbApiV1AffectsBulkCreateResponse200.from_dict( - _response_200 - ) + response_200 = OsidbApiV1AffectsBulkCreateResponse200.from_dict(_response_200) return response_200 - return None def _build_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[OsidbApiV1AffectsBulkCreateResponse200]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) def sync_detailed( *, client: AuthenticatedClient, - multipart_data: List[AffectPost], - json_body: List[AffectPost], + body: Union[ + list["AffectPost"], + list["AffectPost"], + list["AffectPost"], + ], ) -> Response[OsidbApiV1AffectsBulkCreateResponse200]: + """Bulk create endpoint. Expects a list of dict Affect objects. + + Args: + bugzilla_api_key (str): + body (list['AffectPost']): + body (list['AffectPost']): + body (list['AffectPost']): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1AffectsBulkCreateResponse200] + """ + kwargs = _get_kwargs( client=client, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) response = requests.post( @@ -100,34 +112,68 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( *, client: AuthenticatedClient, - multipart_data: List[AffectPost], - json_body: List[AffectPost], + body: Union[ + list["AffectPost"], + list["AffectPost"], + list["AffectPost"], + ], ) -> Optional[OsidbApiV1AffectsBulkCreateResponse200]: - """Bulk create endpoint. Expects a list of dict Affect objects.""" + """Bulk create endpoint. Expects a list of dict Affect objects. + + Args: + bugzilla_api_key (str): + body (list['AffectPost']): + body (list['AffectPost']): + body (list['AffectPost']): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + OsidbApiV1AffectsBulkCreateResponse200 + """ return sync_detailed( client=client, - multipart_data=multipart_data, - json_body=json_body, + body=body, ).parsed -async def async_detailed( +async def asyncio_detailed( *, client: AuthenticatedClient, - multipart_data: List[AffectPost], - json_body: List[AffectPost], + body: Union[ + list["AffectPost"], + list["AffectPost"], + list["AffectPost"], + ], ) -> Response[OsidbApiV1AffectsBulkCreateResponse200]: + """Bulk create endpoint. Expects a list of dict Affect objects. + + Args: + bugzilla_api_key (str): + body (list['AffectPost']): + body (list['AffectPost']): + body (list['AffectPost']): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1AffectsBulkCreateResponse200] + """ + kwargs = _get_kwargs( client=client, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) async with client.get_async_session().post( @@ -138,21 +184,37 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( +async def asyncio( *, client: AuthenticatedClient, - multipart_data: List[AffectPost], - json_body: List[AffectPost], + body: Union[ + list["AffectPost"], + list["AffectPost"], + list["AffectPost"], + ], ) -> Optional[OsidbApiV1AffectsBulkCreateResponse200]: - """Bulk create endpoint. Expects a list of dict Affect objects.""" + """Bulk create endpoint. Expects a list of dict Affect objects. + + Args: + bugzilla_api_key (str): + body (list['AffectPost']): + body (list['AffectPost']): + body (list['AffectPost']): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + OsidbApiV1AffectsBulkCreateResponse200 + """ return ( - await async_detailed( + await asyncio_detailed( client=client, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) ).parsed diff --git a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_affects_bulk_destroy.py b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_affects_bulk_destroy.py index 68909da..4180de0 100644 --- a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_affects_bulk_destroy.py +++ b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_affects_bulk_destroy.py @@ -1,8 +1,9 @@ -from typing import Any, Dict, Optional +from http import HTTPStatus +from typing import Any, Optional, Union import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.osidb_api_v1_affects_bulk_destroy_response_200 import ( OsidbApiV1AffectsBulkDestroyResponse200, ) @@ -14,44 +15,40 @@ def _get_kwargs( *, client: AuthenticatedClient, -) -> Dict[str, Any]: - url = "{}/osidb/api/v1/affects/bulk".format( - client.base_url, - ) - - headers: Dict[str, Any] = client.get_headers() +) -> dict[str, Any]: + headers: dict[str, Any] = client.get_headers() - return { - "url": url, - "headers": headers, + _kwargs: dict[str, Any] = { + "url": f"{client.base_url}/osidb/api/v1/affects/bulk", } + _kwargs["headers"] = headers + return _kwargs + def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[OsidbApiV1AffectsBulkDestroyResponse200]: if response.status_code == 200: + # } _response_200 = response.json() response_200: OsidbApiV1AffectsBulkDestroyResponse200 if isinstance(_response_200, Unset): response_200 = UNSET else: - response_200 = OsidbApiV1AffectsBulkDestroyResponse200.from_dict( - _response_200 - ) + response_200 = OsidbApiV1AffectsBulkDestroyResponse200.from_dict(_response_200) return response_200 - return None def _build_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[OsidbApiV1AffectsBulkDestroyResponse200]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) @@ -59,6 +56,19 @@ def sync_detailed( *, client: AuthenticatedClient, ) -> Response[OsidbApiV1AffectsBulkDestroyResponse200]: + """Bulk delete endpoint. Expects a list of Affect uuids. + + Args: + bugzilla_api_key (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1AffectsBulkDestroyResponse200] + """ + kwargs = _get_kwargs( client=client, ) @@ -71,24 +81,48 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( *, client: AuthenticatedClient, ) -> Optional[OsidbApiV1AffectsBulkDestroyResponse200]: - """Bulk delete endpoint. Expects a list of Affect uuids.""" + """Bulk delete endpoint. Expects a list of Affect uuids. 
+ + Args: + bugzilla_api_key (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + OsidbApiV1AffectsBulkDestroyResponse200 + """ return sync_detailed( client=client, ).parsed -async def async_detailed( +async def asyncio_detailed( *, client: AuthenticatedClient, ) -> Response[OsidbApiV1AffectsBulkDestroyResponse200]: + """Bulk delete endpoint. Expects a list of Affect uuids. + + Args: + bugzilla_api_key (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1AffectsBulkDestroyResponse200] + """ + kwargs = _get_kwargs( client=client, ) @@ -101,17 +135,28 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( +async def asyncio( *, client: AuthenticatedClient, ) -> Optional[OsidbApiV1AffectsBulkDestroyResponse200]: - """Bulk delete endpoint. Expects a list of Affect uuids.""" + """Bulk delete endpoint. Expects a list of Affect uuids. + + Args: + bugzilla_api_key (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + OsidbApiV1AffectsBulkDestroyResponse200 + """ return ( - await async_detailed( + await asyncio_detailed( client=client, ) ).parsed diff --git a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_affects_bulk_update.py b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_affects_bulk_update.py index e67ccfe..984d14a 100644 --- a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_affects_bulk_update.py +++ b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_affects_bulk_update.py @@ -1,8 +1,9 @@ -from typing import Any, Dict, List, Optional +from http import HTTPStatus +from typing import Any, Optional, Union import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.affect_bulk_put import AffectBulkPut from ...models.osidb_api_v1_affects_bulk_update_response_200 import ( OsidbApiV1AffectsBulkUpdateResponse200, @@ -10,86 +11,98 @@ from ...types import UNSET, Response, Unset QUERY_PARAMS = {} -REQUEST_BODY_TYPE = List[AffectBulkPut] + +REQUEST_BODY_TYPE = list["AffectBulkPut"] def _get_kwargs( *, client: AuthenticatedClient, - multipart_data: List[AffectBulkPut], - json_body: List[AffectBulkPut], -) -> Dict[str, Any]: - url = "{}/osidb/api/v1/affects/bulk".format( - client.base_url, - ) - - headers: Dict[str, Any] = client.get_headers() - - json_json_body: List[Dict[str, Any]] = UNSET - if not isinstance(json_body, Unset): - json_json_body = [] - for json_body_item_data in json_body: - json_body_item: Dict[str, Any] = UNSET - if not isinstance(json_body_item_data, Unset): - json_body_item = json_body_item_data.to_dict() + body: Union[ + list["AffectBulkPut"], + list["AffectBulkPut"], + list["AffectBulkPut"], + ], +) -> dict[str, Any]: + headers: dict[str, Any] = client.get_headers() + + _kwargs: dict[str, Any] = { + "url": f"{client.base_url}/osidb/api/v1/affects/bulk", + } - 
json_json_body.append(json_body_item) + if isinstance(body, list["AffectBulkPut"]): + _json_body: list[dict[str, Any]] = UNSET + if not isinstance(body, Unset): + _json_body = [] + for body_item_data in body: + body_item: dict[str, Any] = UNSET + if not isinstance(body_item_data, Unset): + body_item = body_item_data.to_dict() - multipart_multipart_data: List[Dict[str, Any]] = UNSET - if not isinstance(multipart_data, Unset): - multipart_multipart_data = [] - for multipart_data_item_data in multipart_data: - multipart_data_item: Dict[str, Any] = UNSET - if not isinstance(multipart_data_item_data, Unset): - multipart_data_item = multipart_data_item_data.to_dict() + _json_body.append(body_item) - multipart_multipart_data.append(multipart_data_item) + _kwargs["json"] = _json_body + headers["Content-Type"] = "application/json" - return { - "url": url, - "headers": headers, - "json": json_json_body, - } + _kwargs["headers"] = headers + return _kwargs def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[OsidbApiV1AffectsBulkUpdateResponse200]: if response.status_code == 200: + # } _response_200 = response.json() response_200: OsidbApiV1AffectsBulkUpdateResponse200 if isinstance(_response_200, Unset): response_200 = UNSET else: - response_200 = OsidbApiV1AffectsBulkUpdateResponse200.from_dict( - _response_200 - ) + response_200 = OsidbApiV1AffectsBulkUpdateResponse200.from_dict(_response_200) return response_200 - return None def _build_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[OsidbApiV1AffectsBulkUpdateResponse200]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) def sync_detailed( *, client: AuthenticatedClient, - multipart_data: List[AffectBulkPut], - json_body: List[AffectBulkPut], + body: Union[ + list["AffectBulkPut"], + list["AffectBulkPut"], + list["AffectBulkPut"], + ], ) -> Response[OsidbApiV1AffectsBulkUpdateResponse200]: + """Bulk update endpoint. Expects a list of dict Affect objects. + + Args: + bugzilla_api_key (str): + jira_api_key (str): + body (list['AffectBulkPut']): + body (list['AffectBulkPut']): + body (list['AffectBulkPut']): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1AffectsBulkUpdateResponse200] + """ + kwargs = _get_kwargs( client=client, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) response = requests.put( @@ -100,34 +113,70 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( *, client: AuthenticatedClient, - multipart_data: List[AffectBulkPut], - json_body: List[AffectBulkPut], + body: Union[ + list["AffectBulkPut"], + list["AffectBulkPut"], + list["AffectBulkPut"], + ], ) -> Optional[OsidbApiV1AffectsBulkUpdateResponse200]: - """Bulk update endpoint. Expects a list of dict Affect objects.""" + """Bulk update endpoint. Expects a list of dict Affect objects. 
+ + Args: + bugzilla_api_key (str): + jira_api_key (str): + body (list['AffectBulkPut']): + body (list['AffectBulkPut']): + body (list['AffectBulkPut']): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + OsidbApiV1AffectsBulkUpdateResponse200 + """ return sync_detailed( client=client, - multipart_data=multipart_data, - json_body=json_body, + body=body, ).parsed -async def async_detailed( +async def asyncio_detailed( *, client: AuthenticatedClient, - multipart_data: List[AffectBulkPut], - json_body: List[AffectBulkPut], + body: Union[ + list["AffectBulkPut"], + list["AffectBulkPut"], + list["AffectBulkPut"], + ], ) -> Response[OsidbApiV1AffectsBulkUpdateResponse200]: + """Bulk update endpoint. Expects a list of dict Affect objects. + + Args: + bugzilla_api_key (str): + jira_api_key (str): + body (list['AffectBulkPut']): + body (list['AffectBulkPut']): + body (list['AffectBulkPut']): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1AffectsBulkUpdateResponse200] + """ + kwargs = _get_kwargs( client=client, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) async with client.get_async_session().put( @@ -138,21 +187,38 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( +async def asyncio( *, client: AuthenticatedClient, - multipart_data: List[AffectBulkPut], - json_body: List[AffectBulkPut], + body: Union[ + list["AffectBulkPut"], + list["AffectBulkPut"], + list["AffectBulkPut"], + ], ) -> Optional[OsidbApiV1AffectsBulkUpdateResponse200]: - """Bulk update endpoint. Expects a list of dict Affect objects.""" + """Bulk update endpoint. Expects a list of dict Affect objects. + + Args: + bugzilla_api_key (str): + jira_api_key (str): + body (list['AffectBulkPut']): + body (list['AffectBulkPut']): + body (list['AffectBulkPut']): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + OsidbApiV1AffectsBulkUpdateResponse200 + """ return ( - await async_detailed( + await asyncio_detailed( client=client, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) ).parsed diff --git a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_affects_create.py b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_affects_create.py index b98bd1f..43e0ca2 100644 --- a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_affects_create.py +++ b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_affects_create.py @@ -1,8 +1,9 @@ -from typing import Any, Dict, Optional +from http import HTTPStatus +from typing import Any, Optional, Union import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.affect_post import AffectPost from ...models.osidb_api_v1_affects_create_response_201 import ( OsidbApiV1AffectsCreateResponse201, @@ -10,41 +11,42 @@ from ...types import UNSET, Response, Unset QUERY_PARAMS = {} + REQUEST_BODY_TYPE = AffectPost def _get_kwargs( *, client: AuthenticatedClient, - form_data: AffectPost, - multipart_data: AffectPost, - json_body: AffectPost, -) -> Dict[str, Any]: - url = "{}/osidb/api/v1/affects".format( - client.base_url, - ) + body: Union[ + AffectPost, + AffectPost, + AffectPost, + ], +) -> dict[str, Any]: + headers: dict[str, Any] = client.get_headers() + + _kwargs: dict[str, Any] = { + "url": f"{client.base_url}/osidb/api/v1/affects", + } - headers: Dict[str, Any] = client.get_headers() + if isinstance(body, AffectPost): + _json_body: dict[str, Any] = UNSET + if not isinstance(body, Unset): + _json_body = body.to_dict() - json_json_body: Dict[str, Any] = UNSET - if not isinstance(json_body, Unset): - json_body.to_dict() + _kwargs["json"] = _json_body + headers["Content-Type"] = "application/json" - multipart_multipart_data: Dict[str, Any] = UNSET - if not isinstance(multipart_data, Unset): - multipart_data.to_multipart() - - return { - "url": url, - "headers": headers, - "json": form_data.to_dict(), - } + _kwargs["headers"] = headers + return _kwargs def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[OsidbApiV1AffectsCreateResponse201]: if response.status_code == 201: + # } _response_201 = response.json() response_201: OsidbApiV1AffectsCreateResponse201 if isinstance(_response_201, Unset): @@ -53,32 +55,47 @@ def _parse_response( response_201 = OsidbApiV1AffectsCreateResponse201.from_dict(_response_201) return response_201 - return None def _build_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[OsidbApiV1AffectsCreateResponse201]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) def sync_detailed( *, client: AuthenticatedClient, - form_data: AffectPost, - multipart_data: AffectPost, - json_body: AffectPost, + body: Union[ + AffectPost, + AffectPost, + AffectPost, + ], ) -> Response[OsidbApiV1AffectsCreateResponse201]: + """ + Args: + bugzilla_api_key (str): + jira_api_key (str): + body (AffectPost): Affect serializer + body (AffectPost): Affect serializer + body (AffectPost): Affect serializer + + Raises: + errors.UnexpectedStatus: If the server 
returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1AffectsCreateResponse201] + """ + kwargs = _get_kwargs( client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) response = requests.post( @@ -89,38 +106,68 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( *, client: AuthenticatedClient, - form_data: AffectPost, - multipart_data: AffectPost, - json_body: AffectPost, + body: Union[ + AffectPost, + AffectPost, + AffectPost, + ], ) -> Optional[OsidbApiV1AffectsCreateResponse201]: - """ """ + """ + Args: + bugzilla_api_key (str): + jira_api_key (str): + body (AffectPost): Affect serializer + body (AffectPost): Affect serializer + body (AffectPost): Affect serializer + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + OsidbApiV1AffectsCreateResponse201 + """ return sync_detailed( client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ).parsed -async def async_detailed( +async def asyncio_detailed( *, client: AuthenticatedClient, - form_data: AffectPost, - multipart_data: AffectPost, - json_body: AffectPost, + body: Union[ + AffectPost, + AffectPost, + AffectPost, + ], ) -> Response[OsidbApiV1AffectsCreateResponse201]: + """ + Args: + bugzilla_api_key (str): + jira_api_key (str): + body (AffectPost): Affect serializer + body (AffectPost): Affect serializer + body (AffectPost): Affect serializer + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1AffectsCreateResponse201] + """ + kwargs = _get_kwargs( client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) async with client.get_async_session().post( @@ -131,23 +178,37 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( +async def asyncio( *, client: AuthenticatedClient, - form_data: AffectPost, - multipart_data: AffectPost, - json_body: AffectPost, + body: Union[ + AffectPost, + AffectPost, + AffectPost, + ], ) -> Optional[OsidbApiV1AffectsCreateResponse201]: - """ """ + """ + Args: + bugzilla_api_key (str): + jira_api_key (str): + body (AffectPost): Affect serializer + body (AffectPost): Affect serializer + body (AffectPost): Affect serializer + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + OsidbApiV1AffectsCreateResponse201 + """ return ( - await async_detailed( + await asyncio_detailed( client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) ).parsed diff --git a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_affects_cvss_scores_create.py b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_affects_cvss_scores_create.py index 7ef31d1..bc55a71 100644 --- a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_affects_cvss_scores_create.py +++ b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_affects_cvss_scores_create.py @@ -1,8 +1,10 @@ -from typing import Any, Dict, Optional +from http import HTTPStatus +from typing import Any, Optional, Union +from uuid import UUID import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.affect_cvss_post import AffectCVSSPost from ...models.osidb_api_v1_affects_cvss_scores_create_response_201 import ( OsidbApiV1AffectsCvssScoresCreateResponse201, @@ -10,81 +12,96 @@ from ...types import UNSET, Response, Unset QUERY_PARAMS = {} + REQUEST_BODY_TYPE = AffectCVSSPost def _get_kwargs( - affect_id: str, + affect_id: UUID, *, client: AuthenticatedClient, - form_data: AffectCVSSPost, - multipart_data: AffectCVSSPost, - json_body: AffectCVSSPost, -) -> Dict[str, Any]: - url = "{}/osidb/api/v1/affects/{affect_id}/cvss_scores".format( - client.base_url, - affect_id=affect_id, - ) - - headers: Dict[str, Any] = client.get_headers() + body: Union[ + AffectCVSSPost, + AffectCVSSPost, + AffectCVSSPost, + ], +) -> dict[str, Any]: + headers: dict[str, Any] = client.get_headers() + + _kwargs: dict[str, Any] = { + "url": f"{client.base_url}//osidb/api/v1/affects/{affect_id}/cvss_scores".format( + affect_id=affect_id, + ), + } - json_json_body: Dict[str, Any] = UNSET - if not isinstance(json_body, Unset): - json_body.to_dict() + if isinstance(body, AffectCVSSPost): + _json_body: dict[str, Any] = UNSET + if not isinstance(body, Unset): + _json_body = body.to_dict() - multipart_multipart_data: Dict[str, Any] = UNSET - if not isinstance(multipart_data, Unset): - multipart_data.to_multipart() + _kwargs["json"] = _json_body + headers["Content-Type"] = "application/json" - return { - "url": url, - "headers": headers, - "json": form_data.to_dict(), - } + _kwargs["headers"] = headers + return _kwargs def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[OsidbApiV1AffectsCvssScoresCreateResponse201]: if response.status_code == 201: + # } _response_201 = response.json() response_201: OsidbApiV1AffectsCvssScoresCreateResponse201 if isinstance(_response_201, Unset): response_201 = UNSET else: - response_201 = OsidbApiV1AffectsCvssScoresCreateResponse201.from_dict( - _response_201 - ) + response_201 = OsidbApiV1AffectsCvssScoresCreateResponse201.from_dict(_response_201) return response_201 - return None def _build_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[OsidbApiV1AffectsCvssScoresCreateResponse201]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) def sync_detailed( - affect_id: str, + affect_id: UUID, *, 
client: AuthenticatedClient, - form_data: AffectCVSSPost, - multipart_data: AffectCVSSPost, - json_body: AffectCVSSPost, + body: Union[ + AffectCVSSPost, + AffectCVSSPost, + AffectCVSSPost, + ], ) -> Response[OsidbApiV1AffectsCvssScoresCreateResponse201]: + """ + Args: + affect_id (UUID): + bugzilla_api_key (str): + body (AffectCVSSPost): AffectCVSS serializer + body (AffectCVSSPost): AffectCVSS serializer + body (AffectCVSSPost): AffectCVSS serializer + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1AffectsCvssScoresCreateResponse201] + """ + kwargs = _get_kwargs( affect_id=affect_id, client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) response = requests.post( @@ -95,42 +112,72 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( - affect_id: str, + affect_id: UUID, *, client: AuthenticatedClient, - form_data: AffectCVSSPost, - multipart_data: AffectCVSSPost, - json_body: AffectCVSSPost, + body: Union[ + AffectCVSSPost, + AffectCVSSPost, + AffectCVSSPost, + ], ) -> Optional[OsidbApiV1AffectsCvssScoresCreateResponse201]: - """ """ + """ + Args: + affect_id (UUID): + bugzilla_api_key (str): + body (AffectCVSSPost): AffectCVSS serializer + body (AffectCVSSPost): AffectCVSS serializer + body (AffectCVSSPost): AffectCVSS serializer + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + OsidbApiV1AffectsCvssScoresCreateResponse201 + """ return sync_detailed( affect_id=affect_id, client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ).parsed -async def async_detailed( - affect_id: str, +async def asyncio_detailed( + affect_id: UUID, *, client: AuthenticatedClient, - form_data: AffectCVSSPost, - multipart_data: AffectCVSSPost, - json_body: AffectCVSSPost, + body: Union[ + AffectCVSSPost, + AffectCVSSPost, + AffectCVSSPost, + ], ) -> Response[OsidbApiV1AffectsCvssScoresCreateResponse201]: + """ + Args: + affect_id (UUID): + bugzilla_api_key (str): + body (AffectCVSSPost): AffectCVSS serializer + body (AffectCVSSPost): AffectCVSS serializer + body (AffectCVSSPost): AffectCVSS serializer + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[OsidbApiV1AffectsCvssScoresCreateResponse201] + """ + kwargs = _get_kwargs( affect_id=affect_id, client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) async with client.get_async_session().post( @@ -141,25 +188,39 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( - affect_id: str, +async def asyncio( + affect_id: UUID, *, client: AuthenticatedClient, - form_data: AffectCVSSPost, - multipart_data: AffectCVSSPost, - json_body: AffectCVSSPost, + body: Union[ + AffectCVSSPost, + AffectCVSSPost, + AffectCVSSPost, + ], ) -> Optional[OsidbApiV1AffectsCvssScoresCreateResponse201]: - """ """ + """ + Args: + affect_id (UUID): + bugzilla_api_key (str): + body (AffectCVSSPost): AffectCVSS serializer + body (AffectCVSSPost): AffectCVSS serializer + body (AffectCVSSPost): AffectCVSS serializer + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + OsidbApiV1AffectsCvssScoresCreateResponse201 + """ return ( - await async_detailed( + await asyncio_detailed( affect_id=affect_id, client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) ).parsed diff --git a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_affects_cvss_scores_destroy.py b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_affects_cvss_scores_destroy.py index 9fb9fd3..f57723a 100644 --- a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_affects_cvss_scores_destroy.py +++ b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_affects_cvss_scores_destroy.py @@ -1,8 +1,10 @@ -from typing import Any, Dict, Optional +from http import HTTPStatus +from typing import Any, Optional, Union +from uuid import UUID import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.osidb_api_v1_affects_cvss_scores_destroy_response_200 import ( OsidbApiV1AffectsCvssScoresDestroyResponse200, ) @@ -12,59 +14,71 @@ def _get_kwargs( - affect_id: str, + affect_id: UUID, id: str, *, client: AuthenticatedClient, -) -> Dict[str, Any]: - url = "{}/osidb/api/v1/affects/{affect_id}/cvss_scores/{id}".format( - client.base_url, - affect_id=affect_id, - id=id, - ) +) -> dict[str, Any]: + headers: dict[str, Any] = client.get_headers() - headers: Dict[str, Any] = client.get_headers() - - return { - "url": url, - "headers": headers, + _kwargs: dict[str, Any] = { + "url": f"{client.base_url}//osidb/api/v1/affects/{affect_id}/cvss_scores/{id}".format( + affect_id=affect_id, + id=id, + ), } + _kwargs["headers"] = headers + return _kwargs + def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[OsidbApiV1AffectsCvssScoresDestroyResponse200]: if response.status_code == 200: + # } _response_200 = response.json() response_200: OsidbApiV1AffectsCvssScoresDestroyResponse200 if isinstance(_response_200, Unset): response_200 = UNSET else: - response_200 = OsidbApiV1AffectsCvssScoresDestroyResponse200.from_dict( - _response_200 - ) + response_200 = OsidbApiV1AffectsCvssScoresDestroyResponse200.from_dict(_response_200) return response_200 - return None 
def _build_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[OsidbApiV1AffectsCvssScoresDestroyResponse200]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) def sync_detailed( - affect_id: str, + affect_id: UUID, id: str, *, client: AuthenticatedClient, ) -> Response[OsidbApiV1AffectsCvssScoresDestroyResponse200]: + """Destroy the instance and proxy the delete to Bugzilla. + + Args: + affect_id (UUID): + id (str): + bugzilla_api_key (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1AffectsCvssScoresDestroyResponse200] + """ + kwargs = _get_kwargs( affect_id=affect_id, id=id, @@ -79,16 +93,29 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( - affect_id: str, + affect_id: UUID, id: str, *, client: AuthenticatedClient, ) -> Optional[OsidbApiV1AffectsCvssScoresDestroyResponse200]: - """Destroy the instance and proxy the delete to Bugzilla.""" + """Destroy the instance and proxy the delete to Bugzilla. + + Args: + affect_id (UUID): + id (str): + bugzilla_api_key (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + OsidbApiV1AffectsCvssScoresDestroyResponse200 + """ return sync_detailed( affect_id=affect_id, @@ -97,12 +124,27 @@ def sync( ).parsed -async def async_detailed( - affect_id: str, +async def asyncio_detailed( + affect_id: UUID, id: str, *, client: AuthenticatedClient, ) -> Response[OsidbApiV1AffectsCvssScoresDestroyResponse200]: + """Destroy the instance and proxy the delete to Bugzilla. + + Args: + affect_id (UUID): + id (str): + bugzilla_api_key (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1AffectsCvssScoresDestroyResponse200] + """ + kwargs = _get_kwargs( affect_id=affect_id, id=id, @@ -117,19 +159,32 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( - affect_id: str, +async def asyncio( + affect_id: UUID, id: str, *, client: AuthenticatedClient, ) -> Optional[OsidbApiV1AffectsCvssScoresDestroyResponse200]: - """Destroy the instance and proxy the delete to Bugzilla.""" + """Destroy the instance and proxy the delete to Bugzilla. + + Args: + affect_id (UUID): + id (str): + bugzilla_api_key (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + OsidbApiV1AffectsCvssScoresDestroyResponse200 + """ return ( - await async_detailed( + await asyncio_detailed( affect_id=affect_id, id=id, client=client, diff --git a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_affects_cvss_scores_list.py b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_affects_cvss_scores_list.py index c775b74..03e0e3a 100644 --- a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_affects_cvss_scores_list.py +++ b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_affects_cvss_scores_list.py @@ -1,9 +1,11 @@ import datetime -from typing import Any, Dict, List, Optional, Union +from http import HTTPStatus +from typing import Any, Optional, Union +from uuid import UUID import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.osidb_api_v1_affects_cvss_scores_list_issuer import ( OsidbApiV1AffectsCvssScoresListIssuer, ) @@ -23,8 +25,8 @@ "created_dt__lt": datetime.datetime, "created_dt__lte": datetime.datetime, "cvss_version": str, - "exclude_fields": List[str], - "include_fields": List[str], + "exclude_fields": list[str], + "include_fields": list[str], "issuer": OsidbApiV1AffectsCvssScoresListIssuer, "limit": int, "offset": int, @@ -37,238 +39,283 @@ "updated_dt__gte": datetime.datetime, "updated_dt__lt": datetime.datetime, "updated_dt__lte": datetime.datetime, - "uuid": str, + "uuid": UUID, "vector": str, } def _get_kwargs( - affect_id: str, + affect_id: UUID, *, client: AuthenticatedClient, - comment: Union[Unset, None, str] = UNSET, - created_dt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_date: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - cvss_version: Union[Unset, None, str] = UNSET, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, - issuer: Union[Unset, None, OsidbApiV1AffectsCvssScoresListIssuer] = UNSET, - limit: Union[Unset, None, int] = UNSET, - offset: Union[Unset, None, int] = UNSET, - score: Union[Unset, None, float] = UNSET, - updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - uuid: Union[Unset, None, str] = UNSET, - vector: Union[Unset, None, str] = UNSET, -) -> Dict[str, Any]: - url = "{}/osidb/api/v1/affects/{affect_id}/cvss_scores".format( - client.base_url, - affect_id=affect_id, - ) - - headers: Dict[str, Any] = client.get_headers() - - json_created_dt: Union[Unset, None, str] = UNSET + comment: Union[Unset, str] = UNSET, + created_dt: Union[Unset, datetime.datetime] = UNSET, + created_dt_date: Union[Unset, datetime.date] = UNSET, + created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + 
created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + cvss_version: Union[Unset, str] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, + issuer: Union[Unset, OsidbApiV1AffectsCvssScoresListIssuer] = UNSET, + limit: Union[Unset, int] = UNSET, + offset: Union[Unset, int] = UNSET, + score: Union[Unset, float] = UNSET, + updated_dt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_date: Union[Unset, datetime.date] = UNSET, + updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + uuid: Union[Unset, UUID] = UNSET, + vector: Union[Unset, str] = UNSET, +) -> dict[str, Any]: + params: dict[str, Any] = {} + + params["comment"] = comment + + json_created_dt: Union[Unset, str] = UNSET if not isinstance(created_dt, Unset): - json_created_dt = created_dt.isoformat() if created_dt else None + json_created_dt = created_dt.isoformat() + + params["created_dt"] = json_created_dt - json_created_dt_date: Union[Unset, None, str] = UNSET + json_created_dt_date: Union[Unset, str] = UNSET if not isinstance(created_dt_date, Unset): - json_created_dt_date = created_dt_date.isoformat() if created_dt_date else None + json_created_dt_date = created_dt_date.isoformat() + + params["created_dt__date"] = json_created_dt_date - json_created_dt_date_gte: Union[Unset, None, str] = UNSET + json_created_dt_date_gte: Union[Unset, str] = UNSET if not isinstance(created_dt_date_gte, Unset): - json_created_dt_date_gte = ( - created_dt_date_gte.isoformat() if created_dt_date_gte else None - ) + json_created_dt_date_gte = created_dt_date_gte.isoformat() + + params["created_dt__date__gte"] = json_created_dt_date_gte - json_created_dt_date_lte: Union[Unset, None, str] = UNSET + json_created_dt_date_lte: Union[Unset, str] = UNSET if not isinstance(created_dt_date_lte, Unset): - json_created_dt_date_lte = ( - created_dt_date_lte.isoformat() if created_dt_date_lte else None - ) + json_created_dt_date_lte = created_dt_date_lte.isoformat() - json_created_dt_gt: Union[Unset, None, str] = UNSET + params["created_dt__date__lte"] = json_created_dt_date_lte + + json_created_dt_gt: Union[Unset, str] = UNSET if not isinstance(created_dt_gt, Unset): - json_created_dt_gt = created_dt_gt.isoformat() if created_dt_gt else None + json_created_dt_gt = created_dt_gt.isoformat() + + params["created_dt__gt"] = json_created_dt_gt - json_created_dt_gte: Union[Unset, None, str] = UNSET + json_created_dt_gte: Union[Unset, str] = UNSET if not isinstance(created_dt_gte, Unset): - json_created_dt_gte = created_dt_gte.isoformat() if created_dt_gte else None + json_created_dt_gte = created_dt_gte.isoformat() - json_created_dt_lt: Union[Unset, None, str] = UNSET + params["created_dt__gte"] = json_created_dt_gte + + json_created_dt_lt: Union[Unset, str] = UNSET if not isinstance(created_dt_lt, Unset): - json_created_dt_lt = created_dt_lt.isoformat() if created_dt_lt else None + json_created_dt_lt = created_dt_lt.isoformat() + + params["created_dt__lt"] = json_created_dt_lt 
- json_created_dt_lte: Union[Unset, None, str] = UNSET + json_created_dt_lte: Union[Unset, str] = UNSET if not isinstance(created_dt_lte, Unset): - json_created_dt_lte = created_dt_lte.isoformat() if created_dt_lte else None + json_created_dt_lte = created_dt_lte.isoformat() + + params["created_dt__lte"] = json_created_dt_lte - json_exclude_fields: Union[Unset, None, List[str]] = UNSET + params["cvss_version"] = cvss_version + + json_exclude_fields: Union[Unset, list[str]] = UNSET if not isinstance(exclude_fields, Unset): - if exclude_fields is None: - json_exclude_fields = None - else: - json_exclude_fields = exclude_fields + json_exclude_fields = exclude_fields - json_include_fields: Union[Unset, None, List[str]] = UNSET + params["exclude_fields"] = json_exclude_fields + + json_include_fields: Union[Unset, list[str]] = UNSET if not isinstance(include_fields, Unset): - if include_fields is None: - json_include_fields = None - else: - json_include_fields = include_fields + json_include_fields = include_fields + + params["include_fields"] = json_include_fields - json_issuer: Union[Unset, None, str] = UNSET + json_issuer: Union[Unset, str] = UNSET if not isinstance(issuer, Unset): + json_issuer = OsidbApiV1AffectsCvssScoresListIssuer(issuer).value - json_issuer = ( - OsidbApiV1AffectsCvssScoresListIssuer(issuer).value if issuer else None - ) + params["issuer"] = json_issuer + + params["limit"] = limit - json_updated_dt: Union[Unset, None, str] = UNSET + params["offset"] = offset + + params["score"] = score + + json_updated_dt: Union[Unset, str] = UNSET if not isinstance(updated_dt, Unset): - json_updated_dt = updated_dt.isoformat() if updated_dt else None + json_updated_dt = updated_dt.isoformat() + + params["updated_dt"] = json_updated_dt - json_updated_dt_date: Union[Unset, None, str] = UNSET + json_updated_dt_date: Union[Unset, str] = UNSET if not isinstance(updated_dt_date, Unset): - json_updated_dt_date = updated_dt_date.isoformat() if updated_dt_date else None + json_updated_dt_date = updated_dt_date.isoformat() - json_updated_dt_date_gte: Union[Unset, None, str] = UNSET + params["updated_dt__date"] = json_updated_dt_date + + json_updated_dt_date_gte: Union[Unset, str] = UNSET if not isinstance(updated_dt_date_gte, Unset): - json_updated_dt_date_gte = ( - updated_dt_date_gte.isoformat() if updated_dt_date_gte else None - ) + json_updated_dt_date_gte = updated_dt_date_gte.isoformat() + + params["updated_dt__date__gte"] = json_updated_dt_date_gte - json_updated_dt_date_lte: Union[Unset, None, str] = UNSET + json_updated_dt_date_lte: Union[Unset, str] = UNSET if not isinstance(updated_dt_date_lte, Unset): - json_updated_dt_date_lte = ( - updated_dt_date_lte.isoformat() if updated_dt_date_lte else None - ) + json_updated_dt_date_lte = updated_dt_date_lte.isoformat() + + params["updated_dt__date__lte"] = json_updated_dt_date_lte - json_updated_dt_gt: Union[Unset, None, str] = UNSET + json_updated_dt_gt: Union[Unset, str] = UNSET if not isinstance(updated_dt_gt, Unset): - json_updated_dt_gt = updated_dt_gt.isoformat() if updated_dt_gt else None + json_updated_dt_gt = updated_dt_gt.isoformat() + + params["updated_dt__gt"] = json_updated_dt_gt - json_updated_dt_gte: Union[Unset, None, str] = UNSET + json_updated_dt_gte: Union[Unset, str] = UNSET if not isinstance(updated_dt_gte, Unset): - json_updated_dt_gte = updated_dt_gte.isoformat() if updated_dt_gte else None + json_updated_dt_gte = updated_dt_gte.isoformat() - json_updated_dt_lt: Union[Unset, None, str] = UNSET + 
params["updated_dt__gte"] = json_updated_dt_gte + + json_updated_dt_lt: Union[Unset, str] = UNSET if not isinstance(updated_dt_lt, Unset): - json_updated_dt_lt = updated_dt_lt.isoformat() if updated_dt_lt else None + json_updated_dt_lt = updated_dt_lt.isoformat() + + params["updated_dt__lt"] = json_updated_dt_lt - json_updated_dt_lte: Union[Unset, None, str] = UNSET + json_updated_dt_lte: Union[Unset, str] = UNSET if not isinstance(updated_dt_lte, Unset): - json_updated_dt_lte = updated_dt_lte.isoformat() if updated_dt_lte else None - - params: Dict[str, Any] = { - "comment": comment, - "created_dt": json_created_dt, - "created_dt__date": json_created_dt_date, - "created_dt__date__gte": json_created_dt_date_gte, - "created_dt__date__lte": json_created_dt_date_lte, - "created_dt__gt": json_created_dt_gt, - "created_dt__gte": json_created_dt_gte, - "created_dt__lt": json_created_dt_lt, - "created_dt__lte": json_created_dt_lte, - "cvss_version": cvss_version, - "exclude_fields": json_exclude_fields, - "include_fields": json_include_fields, - "issuer": json_issuer, - "limit": limit, - "offset": offset, - "score": score, - "updated_dt": json_updated_dt, - "updated_dt__date": json_updated_dt_date, - "updated_dt__date__gte": json_updated_dt_date_gte, - "updated_dt__date__lte": json_updated_dt_date_lte, - "updated_dt__gt": json_updated_dt_gt, - "updated_dt__gte": json_updated_dt_gte, - "updated_dt__lt": json_updated_dt_lt, - "updated_dt__lte": json_updated_dt_lte, - "uuid": uuid, - "vector": vector, - } + json_updated_dt_lte = updated_dt_lte.isoformat() + + params["updated_dt__lte"] = json_updated_dt_lte + + json_uuid: Union[Unset, str] = UNSET + if not isinstance(uuid, Unset): + json_uuid = str(uuid) + + params["uuid"] = json_uuid + + params["vector"] = vector + params = {k: v for k, v in params.items() if v is not UNSET and v is not None} - return { - "url": url, - "headers": headers, + _kwargs: dict[str, Any] = { + "url": f"{client.base_url}//osidb/api/v1/affects/{affect_id}/cvss_scores".format( + affect_id=affect_id, + ), "params": params, } + return _kwargs + def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[OsidbApiV1AffectsCvssScoresListResponse200]: if response.status_code == 200: + # } _response_200 = response.json() response_200: OsidbApiV1AffectsCvssScoresListResponse200 if isinstance(_response_200, Unset): response_200 = UNSET else: - response_200 = OsidbApiV1AffectsCvssScoresListResponse200.from_dict( - _response_200 - ) + response_200 = OsidbApiV1AffectsCvssScoresListResponse200.from_dict(_response_200) return response_200 - return None def _build_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[OsidbApiV1AffectsCvssScoresListResponse200]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) def sync_detailed( - affect_id: str, + affect_id: UUID, *, client: AuthenticatedClient, - comment: Union[Unset, None, str] = UNSET, - created_dt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_date: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - created_dt_gt: Union[Unset, 
None, datetime.datetime] = UNSET, - created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - cvss_version: Union[Unset, None, str] = UNSET, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, - issuer: Union[Unset, None, OsidbApiV1AffectsCvssScoresListIssuer] = UNSET, - limit: Union[Unset, None, int] = UNSET, - offset: Union[Unset, None, int] = UNSET, - score: Union[Unset, None, float] = UNSET, - updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - uuid: Union[Unset, None, str] = UNSET, - vector: Union[Unset, None, str] = UNSET, + comment: Union[Unset, str] = UNSET, + created_dt: Union[Unset, datetime.datetime] = UNSET, + created_dt_date: Union[Unset, datetime.date] = UNSET, + created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + cvss_version: Union[Unset, str] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, + issuer: Union[Unset, OsidbApiV1AffectsCvssScoresListIssuer] = UNSET, + limit: Union[Unset, int] = UNSET, + offset: Union[Unset, int] = UNSET, + score: Union[Unset, float] = UNSET, + updated_dt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_date: Union[Unset, datetime.date] = UNSET, + updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + uuid: Union[Unset, UUID] = UNSET, + vector: Union[Unset, str] = UNSET, ) -> Response[OsidbApiV1AffectsCvssScoresListResponse200]: + """ + Args: + affect_id (UUID): + comment (Union[Unset, str]): + created_dt (Union[Unset, datetime.datetime]): + created_dt_date (Union[Unset, datetime.date]): + created_dt_date_gte (Union[Unset, datetime.date]): + created_dt_date_lte (Union[Unset, datetime.date]): + created_dt_gt (Union[Unset, datetime.datetime]): + created_dt_gte (Union[Unset, datetime.datetime]): + created_dt_lt (Union[Unset, datetime.datetime]): + created_dt_lte (Union[Unset, datetime.datetime]): + cvss_version (Union[Unset, str]): + exclude_fields (Union[Unset, list[str]]): + include_fields (Union[Unset, list[str]]): + issuer (Union[Unset, OsidbApiV1AffectsCvssScoresListIssuer]): + limit (Union[Unset, int]): + offset (Union[Unset, int]): + score (Union[Unset, float]): + updated_dt (Union[Unset, datetime.datetime]): + updated_dt_date (Union[Unset, datetime.date]): + updated_dt_date_gte (Union[Unset, datetime.date]): + updated_dt_date_lte 
(Union[Unset, datetime.date]): + updated_dt_gt (Union[Unset, datetime.datetime]): + updated_dt_gte (Union[Unset, datetime.datetime]): + updated_dt_lt (Union[Unset, datetime.datetime]): + updated_dt_lte (Union[Unset, datetime.datetime]): + uuid (Union[Unset, UUID]): + vector (Union[Unset, str]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1AffectsCvssScoresListResponse200] + """ + kwargs = _get_kwargs( affect_id=affect_id, client=client, @@ -308,41 +355,77 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( - affect_id: str, + affect_id: UUID, *, client: AuthenticatedClient, - comment: Union[Unset, None, str] = UNSET, - created_dt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_date: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - cvss_version: Union[Unset, None, str] = UNSET, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, - issuer: Union[Unset, None, OsidbApiV1AffectsCvssScoresListIssuer] = UNSET, - limit: Union[Unset, None, int] = UNSET, - offset: Union[Unset, None, int] = UNSET, - score: Union[Unset, None, float] = UNSET, - updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - uuid: Union[Unset, None, str] = UNSET, - vector: Union[Unset, None, str] = UNSET, + comment: Union[Unset, str] = UNSET, + created_dt: Union[Unset, datetime.datetime] = UNSET, + created_dt_date: Union[Unset, datetime.date] = UNSET, + created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + cvss_version: Union[Unset, str] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, + issuer: Union[Unset, OsidbApiV1AffectsCvssScoresListIssuer] = UNSET, + limit: Union[Unset, int] = UNSET, + offset: Union[Unset, int] = UNSET, + score: Union[Unset, float] = UNSET, + updated_dt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_date: Union[Unset, datetime.date] = UNSET, + updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_gte: 
Union[Unset, datetime.datetime] = UNSET, + updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + uuid: Union[Unset, UUID] = UNSET, + vector: Union[Unset, str] = UNSET, ) -> Optional[OsidbApiV1AffectsCvssScoresListResponse200]: - """ """ + """ + Args: + affect_id (UUID): + comment (Union[Unset, str]): + created_dt (Union[Unset, datetime.datetime]): + created_dt_date (Union[Unset, datetime.date]): + created_dt_date_gte (Union[Unset, datetime.date]): + created_dt_date_lte (Union[Unset, datetime.date]): + created_dt_gt (Union[Unset, datetime.datetime]): + created_dt_gte (Union[Unset, datetime.datetime]): + created_dt_lt (Union[Unset, datetime.datetime]): + created_dt_lte (Union[Unset, datetime.datetime]): + cvss_version (Union[Unset, str]): + exclude_fields (Union[Unset, list[str]]): + include_fields (Union[Unset, list[str]]): + issuer (Union[Unset, OsidbApiV1AffectsCvssScoresListIssuer]): + limit (Union[Unset, int]): + offset (Union[Unset, int]): + score (Union[Unset, float]): + updated_dt (Union[Unset, datetime.datetime]): + updated_dt_date (Union[Unset, datetime.date]): + updated_dt_date_gte (Union[Unset, datetime.date]): + updated_dt_date_lte (Union[Unset, datetime.date]): + updated_dt_gt (Union[Unset, datetime.datetime]): + updated_dt_gte (Union[Unset, datetime.datetime]): + updated_dt_lt (Union[Unset, datetime.datetime]): + updated_dt_lte (Union[Unset, datetime.datetime]): + uuid (Union[Unset, UUID]): + vector (Union[Unset, str]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + OsidbApiV1AffectsCvssScoresListResponse200 + """ return sync_detailed( affect_id=affect_id, @@ -376,37 +459,75 @@ def sync( ).parsed -async def async_detailed( - affect_id: str, +async def asyncio_detailed( + affect_id: UUID, *, client: AuthenticatedClient, - comment: Union[Unset, None, str] = UNSET, - created_dt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_date: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - cvss_version: Union[Unset, None, str] = UNSET, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, - issuer: Union[Unset, None, OsidbApiV1AffectsCvssScoresListIssuer] = UNSET, - limit: Union[Unset, None, int] = UNSET, - offset: Union[Unset, None, int] = UNSET, - score: Union[Unset, None, float] = UNSET, - updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - uuid: Union[Unset, None, str] = UNSET, - vector: Union[Unset, None, str] = UNSET, + comment: Union[Unset, 
str] = UNSET, + created_dt: Union[Unset, datetime.datetime] = UNSET, + created_dt_date: Union[Unset, datetime.date] = UNSET, + created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + cvss_version: Union[Unset, str] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, + issuer: Union[Unset, OsidbApiV1AffectsCvssScoresListIssuer] = UNSET, + limit: Union[Unset, int] = UNSET, + offset: Union[Unset, int] = UNSET, + score: Union[Unset, float] = UNSET, + updated_dt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_date: Union[Unset, datetime.date] = UNSET, + updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + uuid: Union[Unset, UUID] = UNSET, + vector: Union[Unset, str] = UNSET, ) -> Response[OsidbApiV1AffectsCvssScoresListResponse200]: + """ + Args: + affect_id (UUID): + comment (Union[Unset, str]): + created_dt (Union[Unset, datetime.datetime]): + created_dt_date (Union[Unset, datetime.date]): + created_dt_date_gte (Union[Unset, datetime.date]): + created_dt_date_lte (Union[Unset, datetime.date]): + created_dt_gt (Union[Unset, datetime.datetime]): + created_dt_gte (Union[Unset, datetime.datetime]): + created_dt_lt (Union[Unset, datetime.datetime]): + created_dt_lte (Union[Unset, datetime.datetime]): + cvss_version (Union[Unset, str]): + exclude_fields (Union[Unset, list[str]]): + include_fields (Union[Unset, list[str]]): + issuer (Union[Unset, OsidbApiV1AffectsCvssScoresListIssuer]): + limit (Union[Unset, int]): + offset (Union[Unset, int]): + score (Union[Unset, float]): + updated_dt (Union[Unset, datetime.datetime]): + updated_dt_date (Union[Unset, datetime.date]): + updated_dt_date_gte (Union[Unset, datetime.date]): + updated_dt_date_lte (Union[Unset, datetime.date]): + updated_dt_gt (Union[Unset, datetime.datetime]): + updated_dt_gte (Union[Unset, datetime.datetime]): + updated_dt_lt (Union[Unset, datetime.datetime]): + updated_dt_lte (Union[Unset, datetime.datetime]): + uuid (Union[Unset, UUID]): + vector (Union[Unset, str]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[OsidbApiV1AffectsCvssScoresListResponse200] + """ + kwargs = _get_kwargs( affect_id=affect_id, client=client, @@ -446,44 +567,80 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( - affect_id: str, +async def asyncio( + affect_id: UUID, *, client: AuthenticatedClient, - comment: Union[Unset, None, str] = UNSET, - created_dt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_date: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - cvss_version: Union[Unset, None, str] = UNSET, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, - issuer: Union[Unset, None, OsidbApiV1AffectsCvssScoresListIssuer] = UNSET, - limit: Union[Unset, None, int] = UNSET, - offset: Union[Unset, None, int] = UNSET, - score: Union[Unset, None, float] = UNSET, - updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - uuid: Union[Unset, None, str] = UNSET, - vector: Union[Unset, None, str] = UNSET, + comment: Union[Unset, str] = UNSET, + created_dt: Union[Unset, datetime.datetime] = UNSET, + created_dt_date: Union[Unset, datetime.date] = UNSET, + created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + cvss_version: Union[Unset, str] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, + issuer: Union[Unset, OsidbApiV1AffectsCvssScoresListIssuer] = UNSET, + limit: Union[Unset, int] = UNSET, + offset: Union[Unset, int] = UNSET, + score: Union[Unset, float] = UNSET, + updated_dt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_date: Union[Unset, datetime.date] = UNSET, + updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + uuid: Union[Unset, UUID] = UNSET, + vector: Union[Unset, str] = UNSET, ) -> Optional[OsidbApiV1AffectsCvssScoresListResponse200]: - """ """ + """ + Args: + affect_id (UUID): + comment (Union[Unset, str]): + created_dt (Union[Unset, datetime.datetime]): + created_dt_date (Union[Unset, 
datetime.date]): + created_dt_date_gte (Union[Unset, datetime.date]): + created_dt_date_lte (Union[Unset, datetime.date]): + created_dt_gt (Union[Unset, datetime.datetime]): + created_dt_gte (Union[Unset, datetime.datetime]): + created_dt_lt (Union[Unset, datetime.datetime]): + created_dt_lte (Union[Unset, datetime.datetime]): + cvss_version (Union[Unset, str]): + exclude_fields (Union[Unset, list[str]]): + include_fields (Union[Unset, list[str]]): + issuer (Union[Unset, OsidbApiV1AffectsCvssScoresListIssuer]): + limit (Union[Unset, int]): + offset (Union[Unset, int]): + score (Union[Unset, float]): + updated_dt (Union[Unset, datetime.datetime]): + updated_dt_date (Union[Unset, datetime.date]): + updated_dt_date_gte (Union[Unset, datetime.date]): + updated_dt_date_lte (Union[Unset, datetime.date]): + updated_dt_gt (Union[Unset, datetime.datetime]): + updated_dt_gte (Union[Unset, datetime.datetime]): + updated_dt_lt (Union[Unset, datetime.datetime]): + updated_dt_lte (Union[Unset, datetime.datetime]): + uuid (Union[Unset, UUID]): + vector (Union[Unset, str]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + OsidbApiV1AffectsCvssScoresListResponse200 + """ return ( - await async_detailed( + await asyncio_detailed( affect_id=affect_id, client=client, comment=comment, diff --git a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_affects_cvss_scores_retrieve.py b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_affects_cvss_scores_retrieve.py index 5bb685a..cfb808d 100644 --- a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_affects_cvss_scores_retrieve.py +++ b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_affects_cvss_scores_retrieve.py @@ -1,98 +1,105 @@ -from typing import Any, Dict, List, Optional, Union +from http import HTTPStatus +from typing import Any, Optional, Union +from uuid import UUID import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.osidb_api_v1_affects_cvss_scores_retrieve_response_200 import ( OsidbApiV1AffectsCvssScoresRetrieveResponse200, ) from ...types import UNSET, Response, Unset QUERY_PARAMS = { - "exclude_fields": List[str], - "include_fields": List[str], + "exclude_fields": list[str], + "include_fields": list[str], } def _get_kwargs( - affect_id: str, + affect_id: UUID, id: str, *, client: AuthenticatedClient, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, -) -> Dict[str, Any]: - url = "{}/osidb/api/v1/affects/{affect_id}/cvss_scores/{id}".format( - client.base_url, - affect_id=affect_id, - id=id, - ) + exclude_fields: Union[Unset, list[str]] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, +) -> dict[str, Any]: + params: dict[str, Any] = {} - headers: Dict[str, Any] = client.get_headers() - - json_exclude_fields: Union[Unset, None, List[str]] = UNSET + json_exclude_fields: Union[Unset, list[str]] = UNSET if not isinstance(exclude_fields, Unset): - if exclude_fields is None: - json_exclude_fields = None - else: - json_exclude_fields = exclude_fields + json_exclude_fields = exclude_fields + + params["exclude_fields"] = json_exclude_fields - json_include_fields: Union[Unset, None, List[str]] = UNSET + json_include_fields: Union[Unset, list[str]] = UNSET if not isinstance(include_fields, Unset): 
-        if include_fields is None:
-            json_include_fields = None
-        else:
-            json_include_fields = include_fields
+        json_include_fields = include_fields
+
+    params["include_fields"] = json_include_fields

-    params: Dict[str, Any] = {
-        "exclude_fields": json_exclude_fields,
-        "include_fields": json_include_fields,
-    }
     params = {k: v for k, v in params.items() if v is not UNSET and v is not None}

-    return {
-        "url": url,
-        "headers": headers,
+    _kwargs: dict[str, Any] = {
+        "url": f"{client.base_url}//osidb/api/v1/affects/{affect_id}/cvss_scores/{id}".format(
+            affect_id=affect_id,
+            id=id,
+        ),
         "params": params,
     }

+    return _kwargs
+

 def _parse_response(
-    *, response: requests.Response
+    *, client: Union[AuthenticatedClient, Client], response: requests.Response
 ) -> Optional[OsidbApiV1AffectsCvssScoresRetrieveResponse200]:
     if response.status_code == 200:
+        # }
         _response_200 = response.json()
         response_200: OsidbApiV1AffectsCvssScoresRetrieveResponse200
         if isinstance(_response_200, Unset):
             response_200 = UNSET
         else:
-            response_200 = OsidbApiV1AffectsCvssScoresRetrieveResponse200.from_dict(
-                _response_200
-            )
+            response_200 = OsidbApiV1AffectsCvssScoresRetrieveResponse200.from_dict(_response_200)

         return response_200
-
     return None


 def _build_response(
-    *, response: requests.Response
+    *, client: Union[AuthenticatedClient, Client], response: requests.Response
 ) -> Response[OsidbApiV1AffectsCvssScoresRetrieveResponse200]:
     return Response(
-        status_code=response.status_code,
+        status_code=HTTPStatus(response.status_code),
         content=response.content,
         headers=response.headers,
-        parsed=_parse_response(response=response),
+        parsed=_parse_response(client=client, response=response),
     )


 def sync_detailed(
-    affect_id: str,
+    affect_id: UUID,
     id: str,
     *,
     client: AuthenticatedClient,
-    exclude_fields: Union[Unset, None, List[str]] = UNSET,
-    include_fields: Union[Unset, None, List[str]] = UNSET,
+    exclude_fields: Union[Unset, list[str]] = UNSET,
+    include_fields: Union[Unset, list[str]] = UNSET,
 ) -> Response[OsidbApiV1AffectsCvssScoresRetrieveResponse200]:
+    """
+    Args:
+        affect_id (UUID):
+        id (str):
+        exclude_fields (Union[Unset, list[str]]):
+        include_fields (Union[Unset, list[str]]):
+
+    Raises:
+        errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
+        httpx.TimeoutException: If the request takes longer than Client.timeout.
+
+    Returns:
+        Response[OsidbApiV1AffectsCvssScoresRetrieveResponse200]
+    """
+
     kwargs = _get_kwargs(
         affect_id=affect_id,
         id=id,
@@ -109,18 +116,31 @@ def sync_detailed(
     )
     response.raise_for_status()

-    return _build_response(response=response)
+    return _build_response(client=client, response=response)


 def sync(
-    affect_id: str,
+    affect_id: UUID,
     id: str,
     *,
     client: AuthenticatedClient,
-    exclude_fields: Union[Unset, None, List[str]] = UNSET,
-    include_fields: Union[Unset, None, List[str]] = UNSET,
+    exclude_fields: Union[Unset, list[str]] = UNSET,
+    include_fields: Union[Unset, list[str]] = UNSET,
 ) -> Optional[OsidbApiV1AffectsCvssScoresRetrieveResponse200]:
-    """ """
+    """
+    Args:
+        affect_id (UUID):
+        id (str):
+        exclude_fields (Union[Unset, list[str]]):
+        include_fields (Union[Unset, list[str]]):
+
+    Raises:
+        errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
+        httpx.TimeoutException: If the request takes longer than Client.timeout.
+
+    Returns:
+        OsidbApiV1AffectsCvssScoresRetrieveResponse200
+    """

     return sync_detailed(
         affect_id=affect_id,
@@ -131,14 +151,29 @@ def sync(
     ).parsed


-async def async_detailed(
-    affect_id: str,
+async def asyncio_detailed(
+    affect_id: UUID,
     id: str,
     *,
     client: AuthenticatedClient,
-    exclude_fields: Union[Unset, None, List[str]] = UNSET,
-    include_fields: Union[Unset, None, List[str]] = UNSET,
+    exclude_fields: Union[Unset, list[str]] = UNSET,
+    include_fields: Union[Unset, list[str]] = UNSET,
 ) -> Response[OsidbApiV1AffectsCvssScoresRetrieveResponse200]:
+    """
+    Args:
+        affect_id (UUID):
+        id (str):
+        exclude_fields (Union[Unset, list[str]]):
+        include_fields (Union[Unset, list[str]]):
+
+    Raises:
+        errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
+        httpx.TimeoutException: If the request takes longer than Client.timeout.
+
+    Returns:
+        Response[OsidbApiV1AffectsCvssScoresRetrieveResponse200]
+    """
+
     kwargs = _get_kwargs(
         affect_id=affect_id,
         id=id,
@@ -155,21 +190,34 @@ async def async_detailed(
     resp.status_code = response.status
     resp._content = content

-    return _build_response(response=resp)
+    return _build_response(client=client, response=resp)


-async def async_(
-    affect_id: str,
+async def asyncio(
+    affect_id: UUID,
     id: str,
     *,
     client: AuthenticatedClient,
-    exclude_fields: Union[Unset, None, List[str]] = UNSET,
-    include_fields: Union[Unset, None, List[str]] = UNSET,
+    exclude_fields: Union[Unset, list[str]] = UNSET,
+    include_fields: Union[Unset, list[str]] = UNSET,
 ) -> Optional[OsidbApiV1AffectsCvssScoresRetrieveResponse200]:
-    """ """
+    """
+    Args:
+        affect_id (UUID):
+        id (str):
+        exclude_fields (Union[Unset, list[str]]):
+        include_fields (Union[Unset, list[str]]):
+
+    Raises:
+        errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
+        httpx.TimeoutException: If the request takes longer than Client.timeout.
+
+    Returns:
+        OsidbApiV1AffectsCvssScoresRetrieveResponse200
+    """

     return (
-        await async_detailed(
+        await asyncio_detailed(
             affect_id=affect_id,
             id=id,
             client=client,
diff --git a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_affects_cvss_scores_update.py b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_affects_cvss_scores_update.py
index c1a9c76..85e448d 100644
--- a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_affects_cvss_scores_update.py
+++ b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_affects_cvss_scores_update.py
@@ -1,8 +1,10 @@
-from typing import Any, Dict, Optional
+from http import HTTPStatus
+from typing import Any, Optional, Union
+from uuid import UUID

 import requests

-from ...client import AuthenticatedClient
+from ...client import AuthenticatedClient, Client
 from ...models.affect_cvss_put import AffectCVSSPut
 from ...models.osidb_api_v1_affects_cvss_scores_update_response_200 import (
     OsidbApiV1AffectsCvssScoresUpdateResponse200,
@@ -10,85 +12,101 @@
 from ...types import UNSET, Response, Unset

 QUERY_PARAMS = {}
+
 REQUEST_BODY_TYPE = AffectCVSSPut


 def _get_kwargs(
-    affect_id: str,
+    affect_id: UUID,
     id: str,
     *,
     client: AuthenticatedClient,
-    form_data: AffectCVSSPut,
-    multipart_data: AffectCVSSPut,
-    json_body: AffectCVSSPut,
-) -> Dict[str, Any]:
-    url = "{}/osidb/api/v1/affects/{affect_id}/cvss_scores/{id}".format(
-        client.base_url,
-        affect_id=affect_id,
-        id=id,
-    )
-
-    headers: Dict[str, Any] = client.get_headers()
+    body: Union[
+        AffectCVSSPut,
+        AffectCVSSPut,
+        AffectCVSSPut,
+    ],
+) -> dict[str, Any]:
+    headers: dict[str, Any] = client.get_headers()
+
+    _kwargs: dict[str, Any] = {
+        "url": f"{client.base_url}//osidb/api/v1/affects/{affect_id}/cvss_scores/{id}".format(
+            affect_id=affect_id,
+            id=id,
+        ),
+    }
AuthenticatedClient, - form_data: AffectCVSSPut, - multipart_data: AffectCVSSPut, - json_body: AffectCVSSPut, + body: Union[ + AffectCVSSPut, + AffectCVSSPut, + AffectCVSSPut, + ], ) -> Response[OsidbApiV1AffectsCvssScoresUpdateResponse200]: + """ + Args: + affect_id (UUID): + id (str): + bugzilla_api_key (str): + body (AffectCVSSPut): AffectCVSS serializer + body (AffectCVSSPut): AffectCVSS serializer + body (AffectCVSSPut): AffectCVSS serializer + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1AffectsCvssScoresUpdateResponse200] + """ + kwargs = _get_kwargs( affect_id=affect_id, id=id, client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) response = requests.put( @@ -99,46 +117,78 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( - affect_id: str, + affect_id: UUID, id: str, *, client: AuthenticatedClient, - form_data: AffectCVSSPut, - multipart_data: AffectCVSSPut, - json_body: AffectCVSSPut, + body: Union[ + AffectCVSSPut, + AffectCVSSPut, + AffectCVSSPut, + ], ) -> Optional[OsidbApiV1AffectsCvssScoresUpdateResponse200]: - """ """ + """ + Args: + affect_id (UUID): + id (str): + bugzilla_api_key (str): + body (AffectCVSSPut): AffectCVSS serializer + body (AffectCVSSPut): AffectCVSS serializer + body (AffectCVSSPut): AffectCVSS serializer + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + OsidbApiV1AffectsCvssScoresUpdateResponse200 + """ return sync_detailed( affect_id=affect_id, id=id, client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ).parsed -async def async_detailed( - affect_id: str, +async def asyncio_detailed( + affect_id: UUID, id: str, *, client: AuthenticatedClient, - form_data: AffectCVSSPut, - multipart_data: AffectCVSSPut, - json_body: AffectCVSSPut, + body: Union[ + AffectCVSSPut, + AffectCVSSPut, + AffectCVSSPut, + ], ) -> Response[OsidbApiV1AffectsCvssScoresUpdateResponse200]: + """ + Args: + affect_id (UUID): + id (str): + bugzilla_api_key (str): + body (AffectCVSSPut): AffectCVSS serializer + body (AffectCVSSPut): AffectCVSS serializer + body (AffectCVSSPut): AffectCVSS serializer + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[OsidbApiV1AffectsCvssScoresUpdateResponse200] + """ + kwargs = _get_kwargs( affect_id=affect_id, id=id, client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) async with client.get_async_session().put( @@ -149,27 +199,42 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( - affect_id: str, +async def asyncio( + affect_id: UUID, id: str, *, client: AuthenticatedClient, - form_data: AffectCVSSPut, - multipart_data: AffectCVSSPut, - json_body: AffectCVSSPut, + body: Union[ + AffectCVSSPut, + AffectCVSSPut, + AffectCVSSPut, + ], ) -> Optional[OsidbApiV1AffectsCvssScoresUpdateResponse200]: - """ """ + """ + Args: + affect_id (UUID): + id (str): + bugzilla_api_key (str): + body (AffectCVSSPut): AffectCVSS serializer + body (AffectCVSSPut): AffectCVSS serializer + body (AffectCVSSPut): AffectCVSS serializer + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + OsidbApiV1AffectsCvssScoresUpdateResponse200 + """ return ( - await async_detailed( + await asyncio_detailed( affect_id=affect_id, id=id, client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) ).parsed diff --git a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_affects_destroy.py b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_affects_destroy.py index 479a3bb..91c7c58 100644 --- a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_affects_destroy.py +++ b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_affects_destroy.py @@ -1,8 +1,10 @@ -from typing import Any, Dict, Optional +from http import HTTPStatus +from typing import Any, Optional, Union +from uuid import UUID import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.osidb_api_v1_affects_destroy_response_200 import ( OsidbApiV1AffectsDestroyResponse200, ) @@ -12,27 +14,27 @@ def _get_kwargs( - uuid: str, + uuid: UUID, *, client: AuthenticatedClient, -) -> Dict[str, Any]: - url = "{}/osidb/api/v1/affects/{uuid}".format( - client.base_url, - uuid=uuid, - ) - - headers: Dict[str, Any] = client.get_headers() +) -> dict[str, Any]: + headers: dict[str, Any] = client.get_headers() - return { - "url": url, - "headers": headers, + _kwargs: dict[str, Any] = { + "url": f"{client.base_url}//osidb/api/v1/affects/{uuid}".format( + uuid=uuid, + ), } + _kwargs["headers"] = headers + return _kwargs + def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[OsidbApiV1AffectsDestroyResponse200]: if response.status_code == 200: + # } _response_200 = response.json() response_200: OsidbApiV1AffectsDestroyResponse200 if isinstance(_response_200, Unset): @@ -41,25 +43,38 @@ def _parse_response( response_200 = OsidbApiV1AffectsDestroyResponse200.from_dict(_response_200) return response_200 - return None def _build_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[OsidbApiV1AffectsDestroyResponse200]: return Response( - status_code=response.status_code, + 
status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) def sync_detailed( - uuid: str, + uuid: UUID, *, client: AuthenticatedClient, ) -> Response[OsidbApiV1AffectsDestroyResponse200]: + """Destroy the instance and proxy the delete to Bugzilla + + Args: + uuid (UUID): + bugzilla_api_key (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1AffectsDestroyResponse200] + """ + kwargs = _get_kwargs( uuid=uuid, client=client, @@ -73,15 +88,27 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( - uuid: str, + uuid: UUID, *, client: AuthenticatedClient, ) -> Optional[OsidbApiV1AffectsDestroyResponse200]: - """Destroy the instance and proxy the delete to Bugzilla""" + """Destroy the instance and proxy the delete to Bugzilla + + Args: + uuid (UUID): + bugzilla_api_key (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + OsidbApiV1AffectsDestroyResponse200 + """ return sync_detailed( uuid=uuid, @@ -89,11 +116,25 @@ def sync( ).parsed -async def async_detailed( - uuid: str, +async def asyncio_detailed( + uuid: UUID, *, client: AuthenticatedClient, ) -> Response[OsidbApiV1AffectsDestroyResponse200]: + """Destroy the instance and proxy the delete to Bugzilla + + Args: + uuid (UUID): + bugzilla_api_key (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1AffectsDestroyResponse200] + """ + kwargs = _get_kwargs( uuid=uuid, client=client, @@ -107,18 +148,30 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( - uuid: str, +async def asyncio( + uuid: UUID, *, client: AuthenticatedClient, ) -> Optional[OsidbApiV1AffectsDestroyResponse200]: - """Destroy the instance and proxy the delete to Bugzilla""" + """Destroy the instance and proxy the delete to Bugzilla + + Args: + uuid (UUID): + bugzilla_api_key (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + OsidbApiV1AffectsDestroyResponse200 + """ return ( - await async_detailed( + await asyncio_detailed( uuid=uuid, client=client, ) diff --git a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_affects_list.py b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_affects_list.py index 1223462..6454b5e 100644 --- a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_affects_list.py +++ b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_affects_list.py @@ -1,9 +1,11 @@ import datetime -from typing import Any, Dict, List, Optional, Union +from http import HTTPStatus +from typing import Any, Optional, Union +from uuid import UUID import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.osidb_api_v1_affects_list_affectedness import ( OsidbApiV1AffectsListAffectedness, ) @@ -61,11 +63,11 @@ "cvss_scores__updated_dt__gte": datetime.datetime, "cvss_scores__updated_dt__lt": datetime.datetime, "cvss_scores__updated_dt__lte": datetime.datetime, - "cvss_scores__uuid": str, + "cvss_scores__uuid": UUID, "cvss_scores__vector": str, "embargoed": bool, - "exclude_fields": List[str], - "flaw__components": List[str], + "exclude_fields": list[str], + "flaw__components": list[str], "flaw__created_dt": datetime.datetime, "flaw__created_dt__date": datetime.date, "flaw__created_dt__date__gte": datetime.date, @@ -96,13 +98,13 @@ "flaw__updated_dt__gte": datetime.datetime, "flaw__updated_dt__lt": datetime.datetime, "flaw__updated_dt__lte": datetime.datetime, - "flaw__uuid": str, + "flaw__uuid": UUID, "impact": OsidbApiV1AffectsListImpact, - "include_fields": List[str], - "include_meta_attr": List[str], + "include_fields": list[str], + "include_meta_attr": list[str], "limit": int, "offset": int, - "order": List[OsidbApiV1AffectsListOrderItem], + "order": list[OsidbApiV1AffectsListOrderItem], "ps_component": str, "ps_module": str, "resolution": OsidbApiV1AffectsListResolution, @@ -128,7 +130,7 @@ "trackers__updated_dt__gte": datetime.datetime, "trackers__updated_dt__lt": datetime.datetime, "trackers__updated_dt__lte": datetime.datetime, - "trackers__uuid": str, + "trackers__uuid": UUID, "updated_dt": datetime.datetime, "updated_dt__date": datetime.date, "updated_dt__date__gte": datetime.date, @@ -137,782 +139,711 @@ "updated_dt__gte": datetime.datetime, "updated_dt__lt": datetime.datetime, "updated_dt__lte": datetime.datetime, - "uuid": str, + "uuid": UUID, } def _get_kwargs( *, client: AuthenticatedClient, - affectedness: Union[Unset, None, OsidbApiV1AffectsListAffectedness] = UNSET, - created_dt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_date: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_comment: Union[Unset, None, str] = UNSET, - cvss_scores_created_dt: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_created_dt_date: Union[Unset, None, datetime.date] = UNSET, - cvss_scores_created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - cvss_scores_created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - cvss_scores_created_dt_gt: Union[Unset, None, 
datetime.datetime] = UNSET, - cvss_scores_created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_cvss_version: Union[Unset, None, str] = UNSET, - cvss_scores_issuer: Union[ - Unset, None, OsidbApiV1AffectsListCvssScoresIssuer - ] = UNSET, - cvss_scores_score: Union[Unset, None, float] = UNSET, - cvss_scores_updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - cvss_scores_updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - cvss_scores_updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - cvss_scores_updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_uuid: Union[Unset, None, str] = UNSET, - cvss_scores_vector: Union[Unset, None, str] = UNSET, - embargoed: Union[Unset, None, bool] = UNSET, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - flaw_components: Union[Unset, None, List[str]] = UNSET, - flaw_created_dt: Union[Unset, None, datetime.datetime] = UNSET, - flaw_created_dt_date: Union[Unset, None, datetime.date] = UNSET, - flaw_created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - flaw_created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - flaw_created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - flaw_created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - flaw_created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - flaw_created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - flaw_cve_id: Union[Unset, None, str] = UNSET, - flaw_cwe_id: Union[Unset, None, str] = UNSET, - flaw_embargoed: Union[Unset, None, bool] = UNSET, - flaw_impact: Union[Unset, None, OsidbApiV1AffectsListFlawImpact] = UNSET, - flaw_reported_dt: Union[Unset, None, datetime.datetime] = UNSET, - flaw_reported_dt_date: Union[Unset, None, datetime.date] = UNSET, - flaw_reported_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - flaw_reported_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - flaw_reported_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - flaw_reported_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - flaw_reported_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - flaw_reported_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - flaw_source: Union[Unset, None, OsidbApiV1AffectsListFlawSource] = UNSET, - flaw_unembargo_dt: Union[Unset, None, datetime.datetime] = UNSET, - flaw_updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - flaw_updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - flaw_updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - flaw_updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - flaw_updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - flaw_updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - flaw_updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - flaw_updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - flaw_uuid: Union[Unset, None, str] = UNSET, - impact: Union[Unset, None, OsidbApiV1AffectsListImpact] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, 
- include_meta_attr: Union[Unset, None, List[str]] = UNSET, - limit: Union[Unset, None, int] = UNSET, - offset: Union[Unset, None, int] = UNSET, - order: Union[Unset, None, List[OsidbApiV1AffectsListOrderItem]] = UNSET, - ps_component: Union[Unset, None, str] = UNSET, - ps_module: Union[Unset, None, str] = UNSET, - resolution: Union[Unset, None, OsidbApiV1AffectsListResolution] = UNSET, - trackers_created_dt: Union[Unset, None, datetime.datetime] = UNSET, - trackers_created_dt_date: Union[Unset, None, datetime.date] = UNSET, - trackers_created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - trackers_created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - trackers_created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - trackers_created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - trackers_created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - trackers_created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - trackers_embargoed: Union[Unset, None, bool] = UNSET, - trackers_external_system_id: Union[Unset, None, str] = UNSET, - trackers_ps_update_stream: Union[Unset, None, str] = UNSET, - trackers_resolution: Union[Unset, None, str] = UNSET, - trackers_status: Union[Unset, None, str] = UNSET, - trackers_type: Union[Unset, None, OsidbApiV1AffectsListTrackersType] = UNSET, - trackers_updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - trackers_updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - trackers_updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - trackers_updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - trackers_updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - trackers_updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - trackers_updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - trackers_updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - trackers_uuid: Union[Unset, None, str] = UNSET, - updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - uuid: Union[Unset, None, str] = UNSET, -) -> Dict[str, Any]: - url = "{}/osidb/api/v1/affects".format( - client.base_url, - ) - - headers: Dict[str, Any] = client.get_headers() - - json_affectedness: Union[Unset, None, str] = UNSET + affectedness: Union[Unset, OsidbApiV1AffectsListAffectedness] = UNSET, + created_dt: Union[Unset, datetime.datetime] = UNSET, + created_dt_date: Union[Unset, datetime.date] = UNSET, + created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_comment: Union[Unset, str] = UNSET, + cvss_scores_created_dt: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_created_dt_date: Union[Unset, datetime.date] = UNSET, + cvss_scores_created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + 
cvss_scores_created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + cvss_scores_created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_cvss_version: Union[Unset, str] = UNSET, + cvss_scores_issuer: Union[Unset, OsidbApiV1AffectsListCvssScoresIssuer] = UNSET, + cvss_scores_score: Union[Unset, float] = UNSET, + cvss_scores_updated_dt: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_updated_dt_date: Union[Unset, datetime.date] = UNSET, + cvss_scores_updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + cvss_scores_updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + cvss_scores_updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_uuid: Union[Unset, UUID] = UNSET, + cvss_scores_vector: Union[Unset, str] = UNSET, + embargoed: Union[Unset, bool] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + flaw_components: Union[Unset, list[str]] = UNSET, + flaw_created_dt: Union[Unset, datetime.datetime] = UNSET, + flaw_created_dt_date: Union[Unset, datetime.date] = UNSET, + flaw_created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + flaw_created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + flaw_created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + flaw_created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + flaw_created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + flaw_created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + flaw_cve_id: Union[Unset, str] = UNSET, + flaw_cwe_id: Union[Unset, str] = UNSET, + flaw_embargoed: Union[Unset, bool] = UNSET, + flaw_impact: Union[Unset, OsidbApiV1AffectsListFlawImpact] = UNSET, + flaw_reported_dt: Union[Unset, datetime.datetime] = UNSET, + flaw_reported_dt_date: Union[Unset, datetime.date] = UNSET, + flaw_reported_dt_date_gte: Union[Unset, datetime.date] = UNSET, + flaw_reported_dt_date_lte: Union[Unset, datetime.date] = UNSET, + flaw_reported_dt_gt: Union[Unset, datetime.datetime] = UNSET, + flaw_reported_dt_gte: Union[Unset, datetime.datetime] = UNSET, + flaw_reported_dt_lt: Union[Unset, datetime.datetime] = UNSET, + flaw_reported_dt_lte: Union[Unset, datetime.datetime] = UNSET, + flaw_source: Union[Unset, OsidbApiV1AffectsListFlawSource] = UNSET, + flaw_unembargo_dt: Union[Unset, datetime.datetime] = UNSET, + flaw_updated_dt: Union[Unset, datetime.datetime] = UNSET, + flaw_updated_dt_date: Union[Unset, datetime.date] = UNSET, + flaw_updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + flaw_updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + flaw_updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + flaw_updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + flaw_updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + flaw_updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + flaw_uuid: Union[Unset, UUID] = UNSET, + impact: Union[Unset, OsidbApiV1AffectsListImpact] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, + include_meta_attr: Union[Unset, list[str]] = UNSET, + limit: Union[Unset, int] = UNSET, + offset: Union[Unset, int] = UNSET, + order: Union[Unset, list[OsidbApiV1AffectsListOrderItem]] = UNSET, + 
ps_component: Union[Unset, str] = UNSET, + ps_module: Union[Unset, str] = UNSET, + resolution: Union[Unset, OsidbApiV1AffectsListResolution] = UNSET, + trackers_created_dt: Union[Unset, datetime.datetime] = UNSET, + trackers_created_dt_date: Union[Unset, datetime.date] = UNSET, + trackers_created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + trackers_created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + trackers_created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + trackers_created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + trackers_created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + trackers_created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + trackers_embargoed: Union[Unset, bool] = UNSET, + trackers_external_system_id: Union[Unset, str] = UNSET, + trackers_ps_update_stream: Union[Unset, str] = UNSET, + trackers_resolution: Union[Unset, str] = UNSET, + trackers_status: Union[Unset, str] = UNSET, + trackers_type: Union[Unset, OsidbApiV1AffectsListTrackersType] = UNSET, + trackers_updated_dt: Union[Unset, datetime.datetime] = UNSET, + trackers_updated_dt_date: Union[Unset, datetime.date] = UNSET, + trackers_updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + trackers_updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + trackers_updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + trackers_updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + trackers_updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + trackers_updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + trackers_uuid: Union[Unset, UUID] = UNSET, + updated_dt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_date: Union[Unset, datetime.date] = UNSET, + updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + uuid: Union[Unset, UUID] = UNSET, +) -> dict[str, Any]: + params: dict[str, Any] = {} + + json_affectedness: Union[Unset, str] = UNSET if not isinstance(affectedness, Unset): + json_affectedness = OsidbApiV1AffectsListAffectedness(affectedness).value - json_affectedness = ( - OsidbApiV1AffectsListAffectedness(affectedness).value - if affectedness - else None - ) + params["affectedness"] = json_affectedness - json_created_dt: Union[Unset, None, str] = UNSET + json_created_dt: Union[Unset, str] = UNSET if not isinstance(created_dt, Unset): - json_created_dt = created_dt.isoformat() if created_dt else None + json_created_dt = created_dt.isoformat() + + params["created_dt"] = json_created_dt - json_created_dt_date: Union[Unset, None, str] = UNSET + json_created_dt_date: Union[Unset, str] = UNSET if not isinstance(created_dt_date, Unset): - json_created_dt_date = created_dt_date.isoformat() if created_dt_date else None + json_created_dt_date = created_dt_date.isoformat() + + params["created_dt__date"] = json_created_dt_date - json_created_dt_date_gte: Union[Unset, None, str] = UNSET + json_created_dt_date_gte: Union[Unset, str] = UNSET if not isinstance(created_dt_date_gte, Unset): - json_created_dt_date_gte = ( - created_dt_date_gte.isoformat() if created_dt_date_gte else None - ) + json_created_dt_date_gte = created_dt_date_gte.isoformat() + + params["created_dt__date__gte"] = json_created_dt_date_gte - json_created_dt_date_lte: Union[Unset, None, str] = UNSET + 
json_created_dt_date_lte: Union[Unset, str] = UNSET if not isinstance(created_dt_date_lte, Unset): - json_created_dt_date_lte = ( - created_dt_date_lte.isoformat() if created_dt_date_lte else None - ) + json_created_dt_date_lte = created_dt_date_lte.isoformat() - json_created_dt_gt: Union[Unset, None, str] = UNSET + params["created_dt__date__lte"] = json_created_dt_date_lte + + json_created_dt_gt: Union[Unset, str] = UNSET if not isinstance(created_dt_gt, Unset): - json_created_dt_gt = created_dt_gt.isoformat() if created_dt_gt else None + json_created_dt_gt = created_dt_gt.isoformat() + + params["created_dt__gt"] = json_created_dt_gt - json_created_dt_gte: Union[Unset, None, str] = UNSET + json_created_dt_gte: Union[Unset, str] = UNSET if not isinstance(created_dt_gte, Unset): - json_created_dt_gte = created_dt_gte.isoformat() if created_dt_gte else None + json_created_dt_gte = created_dt_gte.isoformat() + + params["created_dt__gte"] = json_created_dt_gte - json_created_dt_lt: Union[Unset, None, str] = UNSET + json_created_dt_lt: Union[Unset, str] = UNSET if not isinstance(created_dt_lt, Unset): - json_created_dt_lt = created_dt_lt.isoformat() if created_dt_lt else None + json_created_dt_lt = created_dt_lt.isoformat() - json_created_dt_lte: Union[Unset, None, str] = UNSET + params["created_dt__lt"] = json_created_dt_lt + + json_created_dt_lte: Union[Unset, str] = UNSET if not isinstance(created_dt_lte, Unset): - json_created_dt_lte = created_dt_lte.isoformat() if created_dt_lte else None + json_created_dt_lte = created_dt_lte.isoformat() + + params["created_dt__lte"] = json_created_dt_lte + + params["cvss_scores__comment"] = cvss_scores_comment - json_cvss_scores_created_dt: Union[Unset, None, str] = UNSET + json_cvss_scores_created_dt: Union[Unset, str] = UNSET if not isinstance(cvss_scores_created_dt, Unset): - json_cvss_scores_created_dt = ( - cvss_scores_created_dt.isoformat() if cvss_scores_created_dt else None - ) + json_cvss_scores_created_dt = cvss_scores_created_dt.isoformat() + + params["cvss_scores__created_dt"] = json_cvss_scores_created_dt - json_cvss_scores_created_dt_date: Union[Unset, None, str] = UNSET + json_cvss_scores_created_dt_date: Union[Unset, str] = UNSET if not isinstance(cvss_scores_created_dt_date, Unset): - json_cvss_scores_created_dt_date = ( - cvss_scores_created_dt_date.isoformat() - if cvss_scores_created_dt_date - else None - ) + json_cvss_scores_created_dt_date = cvss_scores_created_dt_date.isoformat() - json_cvss_scores_created_dt_date_gte: Union[Unset, None, str] = UNSET + params["cvss_scores__created_dt__date"] = json_cvss_scores_created_dt_date + + json_cvss_scores_created_dt_date_gte: Union[Unset, str] = UNSET if not isinstance(cvss_scores_created_dt_date_gte, Unset): - json_cvss_scores_created_dt_date_gte = ( - cvss_scores_created_dt_date_gte.isoformat() - if cvss_scores_created_dt_date_gte - else None - ) + json_cvss_scores_created_dt_date_gte = cvss_scores_created_dt_date_gte.isoformat() + + params["cvss_scores__created_dt__date__gte"] = json_cvss_scores_created_dt_date_gte - json_cvss_scores_created_dt_date_lte: Union[Unset, None, str] = UNSET + json_cvss_scores_created_dt_date_lte: Union[Unset, str] = UNSET if not isinstance(cvss_scores_created_dt_date_lte, Unset): - json_cvss_scores_created_dt_date_lte = ( - cvss_scores_created_dt_date_lte.isoformat() - if cvss_scores_created_dt_date_lte - else None - ) + json_cvss_scores_created_dt_date_lte = cvss_scores_created_dt_date_lte.isoformat() + + params["cvss_scores__created_dt__date__lte"] = 
json_cvss_scores_created_dt_date_lte - json_cvss_scores_created_dt_gt: Union[Unset, None, str] = UNSET + json_cvss_scores_created_dt_gt: Union[Unset, str] = UNSET if not isinstance(cvss_scores_created_dt_gt, Unset): - json_cvss_scores_created_dt_gt = ( - cvss_scores_created_dt_gt.isoformat() if cvss_scores_created_dt_gt else None - ) + json_cvss_scores_created_dt_gt = cvss_scores_created_dt_gt.isoformat() + + params["cvss_scores__created_dt__gt"] = json_cvss_scores_created_dt_gt - json_cvss_scores_created_dt_gte: Union[Unset, None, str] = UNSET + json_cvss_scores_created_dt_gte: Union[Unset, str] = UNSET if not isinstance(cvss_scores_created_dt_gte, Unset): - json_cvss_scores_created_dt_gte = ( - cvss_scores_created_dt_gte.isoformat() - if cvss_scores_created_dt_gte - else None - ) + json_cvss_scores_created_dt_gte = cvss_scores_created_dt_gte.isoformat() - json_cvss_scores_created_dt_lt: Union[Unset, None, str] = UNSET + params["cvss_scores__created_dt__gte"] = json_cvss_scores_created_dt_gte + + json_cvss_scores_created_dt_lt: Union[Unset, str] = UNSET if not isinstance(cvss_scores_created_dt_lt, Unset): - json_cvss_scores_created_dt_lt = ( - cvss_scores_created_dt_lt.isoformat() if cvss_scores_created_dt_lt else None - ) + json_cvss_scores_created_dt_lt = cvss_scores_created_dt_lt.isoformat() + + params["cvss_scores__created_dt__lt"] = json_cvss_scores_created_dt_lt - json_cvss_scores_created_dt_lte: Union[Unset, None, str] = UNSET + json_cvss_scores_created_dt_lte: Union[Unset, str] = UNSET if not isinstance(cvss_scores_created_dt_lte, Unset): - json_cvss_scores_created_dt_lte = ( - cvss_scores_created_dt_lte.isoformat() - if cvss_scores_created_dt_lte - else None - ) + json_cvss_scores_created_dt_lte = cvss_scores_created_dt_lte.isoformat() + + params["cvss_scores__created_dt__lte"] = json_cvss_scores_created_dt_lte + + params["cvss_scores__cvss_version"] = cvss_scores_cvss_version - json_cvss_scores_issuer: Union[Unset, None, str] = UNSET + json_cvss_scores_issuer: Union[Unset, str] = UNSET if not isinstance(cvss_scores_issuer, Unset): + json_cvss_scores_issuer = OsidbApiV1AffectsListCvssScoresIssuer(cvss_scores_issuer).value - json_cvss_scores_issuer = ( - OsidbApiV1AffectsListCvssScoresIssuer(cvss_scores_issuer).value - if cvss_scores_issuer - else None - ) + params["cvss_scores__issuer"] = json_cvss_scores_issuer + + params["cvss_scores__score"] = cvss_scores_score - json_cvss_scores_updated_dt: Union[Unset, None, str] = UNSET + json_cvss_scores_updated_dt: Union[Unset, str] = UNSET if not isinstance(cvss_scores_updated_dt, Unset): - json_cvss_scores_updated_dt = ( - cvss_scores_updated_dt.isoformat() if cvss_scores_updated_dt else None - ) + json_cvss_scores_updated_dt = cvss_scores_updated_dt.isoformat() + + params["cvss_scores__updated_dt"] = json_cvss_scores_updated_dt - json_cvss_scores_updated_dt_date: Union[Unset, None, str] = UNSET + json_cvss_scores_updated_dt_date: Union[Unset, str] = UNSET if not isinstance(cvss_scores_updated_dt_date, Unset): - json_cvss_scores_updated_dt_date = ( - cvss_scores_updated_dt_date.isoformat() - if cvss_scores_updated_dt_date - else None - ) + json_cvss_scores_updated_dt_date = cvss_scores_updated_dt_date.isoformat() - json_cvss_scores_updated_dt_date_gte: Union[Unset, None, str] = UNSET + params["cvss_scores__updated_dt__date"] = json_cvss_scores_updated_dt_date + + json_cvss_scores_updated_dt_date_gte: Union[Unset, str] = UNSET if not isinstance(cvss_scores_updated_dt_date_gte, Unset): - json_cvss_scores_updated_dt_date_gte = ( - 
cvss_scores_updated_dt_date_gte.isoformat() - if cvss_scores_updated_dt_date_gte - else None - ) + json_cvss_scores_updated_dt_date_gte = cvss_scores_updated_dt_date_gte.isoformat() + + params["cvss_scores__updated_dt__date__gte"] = json_cvss_scores_updated_dt_date_gte - json_cvss_scores_updated_dt_date_lte: Union[Unset, None, str] = UNSET + json_cvss_scores_updated_dt_date_lte: Union[Unset, str] = UNSET if not isinstance(cvss_scores_updated_dt_date_lte, Unset): - json_cvss_scores_updated_dt_date_lte = ( - cvss_scores_updated_dt_date_lte.isoformat() - if cvss_scores_updated_dt_date_lte - else None - ) + json_cvss_scores_updated_dt_date_lte = cvss_scores_updated_dt_date_lte.isoformat() + + params["cvss_scores__updated_dt__date__lte"] = json_cvss_scores_updated_dt_date_lte - json_cvss_scores_updated_dt_gt: Union[Unset, None, str] = UNSET + json_cvss_scores_updated_dt_gt: Union[Unset, str] = UNSET if not isinstance(cvss_scores_updated_dt_gt, Unset): - json_cvss_scores_updated_dt_gt = ( - cvss_scores_updated_dt_gt.isoformat() if cvss_scores_updated_dt_gt else None - ) + json_cvss_scores_updated_dt_gt = cvss_scores_updated_dt_gt.isoformat() - json_cvss_scores_updated_dt_gte: Union[Unset, None, str] = UNSET + params["cvss_scores__updated_dt__gt"] = json_cvss_scores_updated_dt_gt + + json_cvss_scores_updated_dt_gte: Union[Unset, str] = UNSET if not isinstance(cvss_scores_updated_dt_gte, Unset): - json_cvss_scores_updated_dt_gte = ( - cvss_scores_updated_dt_gte.isoformat() - if cvss_scores_updated_dt_gte - else None - ) + json_cvss_scores_updated_dt_gte = cvss_scores_updated_dt_gte.isoformat() - json_cvss_scores_updated_dt_lt: Union[Unset, None, str] = UNSET + params["cvss_scores__updated_dt__gte"] = json_cvss_scores_updated_dt_gte + + json_cvss_scores_updated_dt_lt: Union[Unset, str] = UNSET if not isinstance(cvss_scores_updated_dt_lt, Unset): - json_cvss_scores_updated_dt_lt = ( - cvss_scores_updated_dt_lt.isoformat() if cvss_scores_updated_dt_lt else None - ) + json_cvss_scores_updated_dt_lt = cvss_scores_updated_dt_lt.isoformat() + + params["cvss_scores__updated_dt__lt"] = json_cvss_scores_updated_dt_lt - json_cvss_scores_updated_dt_lte: Union[Unset, None, str] = UNSET + json_cvss_scores_updated_dt_lte: Union[Unset, str] = UNSET if not isinstance(cvss_scores_updated_dt_lte, Unset): - json_cvss_scores_updated_dt_lte = ( - cvss_scores_updated_dt_lte.isoformat() - if cvss_scores_updated_dt_lte - else None - ) + json_cvss_scores_updated_dt_lte = cvss_scores_updated_dt_lte.isoformat() + + params["cvss_scores__updated_dt__lte"] = json_cvss_scores_updated_dt_lte + + json_cvss_scores_uuid: Union[Unset, str] = UNSET + if not isinstance(cvss_scores_uuid, Unset): + json_cvss_scores_uuid = str(cvss_scores_uuid) - json_exclude_fields: Union[Unset, None, List[str]] = UNSET + params["cvss_scores__uuid"] = json_cvss_scores_uuid + + params["cvss_scores__vector"] = cvss_scores_vector + + params["embargoed"] = embargoed + + json_exclude_fields: Union[Unset, list[str]] = UNSET if not isinstance(exclude_fields, Unset): - if exclude_fields is None: - json_exclude_fields = None - else: - json_exclude_fields = exclude_fields + json_exclude_fields = exclude_fields + + params["exclude_fields"] = json_exclude_fields - json_flaw_components: Union[Unset, None, List[str]] = UNSET + json_flaw_components: Union[Unset, list[str]] = UNSET if not isinstance(flaw_components, Unset): - if flaw_components is None: - json_flaw_components = None - else: - json_flaw_components = flaw_components + json_flaw_components = 
flaw_components - json_flaw_created_dt: Union[Unset, None, str] = UNSET + params["flaw__components"] = json_flaw_components + + json_flaw_created_dt: Union[Unset, str] = UNSET if not isinstance(flaw_created_dt, Unset): - json_flaw_created_dt = flaw_created_dt.isoformat() if flaw_created_dt else None + json_flaw_created_dt = flaw_created_dt.isoformat() + + params["flaw__created_dt"] = json_flaw_created_dt - json_flaw_created_dt_date: Union[Unset, None, str] = UNSET + json_flaw_created_dt_date: Union[Unset, str] = UNSET if not isinstance(flaw_created_dt_date, Unset): - json_flaw_created_dt_date = ( - flaw_created_dt_date.isoformat() if flaw_created_dt_date else None - ) + json_flaw_created_dt_date = flaw_created_dt_date.isoformat() - json_flaw_created_dt_date_gte: Union[Unset, None, str] = UNSET + params["flaw__created_dt__date"] = json_flaw_created_dt_date + + json_flaw_created_dt_date_gte: Union[Unset, str] = UNSET if not isinstance(flaw_created_dt_date_gte, Unset): - json_flaw_created_dt_date_gte = ( - flaw_created_dt_date_gte.isoformat() if flaw_created_dt_date_gte else None - ) + json_flaw_created_dt_date_gte = flaw_created_dt_date_gte.isoformat() + + params["flaw__created_dt__date__gte"] = json_flaw_created_dt_date_gte - json_flaw_created_dt_date_lte: Union[Unset, None, str] = UNSET + json_flaw_created_dt_date_lte: Union[Unset, str] = UNSET if not isinstance(flaw_created_dt_date_lte, Unset): - json_flaw_created_dt_date_lte = ( - flaw_created_dt_date_lte.isoformat() if flaw_created_dt_date_lte else None - ) + json_flaw_created_dt_date_lte = flaw_created_dt_date_lte.isoformat() + + params["flaw__created_dt__date__lte"] = json_flaw_created_dt_date_lte - json_flaw_created_dt_gt: Union[Unset, None, str] = UNSET + json_flaw_created_dt_gt: Union[Unset, str] = UNSET if not isinstance(flaw_created_dt_gt, Unset): - json_flaw_created_dt_gt = ( - flaw_created_dt_gt.isoformat() if flaw_created_dt_gt else None - ) + json_flaw_created_dt_gt = flaw_created_dt_gt.isoformat() - json_flaw_created_dt_gte: Union[Unset, None, str] = UNSET + params["flaw__created_dt__gt"] = json_flaw_created_dt_gt + + json_flaw_created_dt_gte: Union[Unset, str] = UNSET if not isinstance(flaw_created_dt_gte, Unset): - json_flaw_created_dt_gte = ( - flaw_created_dt_gte.isoformat() if flaw_created_dt_gte else None - ) + json_flaw_created_dt_gte = flaw_created_dt_gte.isoformat() - json_flaw_created_dt_lt: Union[Unset, None, str] = UNSET + params["flaw__created_dt__gte"] = json_flaw_created_dt_gte + + json_flaw_created_dt_lt: Union[Unset, str] = UNSET if not isinstance(flaw_created_dt_lt, Unset): - json_flaw_created_dt_lt = ( - flaw_created_dt_lt.isoformat() if flaw_created_dt_lt else None - ) + json_flaw_created_dt_lt = flaw_created_dt_lt.isoformat() + + params["flaw__created_dt__lt"] = json_flaw_created_dt_lt - json_flaw_created_dt_lte: Union[Unset, None, str] = UNSET + json_flaw_created_dt_lte: Union[Unset, str] = UNSET if not isinstance(flaw_created_dt_lte, Unset): - json_flaw_created_dt_lte = ( - flaw_created_dt_lte.isoformat() if flaw_created_dt_lte else None - ) + json_flaw_created_dt_lte = flaw_created_dt_lte.isoformat() + + params["flaw__created_dt__lte"] = json_flaw_created_dt_lte + + params["flaw__cve_id"] = flaw_cve_id - json_flaw_impact: Union[Unset, None, str] = UNSET + params["flaw__cwe_id"] = flaw_cwe_id + + params["flaw__embargoed"] = flaw_embargoed + + json_flaw_impact: Union[Unset, str] = UNSET if not isinstance(flaw_impact, Unset): + json_flaw_impact = OsidbApiV1AffectsListFlawImpact(flaw_impact).value - 
json_flaw_impact = ( - OsidbApiV1AffectsListFlawImpact(flaw_impact).value if flaw_impact else None - ) + params["flaw__impact"] = json_flaw_impact - json_flaw_reported_dt: Union[Unset, None, str] = UNSET + json_flaw_reported_dt: Union[Unset, str] = UNSET if not isinstance(flaw_reported_dt, Unset): - json_flaw_reported_dt = ( - flaw_reported_dt.isoformat() if flaw_reported_dt else None - ) + json_flaw_reported_dt = flaw_reported_dt.isoformat() + + params["flaw__reported_dt"] = json_flaw_reported_dt - json_flaw_reported_dt_date: Union[Unset, None, str] = UNSET + json_flaw_reported_dt_date: Union[Unset, str] = UNSET if not isinstance(flaw_reported_dt_date, Unset): - json_flaw_reported_dt_date = ( - flaw_reported_dt_date.isoformat() if flaw_reported_dt_date else None - ) + json_flaw_reported_dt_date = flaw_reported_dt_date.isoformat() + + params["flaw__reported_dt__date"] = json_flaw_reported_dt_date - json_flaw_reported_dt_date_gte: Union[Unset, None, str] = UNSET + json_flaw_reported_dt_date_gte: Union[Unset, str] = UNSET if not isinstance(flaw_reported_dt_date_gte, Unset): - json_flaw_reported_dt_date_gte = ( - flaw_reported_dt_date_gte.isoformat() if flaw_reported_dt_date_gte else None - ) + json_flaw_reported_dt_date_gte = flaw_reported_dt_date_gte.isoformat() - json_flaw_reported_dt_date_lte: Union[Unset, None, str] = UNSET + params["flaw__reported_dt__date__gte"] = json_flaw_reported_dt_date_gte + + json_flaw_reported_dt_date_lte: Union[Unset, str] = UNSET if not isinstance(flaw_reported_dt_date_lte, Unset): - json_flaw_reported_dt_date_lte = ( - flaw_reported_dt_date_lte.isoformat() if flaw_reported_dt_date_lte else None - ) + json_flaw_reported_dt_date_lte = flaw_reported_dt_date_lte.isoformat() + + params["flaw__reported_dt__date__lte"] = json_flaw_reported_dt_date_lte - json_flaw_reported_dt_gt: Union[Unset, None, str] = UNSET + json_flaw_reported_dt_gt: Union[Unset, str] = UNSET if not isinstance(flaw_reported_dt_gt, Unset): - json_flaw_reported_dt_gt = ( - flaw_reported_dt_gt.isoformat() if flaw_reported_dt_gt else None - ) + json_flaw_reported_dt_gt = flaw_reported_dt_gt.isoformat() + + params["flaw__reported_dt__gt"] = json_flaw_reported_dt_gt - json_flaw_reported_dt_gte: Union[Unset, None, str] = UNSET + json_flaw_reported_dt_gte: Union[Unset, str] = UNSET if not isinstance(flaw_reported_dt_gte, Unset): - json_flaw_reported_dt_gte = ( - flaw_reported_dt_gte.isoformat() if flaw_reported_dt_gte else None - ) + json_flaw_reported_dt_gte = flaw_reported_dt_gte.isoformat() - json_flaw_reported_dt_lt: Union[Unset, None, str] = UNSET + params["flaw__reported_dt__gte"] = json_flaw_reported_dt_gte + + json_flaw_reported_dt_lt: Union[Unset, str] = UNSET if not isinstance(flaw_reported_dt_lt, Unset): - json_flaw_reported_dt_lt = ( - flaw_reported_dt_lt.isoformat() if flaw_reported_dt_lt else None - ) + json_flaw_reported_dt_lt = flaw_reported_dt_lt.isoformat() - json_flaw_reported_dt_lte: Union[Unset, None, str] = UNSET + params["flaw__reported_dt__lt"] = json_flaw_reported_dt_lt + + json_flaw_reported_dt_lte: Union[Unset, str] = UNSET if not isinstance(flaw_reported_dt_lte, Unset): - json_flaw_reported_dt_lte = ( - flaw_reported_dt_lte.isoformat() if flaw_reported_dt_lte else None - ) + json_flaw_reported_dt_lte = flaw_reported_dt_lte.isoformat() + + params["flaw__reported_dt__lte"] = json_flaw_reported_dt_lte - json_flaw_source: Union[Unset, None, str] = UNSET + json_flaw_source: Union[Unset, str] = UNSET if not isinstance(flaw_source, Unset): + json_flaw_source = 
OsidbApiV1AffectsListFlawSource(flaw_source).value - json_flaw_source = ( - OsidbApiV1AffectsListFlawSource(flaw_source).value if flaw_source else None - ) + params["flaw__source"] = json_flaw_source - json_flaw_unembargo_dt: Union[Unset, None, str] = UNSET + json_flaw_unembargo_dt: Union[Unset, str] = UNSET if not isinstance(flaw_unembargo_dt, Unset): - json_flaw_unembargo_dt = ( - flaw_unembargo_dt.isoformat() if flaw_unembargo_dt else None - ) + json_flaw_unembargo_dt = flaw_unembargo_dt.isoformat() + + params["flaw__unembargo_dt"] = json_flaw_unembargo_dt - json_flaw_updated_dt: Union[Unset, None, str] = UNSET + json_flaw_updated_dt: Union[Unset, str] = UNSET if not isinstance(flaw_updated_dt, Unset): - json_flaw_updated_dt = flaw_updated_dt.isoformat() if flaw_updated_dt else None + json_flaw_updated_dt = flaw_updated_dt.isoformat() - json_flaw_updated_dt_date: Union[Unset, None, str] = UNSET + params["flaw__updated_dt"] = json_flaw_updated_dt + + json_flaw_updated_dt_date: Union[Unset, str] = UNSET if not isinstance(flaw_updated_dt_date, Unset): - json_flaw_updated_dt_date = ( - flaw_updated_dt_date.isoformat() if flaw_updated_dt_date else None - ) + json_flaw_updated_dt_date = flaw_updated_dt_date.isoformat() + + params["flaw__updated_dt__date"] = json_flaw_updated_dt_date - json_flaw_updated_dt_date_gte: Union[Unset, None, str] = UNSET + json_flaw_updated_dt_date_gte: Union[Unset, str] = UNSET if not isinstance(flaw_updated_dt_date_gte, Unset): - json_flaw_updated_dt_date_gte = ( - flaw_updated_dt_date_gte.isoformat() if flaw_updated_dt_date_gte else None - ) + json_flaw_updated_dt_date_gte = flaw_updated_dt_date_gte.isoformat() + + params["flaw__updated_dt__date__gte"] = json_flaw_updated_dt_date_gte - json_flaw_updated_dt_date_lte: Union[Unset, None, str] = UNSET + json_flaw_updated_dt_date_lte: Union[Unset, str] = UNSET if not isinstance(flaw_updated_dt_date_lte, Unset): - json_flaw_updated_dt_date_lte = ( - flaw_updated_dt_date_lte.isoformat() if flaw_updated_dt_date_lte else None - ) + json_flaw_updated_dt_date_lte = flaw_updated_dt_date_lte.isoformat() - json_flaw_updated_dt_gt: Union[Unset, None, str] = UNSET + params["flaw__updated_dt__date__lte"] = json_flaw_updated_dt_date_lte + + json_flaw_updated_dt_gt: Union[Unset, str] = UNSET if not isinstance(flaw_updated_dt_gt, Unset): - json_flaw_updated_dt_gt = ( - flaw_updated_dt_gt.isoformat() if flaw_updated_dt_gt else None - ) + json_flaw_updated_dt_gt = flaw_updated_dt_gt.isoformat() + + params["flaw__updated_dt__gt"] = json_flaw_updated_dt_gt - json_flaw_updated_dt_gte: Union[Unset, None, str] = UNSET + json_flaw_updated_dt_gte: Union[Unset, str] = UNSET if not isinstance(flaw_updated_dt_gte, Unset): - json_flaw_updated_dt_gte = ( - flaw_updated_dt_gte.isoformat() if flaw_updated_dt_gte else None - ) + json_flaw_updated_dt_gte = flaw_updated_dt_gte.isoformat() + + params["flaw__updated_dt__gte"] = json_flaw_updated_dt_gte - json_flaw_updated_dt_lt: Union[Unset, None, str] = UNSET + json_flaw_updated_dt_lt: Union[Unset, str] = UNSET if not isinstance(flaw_updated_dt_lt, Unset): - json_flaw_updated_dt_lt = ( - flaw_updated_dt_lt.isoformat() if flaw_updated_dt_lt else None - ) + json_flaw_updated_dt_lt = flaw_updated_dt_lt.isoformat() - json_flaw_updated_dt_lte: Union[Unset, None, str] = UNSET + params["flaw__updated_dt__lt"] = json_flaw_updated_dt_lt + + json_flaw_updated_dt_lte: Union[Unset, str] = UNSET if not isinstance(flaw_updated_dt_lte, Unset): - json_flaw_updated_dt_lte = ( - flaw_updated_dt_lte.isoformat() if 
flaw_updated_dt_lte else None - ) + json_flaw_updated_dt_lte = flaw_updated_dt_lte.isoformat() + + params["flaw__updated_dt__lte"] = json_flaw_updated_dt_lte + + json_flaw_uuid: Union[Unset, str] = UNSET + if not isinstance(flaw_uuid, Unset): + json_flaw_uuid = str(flaw_uuid) + + params["flaw__uuid"] = json_flaw_uuid - json_impact: Union[Unset, None, str] = UNSET + json_impact: Union[Unset, str] = UNSET if not isinstance(impact, Unset): + json_impact = OsidbApiV1AffectsListImpact(impact).value - json_impact = OsidbApiV1AffectsListImpact(impact).value if impact else None + params["impact"] = json_impact - json_include_fields: Union[Unset, None, List[str]] = UNSET + json_include_fields: Union[Unset, list[str]] = UNSET if not isinstance(include_fields, Unset): - if include_fields is None: - json_include_fields = None - else: - json_include_fields = include_fields + json_include_fields = include_fields + + params["include_fields"] = json_include_fields - json_include_meta_attr: Union[Unset, None, List[str]] = UNSET + json_include_meta_attr: Union[Unset, list[str]] = UNSET if not isinstance(include_meta_attr, Unset): - if include_meta_attr is None: - json_include_meta_attr = None - else: - json_include_meta_attr = include_meta_attr + json_include_meta_attr = include_meta_attr + + params["include_meta_attr"] = json_include_meta_attr + + params["limit"] = limit + + params["offset"] = offset - json_order: Union[Unset, None, List[str]] = UNSET + json_order: Union[Unset, list[str]] = UNSET if not isinstance(order, Unset): - if order is None: - json_order = None - else: - json_order = [] - for order_item_data in order: - order_item: str = UNSET - if not isinstance(order_item_data, Unset): + json_order = [] + for order_item_data in order: + order_item: str = UNSET + if not isinstance(order_item_data, Unset): + order_item = OsidbApiV1AffectsListOrderItem(order_item_data).value + + json_order.append(order_item) + + params["order"] = json_order - order_item = OsidbApiV1AffectsListOrderItem(order_item_data).value + params["ps_component"] = ps_component - json_order.append(order_item) + params["ps_module"] = ps_module - json_resolution: Union[Unset, None, str] = UNSET + json_resolution: Union[Unset, str] = UNSET if not isinstance(resolution, Unset): + json_resolution = OsidbApiV1AffectsListResolution(resolution).value - json_resolution = ( - OsidbApiV1AffectsListResolution(resolution).value if resolution else None - ) + params["resolution"] = json_resolution - json_trackers_created_dt: Union[Unset, None, str] = UNSET + json_trackers_created_dt: Union[Unset, str] = UNSET if not isinstance(trackers_created_dt, Unset): - json_trackers_created_dt = ( - trackers_created_dt.isoformat() if trackers_created_dt else None - ) + json_trackers_created_dt = trackers_created_dt.isoformat() - json_trackers_created_dt_date: Union[Unset, None, str] = UNSET + params["trackers__created_dt"] = json_trackers_created_dt + + json_trackers_created_dt_date: Union[Unset, str] = UNSET if not isinstance(trackers_created_dt_date, Unset): - json_trackers_created_dt_date = ( - trackers_created_dt_date.isoformat() if trackers_created_dt_date else None - ) + json_trackers_created_dt_date = trackers_created_dt_date.isoformat() + + params["trackers__created_dt__date"] = json_trackers_created_dt_date - json_trackers_created_dt_date_gte: Union[Unset, None, str] = UNSET + json_trackers_created_dt_date_gte: Union[Unset, str] = UNSET if not isinstance(trackers_created_dt_date_gte, Unset): - json_trackers_created_dt_date_gte = ( - 
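A short sketch, using a stand-in enum, of how the enum-typed filters in this hunk (affectedness, impact, resolution, the order items, ...) are reduced to plain strings before they reach the query parameters; the real OsidbApiV1AffectsList* enums live in the generated models package, and the member values below are placeholders.

from enum import Enum


class OrderItem(str, Enum):
    # Stand-in for OsidbApiV1AffectsListOrderItem; member values are illustrative.
    CREATED_DT = "created_dt"
    UPDATED_DT_DESC = "-updated_dt"


order = [OrderItem.CREATED_DT, OrderItem.UPDATED_DT_DESC]
json_order = [OrderItem(item).value for item in order]  # same shape as the loop above
print(json_order)  # ['created_dt', '-updated_dt']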
trackers_created_dt_date_gte.isoformat() - if trackers_created_dt_date_gte - else None - ) + json_trackers_created_dt_date_gte = trackers_created_dt_date_gte.isoformat() + + params["trackers__created_dt__date__gte"] = json_trackers_created_dt_date_gte - json_trackers_created_dt_date_lte: Union[Unset, None, str] = UNSET + json_trackers_created_dt_date_lte: Union[Unset, str] = UNSET if not isinstance(trackers_created_dt_date_lte, Unset): - json_trackers_created_dt_date_lte = ( - trackers_created_dt_date_lte.isoformat() - if trackers_created_dt_date_lte - else None - ) + json_trackers_created_dt_date_lte = trackers_created_dt_date_lte.isoformat() - json_trackers_created_dt_gt: Union[Unset, None, str] = UNSET + params["trackers__created_dt__date__lte"] = json_trackers_created_dt_date_lte + + json_trackers_created_dt_gt: Union[Unset, str] = UNSET if not isinstance(trackers_created_dt_gt, Unset): - json_trackers_created_dt_gt = ( - trackers_created_dt_gt.isoformat() if trackers_created_dt_gt else None - ) + json_trackers_created_dt_gt = trackers_created_dt_gt.isoformat() + + params["trackers__created_dt__gt"] = json_trackers_created_dt_gt - json_trackers_created_dt_gte: Union[Unset, None, str] = UNSET + json_trackers_created_dt_gte: Union[Unset, str] = UNSET if not isinstance(trackers_created_dt_gte, Unset): - json_trackers_created_dt_gte = ( - trackers_created_dt_gte.isoformat() if trackers_created_dt_gte else None - ) + json_trackers_created_dt_gte = trackers_created_dt_gte.isoformat() + + params["trackers__created_dt__gte"] = json_trackers_created_dt_gte - json_trackers_created_dt_lt: Union[Unset, None, str] = UNSET + json_trackers_created_dt_lt: Union[Unset, str] = UNSET if not isinstance(trackers_created_dt_lt, Unset): - json_trackers_created_dt_lt = ( - trackers_created_dt_lt.isoformat() if trackers_created_dt_lt else None - ) + json_trackers_created_dt_lt = trackers_created_dt_lt.isoformat() + + params["trackers__created_dt__lt"] = json_trackers_created_dt_lt - json_trackers_created_dt_lte: Union[Unset, None, str] = UNSET + json_trackers_created_dt_lte: Union[Unset, str] = UNSET if not isinstance(trackers_created_dt_lte, Unset): - json_trackers_created_dt_lte = ( - trackers_created_dt_lte.isoformat() if trackers_created_dt_lte else None - ) + json_trackers_created_dt_lte = trackers_created_dt_lte.isoformat() + + params["trackers__created_dt__lte"] = json_trackers_created_dt_lte + + params["trackers__embargoed"] = trackers_embargoed + + params["trackers__external_system_id"] = trackers_external_system_id - json_trackers_type: Union[Unset, None, str] = UNSET + params["trackers__ps_update_stream"] = trackers_ps_update_stream + + params["trackers__resolution"] = trackers_resolution + + params["trackers__status"] = trackers_status + + json_trackers_type: Union[Unset, str] = UNSET if not isinstance(trackers_type, Unset): + json_trackers_type = OsidbApiV1AffectsListTrackersType(trackers_type).value - json_trackers_type = ( - OsidbApiV1AffectsListTrackersType(trackers_type).value - if trackers_type - else None - ) + params["trackers__type"] = json_trackers_type - json_trackers_updated_dt: Union[Unset, None, str] = UNSET + json_trackers_updated_dt: Union[Unset, str] = UNSET if not isinstance(trackers_updated_dt, Unset): - json_trackers_updated_dt = ( - trackers_updated_dt.isoformat() if trackers_updated_dt else None - ) + json_trackers_updated_dt = trackers_updated_dt.isoformat() + + params["trackers__updated_dt"] = json_trackers_updated_dt - json_trackers_updated_dt_date: Union[Unset, None, str] 
= UNSET + json_trackers_updated_dt_date: Union[Unset, str] = UNSET if not isinstance(trackers_updated_dt_date, Unset): - json_trackers_updated_dt_date = ( - trackers_updated_dt_date.isoformat() if trackers_updated_dt_date else None - ) + json_trackers_updated_dt_date = trackers_updated_dt_date.isoformat() + + params["trackers__updated_dt__date"] = json_trackers_updated_dt_date - json_trackers_updated_dt_date_gte: Union[Unset, None, str] = UNSET + json_trackers_updated_dt_date_gte: Union[Unset, str] = UNSET if not isinstance(trackers_updated_dt_date_gte, Unset): - json_trackers_updated_dt_date_gte = ( - trackers_updated_dt_date_gte.isoformat() - if trackers_updated_dt_date_gte - else None - ) + json_trackers_updated_dt_date_gte = trackers_updated_dt_date_gte.isoformat() - json_trackers_updated_dt_date_lte: Union[Unset, None, str] = UNSET + params["trackers__updated_dt__date__gte"] = json_trackers_updated_dt_date_gte + + json_trackers_updated_dt_date_lte: Union[Unset, str] = UNSET if not isinstance(trackers_updated_dt_date_lte, Unset): - json_trackers_updated_dt_date_lte = ( - trackers_updated_dt_date_lte.isoformat() - if trackers_updated_dt_date_lte - else None - ) + json_trackers_updated_dt_date_lte = trackers_updated_dt_date_lte.isoformat() + + params["trackers__updated_dt__date__lte"] = json_trackers_updated_dt_date_lte - json_trackers_updated_dt_gt: Union[Unset, None, str] = UNSET + json_trackers_updated_dt_gt: Union[Unset, str] = UNSET if not isinstance(trackers_updated_dt_gt, Unset): - json_trackers_updated_dt_gt = ( - trackers_updated_dt_gt.isoformat() if trackers_updated_dt_gt else None - ) + json_trackers_updated_dt_gt = trackers_updated_dt_gt.isoformat() + + params["trackers__updated_dt__gt"] = json_trackers_updated_dt_gt - json_trackers_updated_dt_gte: Union[Unset, None, str] = UNSET + json_trackers_updated_dt_gte: Union[Unset, str] = UNSET if not isinstance(trackers_updated_dt_gte, Unset): - json_trackers_updated_dt_gte = ( - trackers_updated_dt_gte.isoformat() if trackers_updated_dt_gte else None - ) + json_trackers_updated_dt_gte = trackers_updated_dt_gte.isoformat() - json_trackers_updated_dt_lt: Union[Unset, None, str] = UNSET + params["trackers__updated_dt__gte"] = json_trackers_updated_dt_gte + + json_trackers_updated_dt_lt: Union[Unset, str] = UNSET if not isinstance(trackers_updated_dt_lt, Unset): - json_trackers_updated_dt_lt = ( - trackers_updated_dt_lt.isoformat() if trackers_updated_dt_lt else None - ) + json_trackers_updated_dt_lt = trackers_updated_dt_lt.isoformat() - json_trackers_updated_dt_lte: Union[Unset, None, str] = UNSET + params["trackers__updated_dt__lt"] = json_trackers_updated_dt_lt + + json_trackers_updated_dt_lte: Union[Unset, str] = UNSET if not isinstance(trackers_updated_dt_lte, Unset): - json_trackers_updated_dt_lte = ( - trackers_updated_dt_lte.isoformat() if trackers_updated_dt_lte else None - ) + json_trackers_updated_dt_lte = trackers_updated_dt_lte.isoformat() + + params["trackers__updated_dt__lte"] = json_trackers_updated_dt_lte - json_updated_dt: Union[Unset, None, str] = UNSET + json_trackers_uuid: Union[Unset, str] = UNSET + if not isinstance(trackers_uuid, Unset): + json_trackers_uuid = str(trackers_uuid) + + params["trackers__uuid"] = json_trackers_uuid + + json_updated_dt: Union[Unset, str] = UNSET if not isinstance(updated_dt, Unset): - json_updated_dt = updated_dt.isoformat() if updated_dt else None + json_updated_dt = updated_dt.isoformat() + + params["updated_dt"] = json_updated_dt - json_updated_dt_date: Union[Unset, None, str] = 
UNSET + json_updated_dt_date: Union[Unset, str] = UNSET if not isinstance(updated_dt_date, Unset): - json_updated_dt_date = updated_dt_date.isoformat() if updated_dt_date else None + json_updated_dt_date = updated_dt_date.isoformat() - json_updated_dt_date_gte: Union[Unset, None, str] = UNSET + params["updated_dt__date"] = json_updated_dt_date + + json_updated_dt_date_gte: Union[Unset, str] = UNSET if not isinstance(updated_dt_date_gte, Unset): - json_updated_dt_date_gte = ( - updated_dt_date_gte.isoformat() if updated_dt_date_gte else None - ) + json_updated_dt_date_gte = updated_dt_date_gte.isoformat() + + params["updated_dt__date__gte"] = json_updated_dt_date_gte - json_updated_dt_date_lte: Union[Unset, None, str] = UNSET + json_updated_dt_date_lte: Union[Unset, str] = UNSET if not isinstance(updated_dt_date_lte, Unset): - json_updated_dt_date_lte = ( - updated_dt_date_lte.isoformat() if updated_dt_date_lte else None - ) + json_updated_dt_date_lte = updated_dt_date_lte.isoformat() + + params["updated_dt__date__lte"] = json_updated_dt_date_lte - json_updated_dt_gt: Union[Unset, None, str] = UNSET + json_updated_dt_gt: Union[Unset, str] = UNSET if not isinstance(updated_dt_gt, Unset): - json_updated_dt_gt = updated_dt_gt.isoformat() if updated_dt_gt else None + json_updated_dt_gt = updated_dt_gt.isoformat() - json_updated_dt_gte: Union[Unset, None, str] = UNSET + params["updated_dt__gt"] = json_updated_dt_gt + + json_updated_dt_gte: Union[Unset, str] = UNSET if not isinstance(updated_dt_gte, Unset): - json_updated_dt_gte = updated_dt_gte.isoformat() if updated_dt_gte else None + json_updated_dt_gte = updated_dt_gte.isoformat() + + params["updated_dt__gte"] = json_updated_dt_gte - json_updated_dt_lt: Union[Unset, None, str] = UNSET + json_updated_dt_lt: Union[Unset, str] = UNSET if not isinstance(updated_dt_lt, Unset): - json_updated_dt_lt = updated_dt_lt.isoformat() if updated_dt_lt else None + json_updated_dt_lt = updated_dt_lt.isoformat() + + params["updated_dt__lt"] = json_updated_dt_lt - json_updated_dt_lte: Union[Unset, None, str] = UNSET + json_updated_dt_lte: Union[Unset, str] = UNSET if not isinstance(updated_dt_lte, Unset): - json_updated_dt_lte = updated_dt_lte.isoformat() if updated_dt_lte else None - - params: Dict[str, Any] = { - "affectedness": json_affectedness, - "created_dt": json_created_dt, - "created_dt__date": json_created_dt_date, - "created_dt__date__gte": json_created_dt_date_gte, - "created_dt__date__lte": json_created_dt_date_lte, - "created_dt__gt": json_created_dt_gt, - "created_dt__gte": json_created_dt_gte, - "created_dt__lt": json_created_dt_lt, - "created_dt__lte": json_created_dt_lte, - "cvss_scores__comment": cvss_scores_comment, - "cvss_scores__created_dt": json_cvss_scores_created_dt, - "cvss_scores__created_dt__date": json_cvss_scores_created_dt_date, - "cvss_scores__created_dt__date__gte": json_cvss_scores_created_dt_date_gte, - "cvss_scores__created_dt__date__lte": json_cvss_scores_created_dt_date_lte, - "cvss_scores__created_dt__gt": json_cvss_scores_created_dt_gt, - "cvss_scores__created_dt__gte": json_cvss_scores_created_dt_gte, - "cvss_scores__created_dt__lt": json_cvss_scores_created_dt_lt, - "cvss_scores__created_dt__lte": json_cvss_scores_created_dt_lte, - "cvss_scores__cvss_version": cvss_scores_cvss_version, - "cvss_scores__issuer": json_cvss_scores_issuer, - "cvss_scores__score": cvss_scores_score, - "cvss_scores__updated_dt": json_cvss_scores_updated_dt, - "cvss_scores__updated_dt__date": json_cvss_scores_updated_dt_date, - 
"cvss_scores__updated_dt__date__gte": json_cvss_scores_updated_dt_date_gte, - "cvss_scores__updated_dt__date__lte": json_cvss_scores_updated_dt_date_lte, - "cvss_scores__updated_dt__gt": json_cvss_scores_updated_dt_gt, - "cvss_scores__updated_dt__gte": json_cvss_scores_updated_dt_gte, - "cvss_scores__updated_dt__lt": json_cvss_scores_updated_dt_lt, - "cvss_scores__updated_dt__lte": json_cvss_scores_updated_dt_lte, - "cvss_scores__uuid": cvss_scores_uuid, - "cvss_scores__vector": cvss_scores_vector, - "embargoed": embargoed, - "exclude_fields": json_exclude_fields, - "flaw__components": json_flaw_components, - "flaw__created_dt": json_flaw_created_dt, - "flaw__created_dt__date": json_flaw_created_dt_date, - "flaw__created_dt__date__gte": json_flaw_created_dt_date_gte, - "flaw__created_dt__date__lte": json_flaw_created_dt_date_lte, - "flaw__created_dt__gt": json_flaw_created_dt_gt, - "flaw__created_dt__gte": json_flaw_created_dt_gte, - "flaw__created_dt__lt": json_flaw_created_dt_lt, - "flaw__created_dt__lte": json_flaw_created_dt_lte, - "flaw__cve_id": flaw_cve_id, - "flaw__cwe_id": flaw_cwe_id, - "flaw__embargoed": flaw_embargoed, - "flaw__impact": json_flaw_impact, - "flaw__reported_dt": json_flaw_reported_dt, - "flaw__reported_dt__date": json_flaw_reported_dt_date, - "flaw__reported_dt__date__gte": json_flaw_reported_dt_date_gte, - "flaw__reported_dt__date__lte": json_flaw_reported_dt_date_lte, - "flaw__reported_dt__gt": json_flaw_reported_dt_gt, - "flaw__reported_dt__gte": json_flaw_reported_dt_gte, - "flaw__reported_dt__lt": json_flaw_reported_dt_lt, - "flaw__reported_dt__lte": json_flaw_reported_dt_lte, - "flaw__source": json_flaw_source, - "flaw__unembargo_dt": json_flaw_unembargo_dt, - "flaw__updated_dt": json_flaw_updated_dt, - "flaw__updated_dt__date": json_flaw_updated_dt_date, - "flaw__updated_dt__date__gte": json_flaw_updated_dt_date_gte, - "flaw__updated_dt__date__lte": json_flaw_updated_dt_date_lte, - "flaw__updated_dt__gt": json_flaw_updated_dt_gt, - "flaw__updated_dt__gte": json_flaw_updated_dt_gte, - "flaw__updated_dt__lt": json_flaw_updated_dt_lt, - "flaw__updated_dt__lte": json_flaw_updated_dt_lte, - "flaw__uuid": flaw_uuid, - "impact": json_impact, - "include_fields": json_include_fields, - "include_meta_attr": json_include_meta_attr, - "limit": limit, - "offset": offset, - "order": json_order, - "ps_component": ps_component, - "ps_module": ps_module, - "resolution": json_resolution, - "trackers__created_dt": json_trackers_created_dt, - "trackers__created_dt__date": json_trackers_created_dt_date, - "trackers__created_dt__date__gte": json_trackers_created_dt_date_gte, - "trackers__created_dt__date__lte": json_trackers_created_dt_date_lte, - "trackers__created_dt__gt": json_trackers_created_dt_gt, - "trackers__created_dt__gte": json_trackers_created_dt_gte, - "trackers__created_dt__lt": json_trackers_created_dt_lt, - "trackers__created_dt__lte": json_trackers_created_dt_lte, - "trackers__embargoed": trackers_embargoed, - "trackers__external_system_id": trackers_external_system_id, - "trackers__ps_update_stream": trackers_ps_update_stream, - "trackers__resolution": trackers_resolution, - "trackers__status": trackers_status, - "trackers__type": json_trackers_type, - "trackers__updated_dt": json_trackers_updated_dt, - "trackers__updated_dt__date": json_trackers_updated_dt_date, - "trackers__updated_dt__date__gte": json_trackers_updated_dt_date_gte, - "trackers__updated_dt__date__lte": json_trackers_updated_dt_date_lte, - "trackers__updated_dt__gt": 
json_trackers_updated_dt_gt, - "trackers__updated_dt__gte": json_trackers_updated_dt_gte, - "trackers__updated_dt__lt": json_trackers_updated_dt_lt, - "trackers__updated_dt__lte": json_trackers_updated_dt_lte, - "trackers__uuid": trackers_uuid, - "updated_dt": json_updated_dt, - "updated_dt__date": json_updated_dt_date, - "updated_dt__date__gte": json_updated_dt_date_gte, - "updated_dt__date__lte": json_updated_dt_date_lte, - "updated_dt__gt": json_updated_dt_gt, - "updated_dt__gte": json_updated_dt_gte, - "updated_dt__lt": json_updated_dt_lt, - "updated_dt__lte": json_updated_dt_lte, - "uuid": uuid, - } + json_updated_dt_lte = updated_dt_lte.isoformat() + + params["updated_dt__lte"] = json_updated_dt_lte + + json_uuid: Union[Unset, str] = UNSET + if not isinstance(uuid, Unset): + json_uuid = str(uuid) + + params["uuid"] = json_uuid + params = {k: v for k, v in params.items() if v is not UNSET and v is not None} - return { - "url": url, - "headers": headers, + _kwargs: dict[str, Any] = { + "url": f"{client.base_url}/osidb/api/v1/affects", "params": params, } + return _kwargs + def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[OsidbApiV1AffectsListResponse200]: if response.status_code == 200: + # } _response_200 = response.json() response_200: OsidbApiV1AffectsListResponse200 if isinstance(_response_200, Unset): @@ -921,132 +852,246 @@ def _parse_response( response_200 = OsidbApiV1AffectsListResponse200.from_dict(_response_200) return response_200 - return None def _build_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[OsidbApiV1AffectsListResponse200]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) def sync_detailed( *, client: AuthenticatedClient, - affectedness: Union[Unset, None, OsidbApiV1AffectsListAffectedness] = UNSET, - created_dt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_date: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_comment: Union[Unset, None, str] = UNSET, - cvss_scores_created_dt: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_created_dt_date: Union[Unset, None, datetime.date] = UNSET, - cvss_scores_created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - cvss_scores_created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - cvss_scores_created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_cvss_version: Union[Unset, None, str] = UNSET, - cvss_scores_issuer: Union[ - Unset, None, OsidbApiV1AffectsListCvssScoresIssuer - ] = UNSET, - cvss_scores_score: Union[Unset, None, float] = UNSET, - 
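A compact sketch of the new parameter handling introduced here, with placeholder values (illustration only, not taken from the patch): UNSET and None entries are stripped before the request kwargs are built, and UUID filters are passed through str().

import uuid
from typing import Any

UNSET = object()  # stand-in sentinel for this sketch

params: dict[str, Any] = {
    "affectedness": "AFFECTED",
    "limit": 10,
    "offset": UNSET,                       # never provided by the caller
    "ps_module": None,
    "trackers__uuid": str(uuid.uuid4()),   # UUID filters are serialized with str()
}
params = {k: v for k, v in params.items() if v is not UNSET and v is not None}

_kwargs: dict[str, Any] = {
    "url": "https://osidb.example.com/osidb/api/v1/affects",  # placeholder base URL
    "params": params,
}
# Only affectedness, limit and trackers__uuid survive as query parameters.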
cvss_scores_updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - cvss_scores_updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - cvss_scores_updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - cvss_scores_updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_uuid: Union[Unset, None, str] = UNSET, - cvss_scores_vector: Union[Unset, None, str] = UNSET, - embargoed: Union[Unset, None, bool] = UNSET, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - flaw_components: Union[Unset, None, List[str]] = UNSET, - flaw_created_dt: Union[Unset, None, datetime.datetime] = UNSET, - flaw_created_dt_date: Union[Unset, None, datetime.date] = UNSET, - flaw_created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - flaw_created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - flaw_created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - flaw_created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - flaw_created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - flaw_created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - flaw_cve_id: Union[Unset, None, str] = UNSET, - flaw_cwe_id: Union[Unset, None, str] = UNSET, - flaw_embargoed: Union[Unset, None, bool] = UNSET, - flaw_impact: Union[Unset, None, OsidbApiV1AffectsListFlawImpact] = UNSET, - flaw_reported_dt: Union[Unset, None, datetime.datetime] = UNSET, - flaw_reported_dt_date: Union[Unset, None, datetime.date] = UNSET, - flaw_reported_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - flaw_reported_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - flaw_reported_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - flaw_reported_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - flaw_reported_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - flaw_reported_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - flaw_source: Union[Unset, None, OsidbApiV1AffectsListFlawSource] = UNSET, - flaw_unembargo_dt: Union[Unset, None, datetime.datetime] = UNSET, - flaw_updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - flaw_updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - flaw_updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - flaw_updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - flaw_updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - flaw_updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - flaw_updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - flaw_updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - flaw_uuid: Union[Unset, None, str] = UNSET, - impact: Union[Unset, None, OsidbApiV1AffectsListImpact] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, - include_meta_attr: Union[Unset, None, List[str]] = UNSET, - limit: Union[Unset, None, int] = UNSET, - offset: Union[Unset, None, int] = UNSET, - order: Union[Unset, None, List[OsidbApiV1AffectsListOrderItem]] = UNSET, - ps_component: Union[Unset, None, str] = UNSET, - ps_module: Union[Unset, None, str] = UNSET, - resolution: Union[Unset, None, OsidbApiV1AffectsListResolution] = UNSET, - trackers_created_dt: Union[Unset, None, datetime.datetime] = UNSET, - 
trackers_created_dt_date: Union[Unset, None, datetime.date] = UNSET, - trackers_created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - trackers_created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - trackers_created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - trackers_created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - trackers_created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - trackers_created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - trackers_embargoed: Union[Unset, None, bool] = UNSET, - trackers_external_system_id: Union[Unset, None, str] = UNSET, - trackers_ps_update_stream: Union[Unset, None, str] = UNSET, - trackers_resolution: Union[Unset, None, str] = UNSET, - trackers_status: Union[Unset, None, str] = UNSET, - trackers_type: Union[Unset, None, OsidbApiV1AffectsListTrackersType] = UNSET, - trackers_updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - trackers_updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - trackers_updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - trackers_updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - trackers_updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - trackers_updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - trackers_updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - trackers_updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - trackers_uuid: Union[Unset, None, str] = UNSET, - updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - uuid: Union[Unset, None, str] = UNSET, + affectedness: Union[Unset, OsidbApiV1AffectsListAffectedness] = UNSET, + created_dt: Union[Unset, datetime.datetime] = UNSET, + created_dt_date: Union[Unset, datetime.date] = UNSET, + created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_comment: Union[Unset, str] = UNSET, + cvss_scores_created_dt: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_created_dt_date: Union[Unset, datetime.date] = UNSET, + cvss_scores_created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + cvss_scores_created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + cvss_scores_created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_cvss_version: Union[Unset, str] = UNSET, + cvss_scores_issuer: Union[Unset, OsidbApiV1AffectsListCvssScoresIssuer] = UNSET, + cvss_scores_score: Union[Unset, float] = UNSET, + cvss_scores_updated_dt: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_updated_dt_date: Union[Unset, datetime.date] = UNSET, + 
cvss_scores_updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + cvss_scores_updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + cvss_scores_updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_uuid: Union[Unset, UUID] = UNSET, + cvss_scores_vector: Union[Unset, str] = UNSET, + embargoed: Union[Unset, bool] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + flaw_components: Union[Unset, list[str]] = UNSET, + flaw_created_dt: Union[Unset, datetime.datetime] = UNSET, + flaw_created_dt_date: Union[Unset, datetime.date] = UNSET, + flaw_created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + flaw_created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + flaw_created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + flaw_created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + flaw_created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + flaw_created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + flaw_cve_id: Union[Unset, str] = UNSET, + flaw_cwe_id: Union[Unset, str] = UNSET, + flaw_embargoed: Union[Unset, bool] = UNSET, + flaw_impact: Union[Unset, OsidbApiV1AffectsListFlawImpact] = UNSET, + flaw_reported_dt: Union[Unset, datetime.datetime] = UNSET, + flaw_reported_dt_date: Union[Unset, datetime.date] = UNSET, + flaw_reported_dt_date_gte: Union[Unset, datetime.date] = UNSET, + flaw_reported_dt_date_lte: Union[Unset, datetime.date] = UNSET, + flaw_reported_dt_gt: Union[Unset, datetime.datetime] = UNSET, + flaw_reported_dt_gte: Union[Unset, datetime.datetime] = UNSET, + flaw_reported_dt_lt: Union[Unset, datetime.datetime] = UNSET, + flaw_reported_dt_lte: Union[Unset, datetime.datetime] = UNSET, + flaw_source: Union[Unset, OsidbApiV1AffectsListFlawSource] = UNSET, + flaw_unembargo_dt: Union[Unset, datetime.datetime] = UNSET, + flaw_updated_dt: Union[Unset, datetime.datetime] = UNSET, + flaw_updated_dt_date: Union[Unset, datetime.date] = UNSET, + flaw_updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + flaw_updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + flaw_updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + flaw_updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + flaw_updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + flaw_updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + flaw_uuid: Union[Unset, UUID] = UNSET, + impact: Union[Unset, OsidbApiV1AffectsListImpact] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, + include_meta_attr: Union[Unset, list[str]] = UNSET, + limit: Union[Unset, int] = UNSET, + offset: Union[Unset, int] = UNSET, + order: Union[Unset, list[OsidbApiV1AffectsListOrderItem]] = UNSET, + ps_component: Union[Unset, str] = UNSET, + ps_module: Union[Unset, str] = UNSET, + resolution: Union[Unset, OsidbApiV1AffectsListResolution] = UNSET, + trackers_created_dt: Union[Unset, datetime.datetime] = UNSET, + trackers_created_dt_date: Union[Unset, datetime.date] = UNSET, + trackers_created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + trackers_created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + trackers_created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + trackers_created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + trackers_created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + trackers_created_dt_lte: Union[Unset, datetime.datetime] = 
UNSET, + trackers_embargoed: Union[Unset, bool] = UNSET, + trackers_external_system_id: Union[Unset, str] = UNSET, + trackers_ps_update_stream: Union[Unset, str] = UNSET, + trackers_resolution: Union[Unset, str] = UNSET, + trackers_status: Union[Unset, str] = UNSET, + trackers_type: Union[Unset, OsidbApiV1AffectsListTrackersType] = UNSET, + trackers_updated_dt: Union[Unset, datetime.datetime] = UNSET, + trackers_updated_dt_date: Union[Unset, datetime.date] = UNSET, + trackers_updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + trackers_updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + trackers_updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + trackers_updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + trackers_updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + trackers_updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + trackers_uuid: Union[Unset, UUID] = UNSET, + updated_dt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_date: Union[Unset, datetime.date] = UNSET, + updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + uuid: Union[Unset, UUID] = UNSET, ) -> Response[OsidbApiV1AffectsListResponse200]: + """ + Args: + affectedness (Union[Unset, OsidbApiV1AffectsListAffectedness]): + created_dt (Union[Unset, datetime.datetime]): + created_dt_date (Union[Unset, datetime.date]): + created_dt_date_gte (Union[Unset, datetime.date]): + created_dt_date_lte (Union[Unset, datetime.date]): + created_dt_gt (Union[Unset, datetime.datetime]): + created_dt_gte (Union[Unset, datetime.datetime]): + created_dt_lt (Union[Unset, datetime.datetime]): + created_dt_lte (Union[Unset, datetime.datetime]): + cvss_scores_comment (Union[Unset, str]): + cvss_scores_created_dt (Union[Unset, datetime.datetime]): + cvss_scores_created_dt_date (Union[Unset, datetime.date]): + cvss_scores_created_dt_date_gte (Union[Unset, datetime.date]): + cvss_scores_created_dt_date_lte (Union[Unset, datetime.date]): + cvss_scores_created_dt_gt (Union[Unset, datetime.datetime]): + cvss_scores_created_dt_gte (Union[Unset, datetime.datetime]): + cvss_scores_created_dt_lt (Union[Unset, datetime.datetime]): + cvss_scores_created_dt_lte (Union[Unset, datetime.datetime]): + cvss_scores_cvss_version (Union[Unset, str]): + cvss_scores_issuer (Union[Unset, OsidbApiV1AffectsListCvssScoresIssuer]): + cvss_scores_score (Union[Unset, float]): + cvss_scores_updated_dt (Union[Unset, datetime.datetime]): + cvss_scores_updated_dt_date (Union[Unset, datetime.date]): + cvss_scores_updated_dt_date_gte (Union[Unset, datetime.date]): + cvss_scores_updated_dt_date_lte (Union[Unset, datetime.date]): + cvss_scores_updated_dt_gt (Union[Unset, datetime.datetime]): + cvss_scores_updated_dt_gte (Union[Unset, datetime.datetime]): + cvss_scores_updated_dt_lt (Union[Unset, datetime.datetime]): + cvss_scores_updated_dt_lte (Union[Unset, datetime.datetime]): + cvss_scores_uuid (Union[Unset, UUID]): + cvss_scores_vector (Union[Unset, str]): + embargoed (Union[Unset, bool]): + exclude_fields (Union[Unset, list[str]]): + flaw_components (Union[Unset, list[str]]): + flaw_created_dt (Union[Unset, datetime.datetime]): + flaw_created_dt_date (Union[Unset, datetime.date]): + flaw_created_dt_date_gte (Union[Unset, datetime.date]): + 
flaw_created_dt_date_lte (Union[Unset, datetime.date]): + flaw_created_dt_gt (Union[Unset, datetime.datetime]): + flaw_created_dt_gte (Union[Unset, datetime.datetime]): + flaw_created_dt_lt (Union[Unset, datetime.datetime]): + flaw_created_dt_lte (Union[Unset, datetime.datetime]): + flaw_cve_id (Union[Unset, str]): + flaw_cwe_id (Union[Unset, str]): + flaw_embargoed (Union[Unset, bool]): + flaw_impact (Union[Unset, OsidbApiV1AffectsListFlawImpact]): + flaw_reported_dt (Union[Unset, datetime.datetime]): + flaw_reported_dt_date (Union[Unset, datetime.date]): + flaw_reported_dt_date_gte (Union[Unset, datetime.date]): + flaw_reported_dt_date_lte (Union[Unset, datetime.date]): + flaw_reported_dt_gt (Union[Unset, datetime.datetime]): + flaw_reported_dt_gte (Union[Unset, datetime.datetime]): + flaw_reported_dt_lt (Union[Unset, datetime.datetime]): + flaw_reported_dt_lte (Union[Unset, datetime.datetime]): + flaw_source (Union[Unset, OsidbApiV1AffectsListFlawSource]): + flaw_unembargo_dt (Union[Unset, datetime.datetime]): + flaw_updated_dt (Union[Unset, datetime.datetime]): + flaw_updated_dt_date (Union[Unset, datetime.date]): + flaw_updated_dt_date_gte (Union[Unset, datetime.date]): + flaw_updated_dt_date_lte (Union[Unset, datetime.date]): + flaw_updated_dt_gt (Union[Unset, datetime.datetime]): + flaw_updated_dt_gte (Union[Unset, datetime.datetime]): + flaw_updated_dt_lt (Union[Unset, datetime.datetime]): + flaw_updated_dt_lte (Union[Unset, datetime.datetime]): + flaw_uuid (Union[Unset, UUID]): + impact (Union[Unset, OsidbApiV1AffectsListImpact]): + include_fields (Union[Unset, list[str]]): + include_meta_attr (Union[Unset, list[str]]): + limit (Union[Unset, int]): + offset (Union[Unset, int]): + order (Union[Unset, list[OsidbApiV1AffectsListOrderItem]]): + ps_component (Union[Unset, str]): + ps_module (Union[Unset, str]): + resolution (Union[Unset, OsidbApiV1AffectsListResolution]): + trackers_created_dt (Union[Unset, datetime.datetime]): + trackers_created_dt_date (Union[Unset, datetime.date]): + trackers_created_dt_date_gte (Union[Unset, datetime.date]): + trackers_created_dt_date_lte (Union[Unset, datetime.date]): + trackers_created_dt_gt (Union[Unset, datetime.datetime]): + trackers_created_dt_gte (Union[Unset, datetime.datetime]): + trackers_created_dt_lt (Union[Unset, datetime.datetime]): + trackers_created_dt_lte (Union[Unset, datetime.datetime]): + trackers_embargoed (Union[Unset, bool]): + trackers_external_system_id (Union[Unset, str]): + trackers_ps_update_stream (Union[Unset, str]): + trackers_resolution (Union[Unset, str]): + trackers_status (Union[Unset, str]): + trackers_type (Union[Unset, OsidbApiV1AffectsListTrackersType]): + trackers_updated_dt (Union[Unset, datetime.datetime]): + trackers_updated_dt_date (Union[Unset, datetime.date]): + trackers_updated_dt_date_gte (Union[Unset, datetime.date]): + trackers_updated_dt_date_lte (Union[Unset, datetime.date]): + trackers_updated_dt_gt (Union[Unset, datetime.datetime]): + trackers_updated_dt_gte (Union[Unset, datetime.datetime]): + trackers_updated_dt_lt (Union[Unset, datetime.datetime]): + trackers_updated_dt_lte (Union[Unset, datetime.datetime]): + trackers_uuid (Union[Unset, UUID]): + updated_dt (Union[Unset, datetime.datetime]): + updated_dt_date (Union[Unset, datetime.date]): + updated_dt_date_gte (Union[Unset, datetime.date]): + updated_dt_date_lte (Union[Unset, datetime.date]): + updated_dt_gt (Union[Unset, datetime.datetime]): + updated_dt_gte (Union[Unset, datetime.datetime]): + updated_dt_lt (Union[Unset, 
datetime.datetime]): + updated_dt_lte (Union[Unset, datetime.datetime]): + uuid (Union[Unset, UUID]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1AffectsListResponse200] + """ + kwargs = _get_kwargs( client=client, affectedness=affectedness, @@ -1165,122 +1210,235 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( *, client: AuthenticatedClient, - affectedness: Union[Unset, None, OsidbApiV1AffectsListAffectedness] = UNSET, - created_dt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_date: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_comment: Union[Unset, None, str] = UNSET, - cvss_scores_created_dt: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_created_dt_date: Union[Unset, None, datetime.date] = UNSET, - cvss_scores_created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - cvss_scores_created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - cvss_scores_created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_cvss_version: Union[Unset, None, str] = UNSET, - cvss_scores_issuer: Union[ - Unset, None, OsidbApiV1AffectsListCvssScoresIssuer - ] = UNSET, - cvss_scores_score: Union[Unset, None, float] = UNSET, - cvss_scores_updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - cvss_scores_updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - cvss_scores_updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - cvss_scores_updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_uuid: Union[Unset, None, str] = UNSET, - cvss_scores_vector: Union[Unset, None, str] = UNSET, - embargoed: Union[Unset, None, bool] = UNSET, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - flaw_components: Union[Unset, None, List[str]] = UNSET, - flaw_created_dt: Union[Unset, None, datetime.datetime] = UNSET, - flaw_created_dt_date: Union[Unset, None, datetime.date] = UNSET, - flaw_created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - flaw_created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - flaw_created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - flaw_created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - flaw_created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - flaw_created_dt_lte: Union[Unset, None, datetime.datetime] 
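A hedged usage sketch for the regenerated endpoint module; the client constructor arguments, base URL, token and filter values below are placeholders and not taken from this patch, while the function name and keyword filters match the signatures shown above.

import datetime

from osidb_bindings.bindings.python_client import AuthenticatedClient
from osidb_bindings.bindings.python_client.api.osidb import osidb_api_v1_affects_list

# The exact AuthenticatedClient arguments depend on the generated client.py;
# base_url and token here are placeholders.
client = AuthenticatedClient(base_url="https://osidb.example.com", token="...")

response = osidb_api_v1_affects_list.sync_detailed(
    client=client,
    ps_module="example-module",                     # placeholder filter value
    created_dt_date_gte=datetime.date(2024, 1, 1),
    limit=10,
)
print(response.status_code, response.parsed)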
= UNSET, - flaw_cve_id: Union[Unset, None, str] = UNSET, - flaw_cwe_id: Union[Unset, None, str] = UNSET, - flaw_embargoed: Union[Unset, None, bool] = UNSET, - flaw_impact: Union[Unset, None, OsidbApiV1AffectsListFlawImpact] = UNSET, - flaw_reported_dt: Union[Unset, None, datetime.datetime] = UNSET, - flaw_reported_dt_date: Union[Unset, None, datetime.date] = UNSET, - flaw_reported_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - flaw_reported_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - flaw_reported_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - flaw_reported_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - flaw_reported_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - flaw_reported_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - flaw_source: Union[Unset, None, OsidbApiV1AffectsListFlawSource] = UNSET, - flaw_unembargo_dt: Union[Unset, None, datetime.datetime] = UNSET, - flaw_updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - flaw_updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - flaw_updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - flaw_updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - flaw_updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - flaw_updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - flaw_updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - flaw_updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - flaw_uuid: Union[Unset, None, str] = UNSET, - impact: Union[Unset, None, OsidbApiV1AffectsListImpact] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, - include_meta_attr: Union[Unset, None, List[str]] = UNSET, - limit: Union[Unset, None, int] = UNSET, - offset: Union[Unset, None, int] = UNSET, - order: Union[Unset, None, List[OsidbApiV1AffectsListOrderItem]] = UNSET, - ps_component: Union[Unset, None, str] = UNSET, - ps_module: Union[Unset, None, str] = UNSET, - resolution: Union[Unset, None, OsidbApiV1AffectsListResolution] = UNSET, - trackers_created_dt: Union[Unset, None, datetime.datetime] = UNSET, - trackers_created_dt_date: Union[Unset, None, datetime.date] = UNSET, - trackers_created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - trackers_created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - trackers_created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - trackers_created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - trackers_created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - trackers_created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - trackers_embargoed: Union[Unset, None, bool] = UNSET, - trackers_external_system_id: Union[Unset, None, str] = UNSET, - trackers_ps_update_stream: Union[Unset, None, str] = UNSET, - trackers_resolution: Union[Unset, None, str] = UNSET, - trackers_status: Union[Unset, None, str] = UNSET, - trackers_type: Union[Unset, None, OsidbApiV1AffectsListTrackersType] = UNSET, - trackers_updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - trackers_updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - trackers_updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - trackers_updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - trackers_updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - trackers_updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - trackers_updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - trackers_updated_dt_lte: 
Union[Unset, None, datetime.datetime] = UNSET, - trackers_uuid: Union[Unset, None, str] = UNSET, - updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - uuid: Union[Unset, None, str] = UNSET, + affectedness: Union[Unset, OsidbApiV1AffectsListAffectedness] = UNSET, + created_dt: Union[Unset, datetime.datetime] = UNSET, + created_dt_date: Union[Unset, datetime.date] = UNSET, + created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_comment: Union[Unset, str] = UNSET, + cvss_scores_created_dt: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_created_dt_date: Union[Unset, datetime.date] = UNSET, + cvss_scores_created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + cvss_scores_created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + cvss_scores_created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_cvss_version: Union[Unset, str] = UNSET, + cvss_scores_issuer: Union[Unset, OsidbApiV1AffectsListCvssScoresIssuer] = UNSET, + cvss_scores_score: Union[Unset, float] = UNSET, + cvss_scores_updated_dt: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_updated_dt_date: Union[Unset, datetime.date] = UNSET, + cvss_scores_updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + cvss_scores_updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + cvss_scores_updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_uuid: Union[Unset, UUID] = UNSET, + cvss_scores_vector: Union[Unset, str] = UNSET, + embargoed: Union[Unset, bool] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + flaw_components: Union[Unset, list[str]] = UNSET, + flaw_created_dt: Union[Unset, datetime.datetime] = UNSET, + flaw_created_dt_date: Union[Unset, datetime.date] = UNSET, + flaw_created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + flaw_created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + flaw_created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + flaw_created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + flaw_created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + flaw_created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + flaw_cve_id: Union[Unset, str] = UNSET, + flaw_cwe_id: Union[Unset, str] = UNSET, + flaw_embargoed: Union[Unset, bool] = UNSET, + flaw_impact: Union[Unset, OsidbApiV1AffectsListFlawImpact] = UNSET, + flaw_reported_dt: Union[Unset, datetime.datetime] = 
UNSET, + flaw_reported_dt_date: Union[Unset, datetime.date] = UNSET, + flaw_reported_dt_date_gte: Union[Unset, datetime.date] = UNSET, + flaw_reported_dt_date_lte: Union[Unset, datetime.date] = UNSET, + flaw_reported_dt_gt: Union[Unset, datetime.datetime] = UNSET, + flaw_reported_dt_gte: Union[Unset, datetime.datetime] = UNSET, + flaw_reported_dt_lt: Union[Unset, datetime.datetime] = UNSET, + flaw_reported_dt_lte: Union[Unset, datetime.datetime] = UNSET, + flaw_source: Union[Unset, OsidbApiV1AffectsListFlawSource] = UNSET, + flaw_unembargo_dt: Union[Unset, datetime.datetime] = UNSET, + flaw_updated_dt: Union[Unset, datetime.datetime] = UNSET, + flaw_updated_dt_date: Union[Unset, datetime.date] = UNSET, + flaw_updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + flaw_updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + flaw_updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + flaw_updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + flaw_updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + flaw_updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + flaw_uuid: Union[Unset, UUID] = UNSET, + impact: Union[Unset, OsidbApiV1AffectsListImpact] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, + include_meta_attr: Union[Unset, list[str]] = UNSET, + limit: Union[Unset, int] = UNSET, + offset: Union[Unset, int] = UNSET, + order: Union[Unset, list[OsidbApiV1AffectsListOrderItem]] = UNSET, + ps_component: Union[Unset, str] = UNSET, + ps_module: Union[Unset, str] = UNSET, + resolution: Union[Unset, OsidbApiV1AffectsListResolution] = UNSET, + trackers_created_dt: Union[Unset, datetime.datetime] = UNSET, + trackers_created_dt_date: Union[Unset, datetime.date] = UNSET, + trackers_created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + trackers_created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + trackers_created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + trackers_created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + trackers_created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + trackers_created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + trackers_embargoed: Union[Unset, bool] = UNSET, + trackers_external_system_id: Union[Unset, str] = UNSET, + trackers_ps_update_stream: Union[Unset, str] = UNSET, + trackers_resolution: Union[Unset, str] = UNSET, + trackers_status: Union[Unset, str] = UNSET, + trackers_type: Union[Unset, OsidbApiV1AffectsListTrackersType] = UNSET, + trackers_updated_dt: Union[Unset, datetime.datetime] = UNSET, + trackers_updated_dt_date: Union[Unset, datetime.date] = UNSET, + trackers_updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + trackers_updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + trackers_updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + trackers_updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + trackers_updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + trackers_updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + trackers_uuid: Union[Unset, UUID] = UNSET, + updated_dt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_date: Union[Unset, datetime.date] = UNSET, + updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + uuid: Union[Unset, UUID] = UNSET, ) 
-> Optional[OsidbApiV1AffectsListResponse200]: - """ """ + """ + Args: + affectedness (Union[Unset, OsidbApiV1AffectsListAffectedness]): + created_dt (Union[Unset, datetime.datetime]): + created_dt_date (Union[Unset, datetime.date]): + created_dt_date_gte (Union[Unset, datetime.date]): + created_dt_date_lte (Union[Unset, datetime.date]): + created_dt_gt (Union[Unset, datetime.datetime]): + created_dt_gte (Union[Unset, datetime.datetime]): + created_dt_lt (Union[Unset, datetime.datetime]): + created_dt_lte (Union[Unset, datetime.datetime]): + cvss_scores_comment (Union[Unset, str]): + cvss_scores_created_dt (Union[Unset, datetime.datetime]): + cvss_scores_created_dt_date (Union[Unset, datetime.date]): + cvss_scores_created_dt_date_gte (Union[Unset, datetime.date]): + cvss_scores_created_dt_date_lte (Union[Unset, datetime.date]): + cvss_scores_created_dt_gt (Union[Unset, datetime.datetime]): + cvss_scores_created_dt_gte (Union[Unset, datetime.datetime]): + cvss_scores_created_dt_lt (Union[Unset, datetime.datetime]): + cvss_scores_created_dt_lte (Union[Unset, datetime.datetime]): + cvss_scores_cvss_version (Union[Unset, str]): + cvss_scores_issuer (Union[Unset, OsidbApiV1AffectsListCvssScoresIssuer]): + cvss_scores_score (Union[Unset, float]): + cvss_scores_updated_dt (Union[Unset, datetime.datetime]): + cvss_scores_updated_dt_date (Union[Unset, datetime.date]): + cvss_scores_updated_dt_date_gte (Union[Unset, datetime.date]): + cvss_scores_updated_dt_date_lte (Union[Unset, datetime.date]): + cvss_scores_updated_dt_gt (Union[Unset, datetime.datetime]): + cvss_scores_updated_dt_gte (Union[Unset, datetime.datetime]): + cvss_scores_updated_dt_lt (Union[Unset, datetime.datetime]): + cvss_scores_updated_dt_lte (Union[Unset, datetime.datetime]): + cvss_scores_uuid (Union[Unset, UUID]): + cvss_scores_vector (Union[Unset, str]): + embargoed (Union[Unset, bool]): + exclude_fields (Union[Unset, list[str]]): + flaw_components (Union[Unset, list[str]]): + flaw_created_dt (Union[Unset, datetime.datetime]): + flaw_created_dt_date (Union[Unset, datetime.date]): + flaw_created_dt_date_gte (Union[Unset, datetime.date]): + flaw_created_dt_date_lte (Union[Unset, datetime.date]): + flaw_created_dt_gt (Union[Unset, datetime.datetime]): + flaw_created_dt_gte (Union[Unset, datetime.datetime]): + flaw_created_dt_lt (Union[Unset, datetime.datetime]): + flaw_created_dt_lte (Union[Unset, datetime.datetime]): + flaw_cve_id (Union[Unset, str]): + flaw_cwe_id (Union[Unset, str]): + flaw_embargoed (Union[Unset, bool]): + flaw_impact (Union[Unset, OsidbApiV1AffectsListFlawImpact]): + flaw_reported_dt (Union[Unset, datetime.datetime]): + flaw_reported_dt_date (Union[Unset, datetime.date]): + flaw_reported_dt_date_gte (Union[Unset, datetime.date]): + flaw_reported_dt_date_lte (Union[Unset, datetime.date]): + flaw_reported_dt_gt (Union[Unset, datetime.datetime]): + flaw_reported_dt_gte (Union[Unset, datetime.datetime]): + flaw_reported_dt_lt (Union[Unset, datetime.datetime]): + flaw_reported_dt_lte (Union[Unset, datetime.datetime]): + flaw_source (Union[Unset, OsidbApiV1AffectsListFlawSource]): + flaw_unembargo_dt (Union[Unset, datetime.datetime]): + flaw_updated_dt (Union[Unset, datetime.datetime]): + flaw_updated_dt_date (Union[Unset, datetime.date]): + flaw_updated_dt_date_gte (Union[Unset, datetime.date]): + flaw_updated_dt_date_lte (Union[Unset, datetime.date]): + flaw_updated_dt_gt (Union[Unset, datetime.datetime]): + flaw_updated_dt_gte (Union[Unset, datetime.datetime]): + flaw_updated_dt_lt (Union[Unset, 
datetime.datetime]): + flaw_updated_dt_lte (Union[Unset, datetime.datetime]): + flaw_uuid (Union[Unset, UUID]): + impact (Union[Unset, OsidbApiV1AffectsListImpact]): + include_fields (Union[Unset, list[str]]): + include_meta_attr (Union[Unset, list[str]]): + limit (Union[Unset, int]): + offset (Union[Unset, int]): + order (Union[Unset, list[OsidbApiV1AffectsListOrderItem]]): + ps_component (Union[Unset, str]): + ps_module (Union[Unset, str]): + resolution (Union[Unset, OsidbApiV1AffectsListResolution]): + trackers_created_dt (Union[Unset, datetime.datetime]): + trackers_created_dt_date (Union[Unset, datetime.date]): + trackers_created_dt_date_gte (Union[Unset, datetime.date]): + trackers_created_dt_date_lte (Union[Unset, datetime.date]): + trackers_created_dt_gt (Union[Unset, datetime.datetime]): + trackers_created_dt_gte (Union[Unset, datetime.datetime]): + trackers_created_dt_lt (Union[Unset, datetime.datetime]): + trackers_created_dt_lte (Union[Unset, datetime.datetime]): + trackers_embargoed (Union[Unset, bool]): + trackers_external_system_id (Union[Unset, str]): + trackers_ps_update_stream (Union[Unset, str]): + trackers_resolution (Union[Unset, str]): + trackers_status (Union[Unset, str]): + trackers_type (Union[Unset, OsidbApiV1AffectsListTrackersType]): + trackers_updated_dt (Union[Unset, datetime.datetime]): + trackers_updated_dt_date (Union[Unset, datetime.date]): + trackers_updated_dt_date_gte (Union[Unset, datetime.date]): + trackers_updated_dt_date_lte (Union[Unset, datetime.date]): + trackers_updated_dt_gt (Union[Unset, datetime.datetime]): + trackers_updated_dt_gte (Union[Unset, datetime.datetime]): + trackers_updated_dt_lt (Union[Unset, datetime.datetime]): + trackers_updated_dt_lte (Union[Unset, datetime.datetime]): + trackers_uuid (Union[Unset, UUID]): + updated_dt (Union[Unset, datetime.datetime]): + updated_dt_date (Union[Unset, datetime.date]): + updated_dt_date_gte (Union[Unset, datetime.date]): + updated_dt_date_lte (Union[Unset, datetime.date]): + updated_dt_gt (Union[Unset, datetime.datetime]): + updated_dt_gte (Union[Unset, datetime.datetime]): + updated_dt_lt (Union[Unset, datetime.datetime]): + updated_dt_lte (Union[Unset, datetime.datetime]): + uuid (Union[Unset, UUID]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + OsidbApiV1AffectsListResponse200 + """ return sync_detailed( client=client, @@ -1393,118 +1551,233 @@ def sync( ).parsed -async def async_detailed( +async def asyncio_detailed( *, client: AuthenticatedClient, - affectedness: Union[Unset, None, OsidbApiV1AffectsListAffectedness] = UNSET, - created_dt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_date: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_comment: Union[Unset, None, str] = UNSET, - cvss_scores_created_dt: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_created_dt_date: Union[Unset, None, datetime.date] = UNSET, - cvss_scores_created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - cvss_scores_created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - cvss_scores_created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_cvss_version: Union[Unset, None, str] = UNSET, - cvss_scores_issuer: Union[ - Unset, None, OsidbApiV1AffectsListCvssScoresIssuer - ] = UNSET, - cvss_scores_score: Union[Unset, None, float] = UNSET, - cvss_scores_updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - cvss_scores_updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - cvss_scores_updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - cvss_scores_updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_uuid: Union[Unset, None, str] = UNSET, - cvss_scores_vector: Union[Unset, None, str] = UNSET, - embargoed: Union[Unset, None, bool] = UNSET, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - flaw_components: Union[Unset, None, List[str]] = UNSET, - flaw_created_dt: Union[Unset, None, datetime.datetime] = UNSET, - flaw_created_dt_date: Union[Unset, None, datetime.date] = UNSET, - flaw_created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - flaw_created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - flaw_created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - flaw_created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - flaw_created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - flaw_created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - flaw_cve_id: Union[Unset, None, str] = UNSET, - flaw_cwe_id: Union[Unset, None, str] = UNSET, - flaw_embargoed: Union[Unset, None, bool] = UNSET, - flaw_impact: Union[Unset, None, OsidbApiV1AffectsListFlawImpact] = UNSET, - flaw_reported_dt: Union[Unset, None, datetime.datetime] = UNSET, - flaw_reported_dt_date: Union[Unset, None, datetime.date] = UNSET, - flaw_reported_dt_date_gte: Union[Unset, None, datetime.date] = 
UNSET, - flaw_reported_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - flaw_reported_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - flaw_reported_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - flaw_reported_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - flaw_reported_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - flaw_source: Union[Unset, None, OsidbApiV1AffectsListFlawSource] = UNSET, - flaw_unembargo_dt: Union[Unset, None, datetime.datetime] = UNSET, - flaw_updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - flaw_updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - flaw_updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - flaw_updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - flaw_updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - flaw_updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - flaw_updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - flaw_updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - flaw_uuid: Union[Unset, None, str] = UNSET, - impact: Union[Unset, None, OsidbApiV1AffectsListImpact] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, - include_meta_attr: Union[Unset, None, List[str]] = UNSET, - limit: Union[Unset, None, int] = UNSET, - offset: Union[Unset, None, int] = UNSET, - order: Union[Unset, None, List[OsidbApiV1AffectsListOrderItem]] = UNSET, - ps_component: Union[Unset, None, str] = UNSET, - ps_module: Union[Unset, None, str] = UNSET, - resolution: Union[Unset, None, OsidbApiV1AffectsListResolution] = UNSET, - trackers_created_dt: Union[Unset, None, datetime.datetime] = UNSET, - trackers_created_dt_date: Union[Unset, None, datetime.date] = UNSET, - trackers_created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - trackers_created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - trackers_created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - trackers_created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - trackers_created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - trackers_created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - trackers_embargoed: Union[Unset, None, bool] = UNSET, - trackers_external_system_id: Union[Unset, None, str] = UNSET, - trackers_ps_update_stream: Union[Unset, None, str] = UNSET, - trackers_resolution: Union[Unset, None, str] = UNSET, - trackers_status: Union[Unset, None, str] = UNSET, - trackers_type: Union[Unset, None, OsidbApiV1AffectsListTrackersType] = UNSET, - trackers_updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - trackers_updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - trackers_updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - trackers_updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - trackers_updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - trackers_updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - trackers_updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - trackers_updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - trackers_uuid: Union[Unset, None, str] = UNSET, - updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_gte: 
Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - uuid: Union[Unset, None, str] = UNSET, + affectedness: Union[Unset, OsidbApiV1AffectsListAffectedness] = UNSET, + created_dt: Union[Unset, datetime.datetime] = UNSET, + created_dt_date: Union[Unset, datetime.date] = UNSET, + created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_comment: Union[Unset, str] = UNSET, + cvss_scores_created_dt: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_created_dt_date: Union[Unset, datetime.date] = UNSET, + cvss_scores_created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + cvss_scores_created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + cvss_scores_created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_cvss_version: Union[Unset, str] = UNSET, + cvss_scores_issuer: Union[Unset, OsidbApiV1AffectsListCvssScoresIssuer] = UNSET, + cvss_scores_score: Union[Unset, float] = UNSET, + cvss_scores_updated_dt: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_updated_dt_date: Union[Unset, datetime.date] = UNSET, + cvss_scores_updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + cvss_scores_updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + cvss_scores_updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_uuid: Union[Unset, UUID] = UNSET, + cvss_scores_vector: Union[Unset, str] = UNSET, + embargoed: Union[Unset, bool] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + flaw_components: Union[Unset, list[str]] = UNSET, + flaw_created_dt: Union[Unset, datetime.datetime] = UNSET, + flaw_created_dt_date: Union[Unset, datetime.date] = UNSET, + flaw_created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + flaw_created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + flaw_created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + flaw_created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + flaw_created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + flaw_created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + flaw_cve_id: Union[Unset, str] = UNSET, + flaw_cwe_id: Union[Unset, str] = UNSET, + flaw_embargoed: Union[Unset, bool] = UNSET, + flaw_impact: Union[Unset, OsidbApiV1AffectsListFlawImpact] = UNSET, + flaw_reported_dt: Union[Unset, datetime.datetime] = UNSET, + flaw_reported_dt_date: Union[Unset, datetime.date] = UNSET, + flaw_reported_dt_date_gte: Union[Unset, datetime.date] = UNSET, + flaw_reported_dt_date_lte: Union[Unset, datetime.date] = UNSET, + flaw_reported_dt_gt: Union[Unset, datetime.datetime] = UNSET, + flaw_reported_dt_gte: Union[Unset, datetime.datetime] = UNSET, + flaw_reported_dt_lt: Union[Unset, datetime.datetime] = UNSET, + flaw_reported_dt_lte: Union[Unset, 
datetime.datetime] = UNSET, + flaw_source: Union[Unset, OsidbApiV1AffectsListFlawSource] = UNSET, + flaw_unembargo_dt: Union[Unset, datetime.datetime] = UNSET, + flaw_updated_dt: Union[Unset, datetime.datetime] = UNSET, + flaw_updated_dt_date: Union[Unset, datetime.date] = UNSET, + flaw_updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + flaw_updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + flaw_updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + flaw_updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + flaw_updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + flaw_updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + flaw_uuid: Union[Unset, UUID] = UNSET, + impact: Union[Unset, OsidbApiV1AffectsListImpact] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, + include_meta_attr: Union[Unset, list[str]] = UNSET, + limit: Union[Unset, int] = UNSET, + offset: Union[Unset, int] = UNSET, + order: Union[Unset, list[OsidbApiV1AffectsListOrderItem]] = UNSET, + ps_component: Union[Unset, str] = UNSET, + ps_module: Union[Unset, str] = UNSET, + resolution: Union[Unset, OsidbApiV1AffectsListResolution] = UNSET, + trackers_created_dt: Union[Unset, datetime.datetime] = UNSET, + trackers_created_dt_date: Union[Unset, datetime.date] = UNSET, + trackers_created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + trackers_created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + trackers_created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + trackers_created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + trackers_created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + trackers_created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + trackers_embargoed: Union[Unset, bool] = UNSET, + trackers_external_system_id: Union[Unset, str] = UNSET, + trackers_ps_update_stream: Union[Unset, str] = UNSET, + trackers_resolution: Union[Unset, str] = UNSET, + trackers_status: Union[Unset, str] = UNSET, + trackers_type: Union[Unset, OsidbApiV1AffectsListTrackersType] = UNSET, + trackers_updated_dt: Union[Unset, datetime.datetime] = UNSET, + trackers_updated_dt_date: Union[Unset, datetime.date] = UNSET, + trackers_updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + trackers_updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + trackers_updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + trackers_updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + trackers_updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + trackers_updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + trackers_uuid: Union[Unset, UUID] = UNSET, + updated_dt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_date: Union[Unset, datetime.date] = UNSET, + updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + uuid: Union[Unset, UUID] = UNSET, ) -> Response[OsidbApiV1AffectsListResponse200]: + """ + Args: + affectedness (Union[Unset, OsidbApiV1AffectsListAffectedness]): + created_dt (Union[Unset, datetime.datetime]): + created_dt_date (Union[Unset, datetime.date]): + created_dt_date_gte (Union[Unset, datetime.date]): + created_dt_date_lte (Union[Unset, datetime.date]): + created_dt_gt (Union[Unset, datetime.datetime]): + created_dt_gte (Union[Unset, datetime.datetime]): 
+ created_dt_lt (Union[Unset, datetime.datetime]): + created_dt_lte (Union[Unset, datetime.datetime]): + cvss_scores_comment (Union[Unset, str]): + cvss_scores_created_dt (Union[Unset, datetime.datetime]): + cvss_scores_created_dt_date (Union[Unset, datetime.date]): + cvss_scores_created_dt_date_gte (Union[Unset, datetime.date]): + cvss_scores_created_dt_date_lte (Union[Unset, datetime.date]): + cvss_scores_created_dt_gt (Union[Unset, datetime.datetime]): + cvss_scores_created_dt_gte (Union[Unset, datetime.datetime]): + cvss_scores_created_dt_lt (Union[Unset, datetime.datetime]): + cvss_scores_created_dt_lte (Union[Unset, datetime.datetime]): + cvss_scores_cvss_version (Union[Unset, str]): + cvss_scores_issuer (Union[Unset, OsidbApiV1AffectsListCvssScoresIssuer]): + cvss_scores_score (Union[Unset, float]): + cvss_scores_updated_dt (Union[Unset, datetime.datetime]): + cvss_scores_updated_dt_date (Union[Unset, datetime.date]): + cvss_scores_updated_dt_date_gte (Union[Unset, datetime.date]): + cvss_scores_updated_dt_date_lte (Union[Unset, datetime.date]): + cvss_scores_updated_dt_gt (Union[Unset, datetime.datetime]): + cvss_scores_updated_dt_gte (Union[Unset, datetime.datetime]): + cvss_scores_updated_dt_lt (Union[Unset, datetime.datetime]): + cvss_scores_updated_dt_lte (Union[Unset, datetime.datetime]): + cvss_scores_uuid (Union[Unset, UUID]): + cvss_scores_vector (Union[Unset, str]): + embargoed (Union[Unset, bool]): + exclude_fields (Union[Unset, list[str]]): + flaw_components (Union[Unset, list[str]]): + flaw_created_dt (Union[Unset, datetime.datetime]): + flaw_created_dt_date (Union[Unset, datetime.date]): + flaw_created_dt_date_gte (Union[Unset, datetime.date]): + flaw_created_dt_date_lte (Union[Unset, datetime.date]): + flaw_created_dt_gt (Union[Unset, datetime.datetime]): + flaw_created_dt_gte (Union[Unset, datetime.datetime]): + flaw_created_dt_lt (Union[Unset, datetime.datetime]): + flaw_created_dt_lte (Union[Unset, datetime.datetime]): + flaw_cve_id (Union[Unset, str]): + flaw_cwe_id (Union[Unset, str]): + flaw_embargoed (Union[Unset, bool]): + flaw_impact (Union[Unset, OsidbApiV1AffectsListFlawImpact]): + flaw_reported_dt (Union[Unset, datetime.datetime]): + flaw_reported_dt_date (Union[Unset, datetime.date]): + flaw_reported_dt_date_gte (Union[Unset, datetime.date]): + flaw_reported_dt_date_lte (Union[Unset, datetime.date]): + flaw_reported_dt_gt (Union[Unset, datetime.datetime]): + flaw_reported_dt_gte (Union[Unset, datetime.datetime]): + flaw_reported_dt_lt (Union[Unset, datetime.datetime]): + flaw_reported_dt_lte (Union[Unset, datetime.datetime]): + flaw_source (Union[Unset, OsidbApiV1AffectsListFlawSource]): + flaw_unembargo_dt (Union[Unset, datetime.datetime]): + flaw_updated_dt (Union[Unset, datetime.datetime]): + flaw_updated_dt_date (Union[Unset, datetime.date]): + flaw_updated_dt_date_gte (Union[Unset, datetime.date]): + flaw_updated_dt_date_lte (Union[Unset, datetime.date]): + flaw_updated_dt_gt (Union[Unset, datetime.datetime]): + flaw_updated_dt_gte (Union[Unset, datetime.datetime]): + flaw_updated_dt_lt (Union[Unset, datetime.datetime]): + flaw_updated_dt_lte (Union[Unset, datetime.datetime]): + flaw_uuid (Union[Unset, UUID]): + impact (Union[Unset, OsidbApiV1AffectsListImpact]): + include_fields (Union[Unset, list[str]]): + include_meta_attr (Union[Unset, list[str]]): + limit (Union[Unset, int]): + offset (Union[Unset, int]): + order (Union[Unset, list[OsidbApiV1AffectsListOrderItem]]): + ps_component (Union[Unset, str]): + ps_module (Union[Unset, str]): + 
resolution (Union[Unset, OsidbApiV1AffectsListResolution]): + trackers_created_dt (Union[Unset, datetime.datetime]): + trackers_created_dt_date (Union[Unset, datetime.date]): + trackers_created_dt_date_gte (Union[Unset, datetime.date]): + trackers_created_dt_date_lte (Union[Unset, datetime.date]): + trackers_created_dt_gt (Union[Unset, datetime.datetime]): + trackers_created_dt_gte (Union[Unset, datetime.datetime]): + trackers_created_dt_lt (Union[Unset, datetime.datetime]): + trackers_created_dt_lte (Union[Unset, datetime.datetime]): + trackers_embargoed (Union[Unset, bool]): + trackers_external_system_id (Union[Unset, str]): + trackers_ps_update_stream (Union[Unset, str]): + trackers_resolution (Union[Unset, str]): + trackers_status (Union[Unset, str]): + trackers_type (Union[Unset, OsidbApiV1AffectsListTrackersType]): + trackers_updated_dt (Union[Unset, datetime.datetime]): + trackers_updated_dt_date (Union[Unset, datetime.date]): + trackers_updated_dt_date_gte (Union[Unset, datetime.date]): + trackers_updated_dt_date_lte (Union[Unset, datetime.date]): + trackers_updated_dt_gt (Union[Unset, datetime.datetime]): + trackers_updated_dt_gte (Union[Unset, datetime.datetime]): + trackers_updated_dt_lt (Union[Unset, datetime.datetime]): + trackers_updated_dt_lte (Union[Unset, datetime.datetime]): + trackers_uuid (Union[Unset, UUID]): + updated_dt (Union[Unset, datetime.datetime]): + updated_dt_date (Union[Unset, datetime.date]): + updated_dt_date_gte (Union[Unset, datetime.date]): + updated_dt_date_lte (Union[Unset, datetime.date]): + updated_dt_gt (Union[Unset, datetime.datetime]): + updated_dt_gte (Union[Unset, datetime.datetime]): + updated_dt_lt (Union[Unset, datetime.datetime]): + updated_dt_lte (Union[Unset, datetime.datetime]): + uuid (Union[Unset, UUID]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[OsidbApiV1AffectsListResponse200] + """ + kwargs = _get_kwargs( client=client, affectedness=affectedness, @@ -1623,125 +1896,238 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( +async def asyncio( *, client: AuthenticatedClient, - affectedness: Union[Unset, None, OsidbApiV1AffectsListAffectedness] = UNSET, - created_dt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_date: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_comment: Union[Unset, None, str] = UNSET, - cvss_scores_created_dt: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_created_dt_date: Union[Unset, None, datetime.date] = UNSET, - cvss_scores_created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - cvss_scores_created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - cvss_scores_created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_cvss_version: Union[Unset, None, str] = UNSET, - cvss_scores_issuer: Union[ - Unset, None, OsidbApiV1AffectsListCvssScoresIssuer - ] = UNSET, - cvss_scores_score: Union[Unset, None, float] = UNSET, - cvss_scores_updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - cvss_scores_updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - cvss_scores_updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - cvss_scores_updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_uuid: Union[Unset, None, str] = UNSET, - cvss_scores_vector: Union[Unset, None, str] = UNSET, - embargoed: Union[Unset, None, bool] = UNSET, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - flaw_components: Union[Unset, None, List[str]] = UNSET, - flaw_created_dt: Union[Unset, None, datetime.datetime] = UNSET, - flaw_created_dt_date: Union[Unset, None, datetime.date] = UNSET, - flaw_created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - flaw_created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - flaw_created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - flaw_created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - flaw_created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - flaw_created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - flaw_cve_id: Union[Unset, None, str] = UNSET, - flaw_cwe_id: Union[Unset, None, str] = UNSET, - flaw_embargoed: Union[Unset, None, bool] = UNSET, - flaw_impact: Union[Unset, None, OsidbApiV1AffectsListFlawImpact] = UNSET, - flaw_reported_dt: 
Union[Unset, None, datetime.datetime] = UNSET, - flaw_reported_dt_date: Union[Unset, None, datetime.date] = UNSET, - flaw_reported_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - flaw_reported_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - flaw_reported_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - flaw_reported_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - flaw_reported_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - flaw_reported_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - flaw_source: Union[Unset, None, OsidbApiV1AffectsListFlawSource] = UNSET, - flaw_unembargo_dt: Union[Unset, None, datetime.datetime] = UNSET, - flaw_updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - flaw_updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - flaw_updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - flaw_updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - flaw_updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - flaw_updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - flaw_updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - flaw_updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - flaw_uuid: Union[Unset, None, str] = UNSET, - impact: Union[Unset, None, OsidbApiV1AffectsListImpact] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, - include_meta_attr: Union[Unset, None, List[str]] = UNSET, - limit: Union[Unset, None, int] = UNSET, - offset: Union[Unset, None, int] = UNSET, - order: Union[Unset, None, List[OsidbApiV1AffectsListOrderItem]] = UNSET, - ps_component: Union[Unset, None, str] = UNSET, - ps_module: Union[Unset, None, str] = UNSET, - resolution: Union[Unset, None, OsidbApiV1AffectsListResolution] = UNSET, - trackers_created_dt: Union[Unset, None, datetime.datetime] = UNSET, - trackers_created_dt_date: Union[Unset, None, datetime.date] = UNSET, - trackers_created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - trackers_created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - trackers_created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - trackers_created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - trackers_created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - trackers_created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - trackers_embargoed: Union[Unset, None, bool] = UNSET, - trackers_external_system_id: Union[Unset, None, str] = UNSET, - trackers_ps_update_stream: Union[Unset, None, str] = UNSET, - trackers_resolution: Union[Unset, None, str] = UNSET, - trackers_status: Union[Unset, None, str] = UNSET, - trackers_type: Union[Unset, None, OsidbApiV1AffectsListTrackersType] = UNSET, - trackers_updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - trackers_updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - trackers_updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - trackers_updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - trackers_updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - trackers_updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - trackers_updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - trackers_updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - trackers_uuid: Union[Unset, None, str] = UNSET, - updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_gte: Union[Unset, None, 
datetime.date] = UNSET, - updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - uuid: Union[Unset, None, str] = UNSET, + affectedness: Union[Unset, OsidbApiV1AffectsListAffectedness] = UNSET, + created_dt: Union[Unset, datetime.datetime] = UNSET, + created_dt_date: Union[Unset, datetime.date] = UNSET, + created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_comment: Union[Unset, str] = UNSET, + cvss_scores_created_dt: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_created_dt_date: Union[Unset, datetime.date] = UNSET, + cvss_scores_created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + cvss_scores_created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + cvss_scores_created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_cvss_version: Union[Unset, str] = UNSET, + cvss_scores_issuer: Union[Unset, OsidbApiV1AffectsListCvssScoresIssuer] = UNSET, + cvss_scores_score: Union[Unset, float] = UNSET, + cvss_scores_updated_dt: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_updated_dt_date: Union[Unset, datetime.date] = UNSET, + cvss_scores_updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + cvss_scores_updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + cvss_scores_updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_uuid: Union[Unset, UUID] = UNSET, + cvss_scores_vector: Union[Unset, str] = UNSET, + embargoed: Union[Unset, bool] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + flaw_components: Union[Unset, list[str]] = UNSET, + flaw_created_dt: Union[Unset, datetime.datetime] = UNSET, + flaw_created_dt_date: Union[Unset, datetime.date] = UNSET, + flaw_created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + flaw_created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + flaw_created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + flaw_created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + flaw_created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + flaw_created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + flaw_cve_id: Union[Unset, str] = UNSET, + flaw_cwe_id: Union[Unset, str] = UNSET, + flaw_embargoed: Union[Unset, bool] = UNSET, + flaw_impact: Union[Unset, OsidbApiV1AffectsListFlawImpact] = UNSET, + flaw_reported_dt: Union[Unset, datetime.datetime] = UNSET, + flaw_reported_dt_date: Union[Unset, datetime.date] = UNSET, + flaw_reported_dt_date_gte: Union[Unset, datetime.date] = UNSET, + flaw_reported_dt_date_lte: Union[Unset, datetime.date] = UNSET, + flaw_reported_dt_gt: Union[Unset, datetime.datetime] = UNSET, + 
flaw_reported_dt_gte: Union[Unset, datetime.datetime] = UNSET, + flaw_reported_dt_lt: Union[Unset, datetime.datetime] = UNSET, + flaw_reported_dt_lte: Union[Unset, datetime.datetime] = UNSET, + flaw_source: Union[Unset, OsidbApiV1AffectsListFlawSource] = UNSET, + flaw_unembargo_dt: Union[Unset, datetime.datetime] = UNSET, + flaw_updated_dt: Union[Unset, datetime.datetime] = UNSET, + flaw_updated_dt_date: Union[Unset, datetime.date] = UNSET, + flaw_updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + flaw_updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + flaw_updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + flaw_updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + flaw_updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + flaw_updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + flaw_uuid: Union[Unset, UUID] = UNSET, + impact: Union[Unset, OsidbApiV1AffectsListImpact] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, + include_meta_attr: Union[Unset, list[str]] = UNSET, + limit: Union[Unset, int] = UNSET, + offset: Union[Unset, int] = UNSET, + order: Union[Unset, list[OsidbApiV1AffectsListOrderItem]] = UNSET, + ps_component: Union[Unset, str] = UNSET, + ps_module: Union[Unset, str] = UNSET, + resolution: Union[Unset, OsidbApiV1AffectsListResolution] = UNSET, + trackers_created_dt: Union[Unset, datetime.datetime] = UNSET, + trackers_created_dt_date: Union[Unset, datetime.date] = UNSET, + trackers_created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + trackers_created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + trackers_created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + trackers_created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + trackers_created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + trackers_created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + trackers_embargoed: Union[Unset, bool] = UNSET, + trackers_external_system_id: Union[Unset, str] = UNSET, + trackers_ps_update_stream: Union[Unset, str] = UNSET, + trackers_resolution: Union[Unset, str] = UNSET, + trackers_status: Union[Unset, str] = UNSET, + trackers_type: Union[Unset, OsidbApiV1AffectsListTrackersType] = UNSET, + trackers_updated_dt: Union[Unset, datetime.datetime] = UNSET, + trackers_updated_dt_date: Union[Unset, datetime.date] = UNSET, + trackers_updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + trackers_updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + trackers_updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + trackers_updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + trackers_updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + trackers_updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + trackers_uuid: Union[Unset, UUID] = UNSET, + updated_dt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_date: Union[Unset, datetime.date] = UNSET, + updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + uuid: Union[Unset, UUID] = UNSET, ) -> Optional[OsidbApiV1AffectsListResponse200]: - """ """ + """ + Args: + affectedness (Union[Unset, OsidbApiV1AffectsListAffectedness]): + created_dt (Union[Unset, datetime.datetime]): + created_dt_date (Union[Unset, datetime.date]): + created_dt_date_gte 
(Union[Unset, datetime.date]): + created_dt_date_lte (Union[Unset, datetime.date]): + created_dt_gt (Union[Unset, datetime.datetime]): + created_dt_gte (Union[Unset, datetime.datetime]): + created_dt_lt (Union[Unset, datetime.datetime]): + created_dt_lte (Union[Unset, datetime.datetime]): + cvss_scores_comment (Union[Unset, str]): + cvss_scores_created_dt (Union[Unset, datetime.datetime]): + cvss_scores_created_dt_date (Union[Unset, datetime.date]): + cvss_scores_created_dt_date_gte (Union[Unset, datetime.date]): + cvss_scores_created_dt_date_lte (Union[Unset, datetime.date]): + cvss_scores_created_dt_gt (Union[Unset, datetime.datetime]): + cvss_scores_created_dt_gte (Union[Unset, datetime.datetime]): + cvss_scores_created_dt_lt (Union[Unset, datetime.datetime]): + cvss_scores_created_dt_lte (Union[Unset, datetime.datetime]): + cvss_scores_cvss_version (Union[Unset, str]): + cvss_scores_issuer (Union[Unset, OsidbApiV1AffectsListCvssScoresIssuer]): + cvss_scores_score (Union[Unset, float]): + cvss_scores_updated_dt (Union[Unset, datetime.datetime]): + cvss_scores_updated_dt_date (Union[Unset, datetime.date]): + cvss_scores_updated_dt_date_gte (Union[Unset, datetime.date]): + cvss_scores_updated_dt_date_lte (Union[Unset, datetime.date]): + cvss_scores_updated_dt_gt (Union[Unset, datetime.datetime]): + cvss_scores_updated_dt_gte (Union[Unset, datetime.datetime]): + cvss_scores_updated_dt_lt (Union[Unset, datetime.datetime]): + cvss_scores_updated_dt_lte (Union[Unset, datetime.datetime]): + cvss_scores_uuid (Union[Unset, UUID]): + cvss_scores_vector (Union[Unset, str]): + embargoed (Union[Unset, bool]): + exclude_fields (Union[Unset, list[str]]): + flaw_components (Union[Unset, list[str]]): + flaw_created_dt (Union[Unset, datetime.datetime]): + flaw_created_dt_date (Union[Unset, datetime.date]): + flaw_created_dt_date_gte (Union[Unset, datetime.date]): + flaw_created_dt_date_lte (Union[Unset, datetime.date]): + flaw_created_dt_gt (Union[Unset, datetime.datetime]): + flaw_created_dt_gte (Union[Unset, datetime.datetime]): + flaw_created_dt_lt (Union[Unset, datetime.datetime]): + flaw_created_dt_lte (Union[Unset, datetime.datetime]): + flaw_cve_id (Union[Unset, str]): + flaw_cwe_id (Union[Unset, str]): + flaw_embargoed (Union[Unset, bool]): + flaw_impact (Union[Unset, OsidbApiV1AffectsListFlawImpact]): + flaw_reported_dt (Union[Unset, datetime.datetime]): + flaw_reported_dt_date (Union[Unset, datetime.date]): + flaw_reported_dt_date_gte (Union[Unset, datetime.date]): + flaw_reported_dt_date_lte (Union[Unset, datetime.date]): + flaw_reported_dt_gt (Union[Unset, datetime.datetime]): + flaw_reported_dt_gte (Union[Unset, datetime.datetime]): + flaw_reported_dt_lt (Union[Unset, datetime.datetime]): + flaw_reported_dt_lte (Union[Unset, datetime.datetime]): + flaw_source (Union[Unset, OsidbApiV1AffectsListFlawSource]): + flaw_unembargo_dt (Union[Unset, datetime.datetime]): + flaw_updated_dt (Union[Unset, datetime.datetime]): + flaw_updated_dt_date (Union[Unset, datetime.date]): + flaw_updated_dt_date_gte (Union[Unset, datetime.date]): + flaw_updated_dt_date_lte (Union[Unset, datetime.date]): + flaw_updated_dt_gt (Union[Unset, datetime.datetime]): + flaw_updated_dt_gte (Union[Unset, datetime.datetime]): + flaw_updated_dt_lt (Union[Unset, datetime.datetime]): + flaw_updated_dt_lte (Union[Unset, datetime.datetime]): + flaw_uuid (Union[Unset, UUID]): + impact (Union[Unset, OsidbApiV1AffectsListImpact]): + include_fields (Union[Unset, list[str]]): + include_meta_attr (Union[Unset, list[str]]): + limit 
(Union[Unset, int]): + offset (Union[Unset, int]): + order (Union[Unset, list[OsidbApiV1AffectsListOrderItem]]): + ps_component (Union[Unset, str]): + ps_module (Union[Unset, str]): + resolution (Union[Unset, OsidbApiV1AffectsListResolution]): + trackers_created_dt (Union[Unset, datetime.datetime]): + trackers_created_dt_date (Union[Unset, datetime.date]): + trackers_created_dt_date_gte (Union[Unset, datetime.date]): + trackers_created_dt_date_lte (Union[Unset, datetime.date]): + trackers_created_dt_gt (Union[Unset, datetime.datetime]): + trackers_created_dt_gte (Union[Unset, datetime.datetime]): + trackers_created_dt_lt (Union[Unset, datetime.datetime]): + trackers_created_dt_lte (Union[Unset, datetime.datetime]): + trackers_embargoed (Union[Unset, bool]): + trackers_external_system_id (Union[Unset, str]): + trackers_ps_update_stream (Union[Unset, str]): + trackers_resolution (Union[Unset, str]): + trackers_status (Union[Unset, str]): + trackers_type (Union[Unset, OsidbApiV1AffectsListTrackersType]): + trackers_updated_dt (Union[Unset, datetime.datetime]): + trackers_updated_dt_date (Union[Unset, datetime.date]): + trackers_updated_dt_date_gte (Union[Unset, datetime.date]): + trackers_updated_dt_date_lte (Union[Unset, datetime.date]): + trackers_updated_dt_gt (Union[Unset, datetime.datetime]): + trackers_updated_dt_gte (Union[Unset, datetime.datetime]): + trackers_updated_dt_lt (Union[Unset, datetime.datetime]): + trackers_updated_dt_lte (Union[Unset, datetime.datetime]): + trackers_uuid (Union[Unset, UUID]): + updated_dt (Union[Unset, datetime.datetime]): + updated_dt_date (Union[Unset, datetime.date]): + updated_dt_date_gte (Union[Unset, datetime.date]): + updated_dt_date_lte (Union[Unset, datetime.date]): + updated_dt_gt (Union[Unset, datetime.datetime]): + updated_dt_gte (Union[Unset, datetime.datetime]): + updated_dt_lt (Union[Unset, datetime.datetime]): + updated_dt_lte (Union[Unset, datetime.datetime]): + uuid (Union[Unset, UUID]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + OsidbApiV1AffectsListResponse200 + """ return ( - await async_detailed( + await asyncio_detailed( client=client, affectedness=affectedness, created_dt=created_dt, diff --git a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_affects_retrieve.py b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_affects_retrieve.py index 2a3a563..7fcbe38 100644 --- a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_affects_retrieve.py +++ b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_affects_retrieve.py @@ -1,74 +1,67 @@ -from typing import Any, Dict, List, Optional, Union +from http import HTTPStatus +from typing import Any, Optional, Union +from uuid import UUID import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.osidb_api_v1_affects_retrieve_response_200 import ( OsidbApiV1AffectsRetrieveResponse200, ) from ...types import UNSET, Response, Unset QUERY_PARAMS = { - "exclude_fields": List[str], - "include_fields": List[str], - "include_meta_attr": List[str], + "exclude_fields": list[str], + "include_fields": list[str], + "include_meta_attr": list[str], } def _get_kwargs( - uuid: str, + uuid: UUID, *, client: AuthenticatedClient, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, - include_meta_attr: Union[Unset, None, List[str]] = UNSET, -) -> Dict[str, Any]: - url = "{}/osidb/api/v1/affects/{uuid}".format( - client.base_url, - uuid=uuid, - ) - - headers: Dict[str, Any] = client.get_headers() + exclude_fields: Union[Unset, list[str]] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, + include_meta_attr: Union[Unset, list[str]] = UNSET, +) -> dict[str, Any]: + params: dict[str, Any] = {} - json_exclude_fields: Union[Unset, None, List[str]] = UNSET + json_exclude_fields: Union[Unset, list[str]] = UNSET if not isinstance(exclude_fields, Unset): - if exclude_fields is None: - json_exclude_fields = None - else: - json_exclude_fields = exclude_fields + json_exclude_fields = exclude_fields + + params["exclude_fields"] = json_exclude_fields - json_include_fields: Union[Unset, None, List[str]] = UNSET + json_include_fields: Union[Unset, list[str]] = UNSET if not isinstance(include_fields, Unset): - if include_fields is None: - json_include_fields = None - else: - json_include_fields = include_fields + json_include_fields = include_fields - json_include_meta_attr: Union[Unset, None, List[str]] = UNSET + params["include_fields"] = json_include_fields + + json_include_meta_attr: Union[Unset, list[str]] = UNSET if not isinstance(include_meta_attr, Unset): - if include_meta_attr is None: - json_include_meta_attr = None - else: - json_include_meta_attr = include_meta_attr + json_include_meta_attr = include_meta_attr + + params["include_meta_attr"] = json_include_meta_attr - params: Dict[str, Any] = { - "exclude_fields": json_exclude_fields, - "include_fields": json_include_fields, - "include_meta_attr": json_include_meta_attr, - } params = {k: v for k, v in params.items() if v is not UNSET and v is not None} - return { - "url": url, - "headers": headers, + _kwargs: dict[str, Any] = { + "url": f"{client.base_url}//osidb/api/v1/affects/{uuid}".format( + uuid=uuid, + ), "params": params, } + return _kwargs + def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[OsidbApiV1AffectsRetrieveResponse200]: if response.status_code 
== 200: + # } _response_200 = response.json() response_200: OsidbApiV1AffectsRetrieveResponse200 if isinstance(_response_200, Unset): @@ -77,28 +70,42 @@ def _parse_response( response_200 = OsidbApiV1AffectsRetrieveResponse200.from_dict(_response_200) return response_200 - return None def _build_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[OsidbApiV1AffectsRetrieveResponse200]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) def sync_detailed( - uuid: str, + uuid: UUID, *, client: AuthenticatedClient, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, - include_meta_attr: Union[Unset, None, List[str]] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, + include_meta_attr: Union[Unset, list[str]] = UNSET, ) -> Response[OsidbApiV1AffectsRetrieveResponse200]: + """ + Args: + uuid (UUID): + exclude_fields (Union[Unset, list[str]]): + include_fields (Union[Unset, list[str]]): + include_meta_attr (Union[Unset, list[str]]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1AffectsRetrieveResponse200] + """ + kwargs = _get_kwargs( uuid=uuid, client=client, @@ -115,18 +122,31 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( - uuid: str, + uuid: UUID, *, client: AuthenticatedClient, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, - include_meta_attr: Union[Unset, None, List[str]] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, + include_meta_attr: Union[Unset, list[str]] = UNSET, ) -> Optional[OsidbApiV1AffectsRetrieveResponse200]: - """ """ + """ + Args: + uuid (UUID): + exclude_fields (Union[Unset, list[str]]): + include_fields (Union[Unset, list[str]]): + include_meta_attr (Union[Unset, list[str]]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + OsidbApiV1AffectsRetrieveResponse200 + """ return sync_detailed( uuid=uuid, @@ -137,14 +157,29 @@ def sync( ).parsed -async def async_detailed( - uuid: str, +async def asyncio_detailed( + uuid: UUID, *, client: AuthenticatedClient, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, - include_meta_attr: Union[Unset, None, List[str]] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, + include_meta_attr: Union[Unset, list[str]] = UNSET, ) -> Response[OsidbApiV1AffectsRetrieveResponse200]: + """ + Args: + uuid (UUID): + exclude_fields (Union[Unset, list[str]]): + include_fields (Union[Unset, list[str]]): + include_meta_attr (Union[Unset, list[str]]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1AffectsRetrieveResponse200] + """ + kwargs = _get_kwargs( uuid=uuid, client=client, @@ -161,21 +196,34 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( - uuid: str, +async def asyncio( + uuid: UUID, *, client: AuthenticatedClient, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, - include_meta_attr: Union[Unset, None, List[str]] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, + include_meta_attr: Union[Unset, list[str]] = UNSET, ) -> Optional[OsidbApiV1AffectsRetrieveResponse200]: - """ """ + """ + Args: + uuid (UUID): + exclude_fields (Union[Unset, list[str]]): + include_fields (Union[Unset, list[str]]): + include_meta_attr (Union[Unset, list[str]]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + OsidbApiV1AffectsRetrieveResponse200 + """ return ( - await async_detailed( + await asyncio_detailed( uuid=uuid, client=client, exclude_fields=exclude_fields, diff --git a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_affects_update.py b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_affects_update.py index 18e21b3..37e461c 100644 --- a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_affects_update.py +++ b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_affects_update.py @@ -1,8 +1,10 @@ -from typing import Any, Dict, Optional +from http import HTTPStatus +from typing import Any, Optional, Union +from uuid import UUID import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.affect import Affect from ...models.osidb_api_v1_affects_update_response_200 import ( OsidbApiV1AffectsUpdateResponse200, @@ -10,43 +12,45 @@ from ...types import UNSET, Response, Unset QUERY_PARAMS = {} + REQUEST_BODY_TYPE = Affect def _get_kwargs( - uuid: str, + uuid: UUID, *, client: AuthenticatedClient, - form_data: Affect, - multipart_data: Affect, - json_body: Affect, -) -> Dict[str, Any]: - url = "{}/osidb/api/v1/affects/{uuid}".format( - client.base_url, - uuid=uuid, - ) - - headers: Dict[str, Any] = client.get_headers() + body: Union[ + Affect, + Affect, + Affect, + ], +) -> dict[str, Any]: + headers: dict[str, Any] = client.get_headers() + + _kwargs: dict[str, Any] = { + "url": f"{client.base_url}//osidb/api/v1/affects/{uuid}".format( + uuid=uuid, + ), + } - json_json_body: Dict[str, Any] = UNSET - if not isinstance(json_body, Unset): - json_body.to_dict() + if isinstance(body, Affect): + _json_body: dict[str, Any] = UNSET + if not isinstance(body, Unset): + _json_body = body.to_dict() - multipart_multipart_data: Dict[str, Any] = UNSET - if not isinstance(multipart_data, Unset): - multipart_data.to_multipart() + _kwargs["json"] = _json_body + headers["Content-Type"] = "application/json" - return { - "url": url, - "headers": headers, - "json": form_data.to_dict(), - } + _kwargs["headers"] = headers + return _kwargs def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[OsidbApiV1AffectsUpdateResponse200]: if response.status_code == 200: + # } _response_200 = response.json() response_200: OsidbApiV1AffectsUpdateResponse200 if isinstance(_response_200, Unset): @@ -55,34 +59,50 @@ def _parse_response( response_200 = OsidbApiV1AffectsUpdateResponse200.from_dict(_response_200) return response_200 - return None def _build_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[OsidbApiV1AffectsUpdateResponse200]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) def sync_detailed( - uuid: str, + uuid: UUID, *, client: AuthenticatedClient, - form_data: Affect, - multipart_data: Affect, - json_body: Affect, + body: Union[ + Affect, + Affect, + Affect, + ], ) -> Response[OsidbApiV1AffectsUpdateResponse200]: + """ + Args: + uuid (UUID): + bugzilla_api_key (str): + jira_api_key (str): + body (Affect): Affect serializer + body (Affect): Affect serializer + body (Affect): Affect serializer + + Raises: + 
errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1AffectsUpdateResponse200] + """ + kwargs = _get_kwargs( uuid=uuid, client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) response = requests.put( @@ -93,42 +113,74 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( - uuid: str, + uuid: UUID, *, client: AuthenticatedClient, - form_data: Affect, - multipart_data: Affect, - json_body: Affect, + body: Union[ + Affect, + Affect, + Affect, + ], ) -> Optional[OsidbApiV1AffectsUpdateResponse200]: - """ """ + """ + Args: + uuid (UUID): + bugzilla_api_key (str): + jira_api_key (str): + body (Affect): Affect serializer + body (Affect): Affect serializer + body (Affect): Affect serializer + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + OsidbApiV1AffectsUpdateResponse200 + """ return sync_detailed( uuid=uuid, client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ).parsed -async def async_detailed( - uuid: str, +async def asyncio_detailed( + uuid: UUID, *, client: AuthenticatedClient, - form_data: Affect, - multipart_data: Affect, - json_body: Affect, + body: Union[ + Affect, + Affect, + Affect, + ], ) -> Response[OsidbApiV1AffectsUpdateResponse200]: + """ + Args: + uuid (UUID): + bugzilla_api_key (str): + jira_api_key (str): + body (Affect): Affect serializer + body (Affect): Affect serializer + body (Affect): Affect serializer + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1AffectsUpdateResponse200] + """ + kwargs = _get_kwargs( uuid=uuid, client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) async with client.get_async_session().put( @@ -139,25 +191,40 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( - uuid: str, +async def asyncio( + uuid: UUID, *, client: AuthenticatedClient, - form_data: Affect, - multipart_data: Affect, - json_body: Affect, + body: Union[ + Affect, + Affect, + Affect, + ], ) -> Optional[OsidbApiV1AffectsUpdateResponse200]: - """ """ + """ + Args: + uuid (UUID): + bugzilla_api_key (str): + jira_api_key (str): + body (Affect): Affect serializer + body (Affect): Affect serializer + body (Affect): Affect serializer + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
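As a usage sketch of the reshaped update call shown above: the former form_data/multipart_data/json_body trio is replaced by a single body argument, and the path identifier is now a uuid.UUID. The concrete values and the client below are placeholders, not taken from this patch.

from uuid import UUID

from osidb_bindings.bindings.python_client.api.osidb import (
    osidb_api_v1_affects_retrieve,
    osidb_api_v1_affects_update,
)

affect_uuid = UUID("11111111-2222-3333-4444-555555555555")  # placeholder UUID

# Retrieve with the new list[str] field selectors.
detail = osidb_api_v1_affects_retrieve.sync(
    uuid=affect_uuid,
    client=client,  # an AuthenticatedClient configured elsewhere
    include_fields=["uuid", "ps_module", "affectedness"],  # assumed field names
)

# Update now takes the serializer instance through one keyword argument.
affect = ...  # an Affect model instance prepared elsewhere; its fields are outside this hunk
updated = osidb_api_v1_affects_update.sync(
    uuid=affect_uuid,
    client=client,
    body=affect,
)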
+ + Returns: + OsidbApiV1AffectsUpdateResponse200 + """ return ( - await async_detailed( + await asyncio_detailed( uuid=uuid, client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) ).parsed diff --git a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_alerts_list.py b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_alerts_list.py index 0afa364..f615a50 100644 --- a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_alerts_list.py +++ b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_alerts_list.py @@ -1,8 +1,10 @@ -from typing import Any, Dict, List, Optional, Union +from http import HTTPStatus +from typing import Any, Optional, Union +from uuid import UUID import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.osidb_api_v1_alerts_list_alert_type import OsidbApiV1AlertsListAlertType from ...models.osidb_api_v1_alerts_list_parent_model import ( OsidbApiV1AlertsListParentModel, @@ -14,90 +16,89 @@ QUERY_PARAMS = { "alert_type": OsidbApiV1AlertsListAlertType, - "exclude_fields": List[str], - "include_fields": List[str], + "exclude_fields": list[str], + "include_fields": list[str], "limit": int, "name": str, "offset": int, "parent_model": OsidbApiV1AlertsListParentModel, - "parent_uuid": str, - "uuid": str, + "parent_uuid": UUID, + "uuid": UUID, } def _get_kwargs( *, client: AuthenticatedClient, - alert_type: Union[Unset, None, OsidbApiV1AlertsListAlertType] = UNSET, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, - limit: Union[Unset, None, int] = UNSET, - name: Union[Unset, None, str] = UNSET, - offset: Union[Unset, None, int] = UNSET, - parent_model: Union[Unset, None, OsidbApiV1AlertsListParentModel] = UNSET, - parent_uuid: Union[Unset, None, str] = UNSET, - uuid: Union[Unset, None, str] = UNSET, -) -> Dict[str, Any]: - url = "{}/osidb/api/v1/alerts".format( - client.base_url, - ) - - headers: Dict[str, Any] = client.get_headers() - - json_alert_type: Union[Unset, None, str] = UNSET + alert_type: Union[Unset, OsidbApiV1AlertsListAlertType] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, + limit: Union[Unset, int] = UNSET, + name: Union[Unset, str] = UNSET, + offset: Union[Unset, int] = UNSET, + parent_model: Union[Unset, OsidbApiV1AlertsListParentModel] = UNSET, + parent_uuid: Union[Unset, UUID] = UNSET, + uuid: Union[Unset, UUID] = UNSET, +) -> dict[str, Any]: + params: dict[str, Any] = {} + + json_alert_type: Union[Unset, str] = UNSET if not isinstance(alert_type, Unset): + json_alert_type = OsidbApiV1AlertsListAlertType(alert_type).value - json_alert_type = ( - OsidbApiV1AlertsListAlertType(alert_type).value if alert_type else None - ) + params["alert_type"] = json_alert_type - json_exclude_fields: Union[Unset, None, List[str]] = UNSET + json_exclude_fields: Union[Unset, list[str]] = UNSET if not isinstance(exclude_fields, Unset): - if exclude_fields is None: - json_exclude_fields = None - else: - json_exclude_fields = exclude_fields + json_exclude_fields = exclude_fields - json_include_fields: Union[Unset, None, List[str]] = UNSET + params["exclude_fields"] = json_exclude_fields + + json_include_fields: Union[Unset, list[str]] = UNSET if not isinstance(include_fields, Unset): - if include_fields is None: - json_include_fields = None - else: - json_include_fields = include_fields + 
json_include_fields = include_fields + + params["include_fields"] = json_include_fields + + params["limit"] = limit + + params["name"] = name - json_parent_model: Union[Unset, None, str] = UNSET + params["offset"] = offset + + json_parent_model: Union[Unset, str] = UNSET if not isinstance(parent_model, Unset): + json_parent_model = OsidbApiV1AlertsListParentModel(parent_model).value - json_parent_model = ( - OsidbApiV1AlertsListParentModel(parent_model).value - if parent_model - else None - ) + params["parent_model"] = json_parent_model + + json_parent_uuid: Union[Unset, str] = UNSET + if not isinstance(parent_uuid, Unset): + json_parent_uuid = str(parent_uuid) + + params["parent_uuid"] = json_parent_uuid + + json_uuid: Union[Unset, str] = UNSET + if not isinstance(uuid, Unset): + json_uuid = str(uuid) + + params["uuid"] = json_uuid - params: Dict[str, Any] = { - "alert_type": json_alert_type, - "exclude_fields": json_exclude_fields, - "include_fields": json_include_fields, - "limit": limit, - "name": name, - "offset": offset, - "parent_model": json_parent_model, - "parent_uuid": parent_uuid, - "uuid": uuid, - } params = {k: v for k, v in params.items() if v is not UNSET and v is not None} - return { - "url": url, - "headers": headers, + _kwargs: dict[str, Any] = { + "url": f"{client.base_url}/osidb/api/v1/alerts", "params": params, } + return _kwargs + def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[OsidbApiV1AlertsListResponse200]: if response.status_code == 200: + # } _response_200 = response.json() response_200: OsidbApiV1AlertsListResponse200 if isinstance(_response_200, Unset): @@ -106,33 +107,53 @@ def _parse_response( response_200 = OsidbApiV1AlertsListResponse200.from_dict(_response_200) return response_200 - return None def _build_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[OsidbApiV1AlertsListResponse200]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) def sync_detailed( *, client: AuthenticatedClient, - alert_type: Union[Unset, None, OsidbApiV1AlertsListAlertType] = UNSET, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, - limit: Union[Unset, None, int] = UNSET, - name: Union[Unset, None, str] = UNSET, - offset: Union[Unset, None, int] = UNSET, - parent_model: Union[Unset, None, OsidbApiV1AlertsListParentModel] = UNSET, - parent_uuid: Union[Unset, None, str] = UNSET, - uuid: Union[Unset, None, str] = UNSET, + alert_type: Union[Unset, OsidbApiV1AlertsListAlertType] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, + limit: Union[Unset, int] = UNSET, + name: Union[Unset, str] = UNSET, + offset: Union[Unset, int] = UNSET, + parent_model: Union[Unset, OsidbApiV1AlertsListParentModel] = UNSET, + parent_uuid: Union[Unset, UUID] = UNSET, + uuid: Union[Unset, UUID] = UNSET, ) -> Response[OsidbApiV1AlertsListResponse200]: + """List existing alerts for all models. 
+ + Args: + alert_type (Union[Unset, OsidbApiV1AlertsListAlertType]): + exclude_fields (Union[Unset, list[str]]): + include_fields (Union[Unset, list[str]]): + limit (Union[Unset, int]): + name (Union[Unset, str]): + offset (Union[Unset, int]): + parent_model (Union[Unset, OsidbApiV1AlertsListParentModel]): + parent_uuid (Union[Unset, UUID]): + uuid (Union[Unset, UUID]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1AlertsListResponse200] + """ + kwargs = _get_kwargs( client=client, alert_type=alert_type, @@ -154,23 +175,42 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( *, client: AuthenticatedClient, - alert_type: Union[Unset, None, OsidbApiV1AlertsListAlertType] = UNSET, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, - limit: Union[Unset, None, int] = UNSET, - name: Union[Unset, None, str] = UNSET, - offset: Union[Unset, None, int] = UNSET, - parent_model: Union[Unset, None, OsidbApiV1AlertsListParentModel] = UNSET, - parent_uuid: Union[Unset, None, str] = UNSET, - uuid: Union[Unset, None, str] = UNSET, + alert_type: Union[Unset, OsidbApiV1AlertsListAlertType] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, + limit: Union[Unset, int] = UNSET, + name: Union[Unset, str] = UNSET, + offset: Union[Unset, int] = UNSET, + parent_model: Union[Unset, OsidbApiV1AlertsListParentModel] = UNSET, + parent_uuid: Union[Unset, UUID] = UNSET, + uuid: Union[Unset, UUID] = UNSET, ) -> Optional[OsidbApiV1AlertsListResponse200]: - """List existing alerts for all models.""" + """List existing alerts for all models. + + Args: + alert_type (Union[Unset, OsidbApiV1AlertsListAlertType]): + exclude_fields (Union[Unset, list[str]]): + include_fields (Union[Unset, list[str]]): + limit (Union[Unset, int]): + name (Union[Unset, str]): + offset (Union[Unset, int]): + parent_model (Union[Unset, OsidbApiV1AlertsListParentModel]): + parent_uuid (Union[Unset, UUID]): + uuid (Union[Unset, UUID]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + OsidbApiV1AlertsListResponse200 + """ return sync_detailed( client=client, @@ -186,19 +226,40 @@ def sync( ).parsed -async def async_detailed( +async def asyncio_detailed( *, client: AuthenticatedClient, - alert_type: Union[Unset, None, OsidbApiV1AlertsListAlertType] = UNSET, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, - limit: Union[Unset, None, int] = UNSET, - name: Union[Unset, None, str] = UNSET, - offset: Union[Unset, None, int] = UNSET, - parent_model: Union[Unset, None, OsidbApiV1AlertsListParentModel] = UNSET, - parent_uuid: Union[Unset, None, str] = UNSET, - uuid: Union[Unset, None, str] = UNSET, + alert_type: Union[Unset, OsidbApiV1AlertsListAlertType] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, + limit: Union[Unset, int] = UNSET, + name: Union[Unset, str] = UNSET, + offset: Union[Unset, int] = UNSET, + parent_model: Union[Unset, OsidbApiV1AlertsListParentModel] = UNSET, + parent_uuid: Union[Unset, UUID] = UNSET, + uuid: Union[Unset, UUID] = UNSET, ) -> Response[OsidbApiV1AlertsListResponse200]: + """List existing alerts for all models. + + Args: + alert_type (Union[Unset, OsidbApiV1AlertsListAlertType]): + exclude_fields (Union[Unset, list[str]]): + include_fields (Union[Unset, list[str]]): + limit (Union[Unset, int]): + name (Union[Unset, str]): + offset (Union[Unset, int]): + parent_model (Union[Unset, OsidbApiV1AlertsListParentModel]): + parent_uuid (Union[Unset, UUID]): + uuid (Union[Unset, UUID]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1AlertsListResponse200] + """ + kwargs = _get_kwargs( client=client, alert_type=alert_type, @@ -220,26 +281,45 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( +async def asyncio( *, client: AuthenticatedClient, - alert_type: Union[Unset, None, OsidbApiV1AlertsListAlertType] = UNSET, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, - limit: Union[Unset, None, int] = UNSET, - name: Union[Unset, None, str] = UNSET, - offset: Union[Unset, None, int] = UNSET, - parent_model: Union[Unset, None, OsidbApiV1AlertsListParentModel] = UNSET, - parent_uuid: Union[Unset, None, str] = UNSET, - uuid: Union[Unset, None, str] = UNSET, + alert_type: Union[Unset, OsidbApiV1AlertsListAlertType] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, + limit: Union[Unset, int] = UNSET, + name: Union[Unset, str] = UNSET, + offset: Union[Unset, int] = UNSET, + parent_model: Union[Unset, OsidbApiV1AlertsListParentModel] = UNSET, + parent_uuid: Union[Unset, UUID] = UNSET, + uuid: Union[Unset, UUID] = UNSET, ) -> Optional[OsidbApiV1AlertsListResponse200]: - """List existing alerts for all models.""" + """List existing alerts for all models. 
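A sketch of the regenerated alerts listing: UUID-typed filters are passed as uuid.UUID and the alert type goes through the generated enum. The enum member and the UUID below are assumptions for illustration.

from uuid import UUID

from osidb_bindings.bindings.python_client.api.osidb import osidb_api_v1_alerts_list
from osidb_bindings.bindings.python_client.models.osidb_api_v1_alerts_list_alert_type import (
    OsidbApiV1AlertsListAlertType,
)

alerts = osidb_api_v1_alerts_list.sync(
    client=client,  # an AuthenticatedClient configured elsewhere
    alert_type=OsidbApiV1AlertsListAlertType.WARNING,  # assumed member name
    parent_uuid=UUID("11111111-2222-3333-4444-555555555555"),  # placeholder UUID
    limit=50,
    offset=0,
)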
+ + Args: + alert_type (Union[Unset, OsidbApiV1AlertsListAlertType]): + exclude_fields (Union[Unset, list[str]]): + include_fields (Union[Unset, list[str]]): + limit (Union[Unset, int]): + name (Union[Unset, str]): + offset (Union[Unset, int]): + parent_model (Union[Unset, OsidbApiV1AlertsListParentModel]): + parent_uuid (Union[Unset, UUID]): + uuid (Union[Unset, UUID]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + OsidbApiV1AlertsListResponse200 + """ return ( - await async_detailed( + await asyncio_detailed( client=client, alert_type=alert_type, exclude_fields=exclude_fields, diff --git a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_alerts_retrieve.py b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_alerts_retrieve.py index 1f68d7c..3eed186 100644 --- a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_alerts_retrieve.py +++ b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_alerts_retrieve.py @@ -1,64 +1,59 @@ -from typing import Any, Dict, List, Optional, Union +from http import HTTPStatus +from typing import Any, Optional, Union +from uuid import UUID import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.osidb_api_v1_alerts_retrieve_response_200 import ( OsidbApiV1AlertsRetrieveResponse200, ) from ...types import UNSET, Response, Unset QUERY_PARAMS = { - "exclude_fields": List[str], - "include_fields": List[str], + "exclude_fields": list[str], + "include_fields": list[str], } def _get_kwargs( - uuid: str, + uuid: UUID, *, client: AuthenticatedClient, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, -) -> Dict[str, Any]: - url = "{}/osidb/api/v1/alerts/{uuid}".format( - client.base_url, - uuid=uuid, - ) - - headers: Dict[str, Any] = client.get_headers() + exclude_fields: Union[Unset, list[str]] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, +) -> dict[str, Any]: + params: dict[str, Any] = {} - json_exclude_fields: Union[Unset, None, List[str]] = UNSET + json_exclude_fields: Union[Unset, list[str]] = UNSET if not isinstance(exclude_fields, Unset): - if exclude_fields is None: - json_exclude_fields = None - else: - json_exclude_fields = exclude_fields + json_exclude_fields = exclude_fields + + params["exclude_fields"] = json_exclude_fields - json_include_fields: Union[Unset, None, List[str]] = UNSET + json_include_fields: Union[Unset, list[str]] = UNSET if not isinstance(include_fields, Unset): - if include_fields is None: - json_include_fields = None - else: - json_include_fields = include_fields + json_include_fields = include_fields + + params["include_fields"] = json_include_fields - params: Dict[str, Any] = { - "exclude_fields": json_exclude_fields, - "include_fields": json_include_fields, - } params = {k: v for k, v in params.items() if v is not UNSET and v is not None} - return { - "url": url, - "headers": headers, + _kwargs: dict[str, Any] = { + "url": f"{client.base_url}//osidb/api/v1/alerts/{uuid}".format( + uuid=uuid, + ), "params": params, } + return _kwargs + def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[OsidbApiV1AlertsRetrieveResponse200]: if response.status_code == 200: + # } _response_200 = 
response.json() response_200: OsidbApiV1AlertsRetrieveResponse200 if isinstance(_response_200, Unset): @@ -67,27 +62,40 @@ def _parse_response( response_200 = OsidbApiV1AlertsRetrieveResponse200.from_dict(_response_200) return response_200 - return None def _build_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[OsidbApiV1AlertsRetrieveResponse200]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) def sync_detailed( - uuid: str, + uuid: UUID, *, client: AuthenticatedClient, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, ) -> Response[OsidbApiV1AlertsRetrieveResponse200]: + """ + Args: + uuid (UUID): + exclude_fields (Union[Unset, list[str]]): + include_fields (Union[Unset, list[str]]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1AlertsRetrieveResponse200] + """ + kwargs = _get_kwargs( uuid=uuid, client=client, @@ -103,17 +111,29 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( - uuid: str, + uuid: UUID, *, client: AuthenticatedClient, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, ) -> Optional[OsidbApiV1AlertsRetrieveResponse200]: - """ """ + """ + Args: + uuid (UUID): + exclude_fields (Union[Unset, list[str]]): + include_fields (Union[Unset, list[str]]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + OsidbApiV1AlertsRetrieveResponse200 + """ return sync_detailed( uuid=uuid, @@ -123,13 +143,27 @@ def sync( ).parsed -async def async_detailed( - uuid: str, +async def asyncio_detailed( + uuid: UUID, *, client: AuthenticatedClient, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, ) -> Response[OsidbApiV1AlertsRetrieveResponse200]: + """ + Args: + uuid (UUID): + exclude_fields (Union[Unset, list[str]]): + include_fields (Union[Unset, list[str]]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[OsidbApiV1AlertsRetrieveResponse200] + """ + kwargs = _get_kwargs( uuid=uuid, client=client, @@ -145,20 +179,32 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( - uuid: str, +async def asyncio( + uuid: UUID, *, client: AuthenticatedClient, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, ) -> Optional[OsidbApiV1AlertsRetrieveResponse200]: - """ """ + """ + Args: + uuid (UUID): + exclude_fields (Union[Unset, list[str]]): + include_fields (Union[Unset, list[str]]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + OsidbApiV1AlertsRetrieveResponse200 + """ return ( - await async_detailed( + await asyncio_detailed( uuid=uuid, client=client, exclude_fields=exclude_fields, diff --git a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_audit_list.py b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_audit_list.py index 5de3468..0ec58ef 100644 --- a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_audit_list.py +++ b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_audit_list.py @@ -1,9 +1,10 @@ import datetime -from typing import Any, Dict, Optional, Union +from http import HTTPStatus +from typing import Any, Optional, Union import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.osidb_api_v1_audit_list_response_200 import ( OsidbApiV1AuditListResponse200, ) @@ -22,44 +23,46 @@ def _get_kwargs( *, client: AuthenticatedClient, - limit: Union[Unset, None, int] = UNSET, - offset: Union[Unset, None, int] = UNSET, - pgh_created_at: Union[Unset, None, datetime.datetime] = UNSET, - pgh_label: Union[Unset, None, str] = UNSET, - pgh_obj_model: Union[Unset, None, str] = UNSET, - pgh_slug: Union[Unset, None, str] = UNSET, -) -> Dict[str, Any]: - url = "{}/osidb/api/v1/audit".format( - client.base_url, - ) + limit: Union[Unset, int] = UNSET, + offset: Union[Unset, int] = UNSET, + pgh_created_at: Union[Unset, datetime.datetime] = UNSET, + pgh_label: Union[Unset, str] = UNSET, + pgh_obj_model: Union[Unset, str] = UNSET, + pgh_slug: Union[Unset, str] = UNSET, +) -> dict[str, Any]: + params: dict[str, Any] = {} + + params["limit"] = limit - headers: Dict[str, Any] = client.get_headers() + params["offset"] = offset - json_pgh_created_at: Union[Unset, None, str] = UNSET + json_pgh_created_at: Union[Unset, str] = UNSET if not isinstance(pgh_created_at, Unset): - json_pgh_created_at = pgh_created_at.isoformat() if pgh_created_at else None - - params: Dict[str, Any] = { - "limit": limit, - "offset": offset, - "pgh_created_at": json_pgh_created_at, - "pgh_label": pgh_label, - "pgh_obj_model": pgh_obj_model, - "pgh_slug": pgh_slug, - } + json_pgh_created_at = pgh_created_at.isoformat() + + params["pgh_created_at"] = json_pgh_created_at + + params["pgh_label"] = pgh_label + + params["pgh_obj_model"] = pgh_obj_model + + params["pgh_slug"] = pgh_slug + params = {k: v for k, v in params.items() if v is not UNSET and v is not None} - return { - "url": url, - "headers": headers, + _kwargs: dict[str, 
Any] = { + "url": f"{client.base_url}/osidb/api/v1/audit", "params": params, } + return _kwargs + def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[OsidbApiV1AuditListResponse200]: if response.status_code == 200: + # } _response_200 = response.json() response_200: OsidbApiV1AuditListResponse200 if isinstance(_response_200, Unset): @@ -68,30 +71,47 @@ def _parse_response( response_200 = OsidbApiV1AuditListResponse200.from_dict(_response_200) return response_200 - return None def _build_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[OsidbApiV1AuditListResponse200]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) def sync_detailed( *, client: AuthenticatedClient, - limit: Union[Unset, None, int] = UNSET, - offset: Union[Unset, None, int] = UNSET, - pgh_created_at: Union[Unset, None, datetime.datetime] = UNSET, - pgh_label: Union[Unset, None, str] = UNSET, - pgh_obj_model: Union[Unset, None, str] = UNSET, - pgh_slug: Union[Unset, None, str] = UNSET, + limit: Union[Unset, int] = UNSET, + offset: Union[Unset, int] = UNSET, + pgh_created_at: Union[Unset, datetime.datetime] = UNSET, + pgh_label: Union[Unset, str] = UNSET, + pgh_obj_model: Union[Unset, str] = UNSET, + pgh_slug: Union[Unset, str] = UNSET, ) -> Response[OsidbApiV1AuditListResponse200]: + """basic view of audit history events + + Args: + limit (Union[Unset, int]): + offset (Union[Unset, int]): + pgh_created_at (Union[Unset, datetime.datetime]): + pgh_label (Union[Unset, str]): + pgh_obj_model (Union[Unset, str]): + pgh_slug (Union[Unset, str]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1AuditListResponse200] + """ + kwargs = _get_kwargs( client=client, limit=limit, @@ -110,20 +130,36 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( *, client: AuthenticatedClient, - limit: Union[Unset, None, int] = UNSET, - offset: Union[Unset, None, int] = UNSET, - pgh_created_at: Union[Unset, None, datetime.datetime] = UNSET, - pgh_label: Union[Unset, None, str] = UNSET, - pgh_obj_model: Union[Unset, None, str] = UNSET, - pgh_slug: Union[Unset, None, str] = UNSET, + limit: Union[Unset, int] = UNSET, + offset: Union[Unset, int] = UNSET, + pgh_created_at: Union[Unset, datetime.datetime] = UNSET, + pgh_label: Union[Unset, str] = UNSET, + pgh_obj_model: Union[Unset, str] = UNSET, + pgh_slug: Union[Unset, str] = UNSET, ) -> Optional[OsidbApiV1AuditListResponse200]: - """basic view of audit history events""" + """basic view of audit history events + + Args: + limit (Union[Unset, int]): + offset (Union[Unset, int]): + pgh_created_at (Union[Unset, datetime.datetime]): + pgh_label (Union[Unset, str]): + pgh_obj_model (Union[Unset, str]): + pgh_slug (Union[Unset, str]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. 
+ httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + OsidbApiV1AuditListResponse200 + """ return sync_detailed( client=client, @@ -136,16 +172,34 @@ def sync( ).parsed -async def async_detailed( +async def asyncio_detailed( *, client: AuthenticatedClient, - limit: Union[Unset, None, int] = UNSET, - offset: Union[Unset, None, int] = UNSET, - pgh_created_at: Union[Unset, None, datetime.datetime] = UNSET, - pgh_label: Union[Unset, None, str] = UNSET, - pgh_obj_model: Union[Unset, None, str] = UNSET, - pgh_slug: Union[Unset, None, str] = UNSET, + limit: Union[Unset, int] = UNSET, + offset: Union[Unset, int] = UNSET, + pgh_created_at: Union[Unset, datetime.datetime] = UNSET, + pgh_label: Union[Unset, str] = UNSET, + pgh_obj_model: Union[Unset, str] = UNSET, + pgh_slug: Union[Unset, str] = UNSET, ) -> Response[OsidbApiV1AuditListResponse200]: + """basic view of audit history events + + Args: + limit (Union[Unset, int]): + offset (Union[Unset, int]): + pgh_created_at (Union[Unset, datetime.datetime]): + pgh_label (Union[Unset, str]): + pgh_obj_model (Union[Unset, str]): + pgh_slug (Union[Unset, str]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1AuditListResponse200] + """ + kwargs = _get_kwargs( client=client, limit=limit, @@ -164,23 +218,39 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( +async def asyncio( *, client: AuthenticatedClient, - limit: Union[Unset, None, int] = UNSET, - offset: Union[Unset, None, int] = UNSET, - pgh_created_at: Union[Unset, None, datetime.datetime] = UNSET, - pgh_label: Union[Unset, None, str] = UNSET, - pgh_obj_model: Union[Unset, None, str] = UNSET, - pgh_slug: Union[Unset, None, str] = UNSET, + limit: Union[Unset, int] = UNSET, + offset: Union[Unset, int] = UNSET, + pgh_created_at: Union[Unset, datetime.datetime] = UNSET, + pgh_label: Union[Unset, str] = UNSET, + pgh_obj_model: Union[Unset, str] = UNSET, + pgh_slug: Union[Unset, str] = UNSET, ) -> Optional[OsidbApiV1AuditListResponse200]: - """basic view of audit history events""" + """basic view of audit history events + + Args: + limit (Union[Unset, int]): + offset (Union[Unset, int]): + pgh_created_at (Union[Unset, datetime.datetime]): + pgh_label (Union[Unset, str]): + pgh_obj_model (Union[Unset, str]): + pgh_slug (Union[Unset, str]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + OsidbApiV1AuditListResponse200 + """ return ( - await async_detailed( + await asyncio_detailed( client=client, limit=limit, offset=offset, diff --git a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_audit_retrieve.py b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_audit_retrieve.py index 39daeca..dbb3be1 100644 --- a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_audit_retrieve.py +++ b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_audit_retrieve.py @@ -1,8 +1,9 @@ -from typing import Any, Dict, Optional +from http import HTTPStatus +from typing import Any, Optional, Union import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.osidb_api_v1_audit_retrieve_response_200 import ( OsidbApiV1AuditRetrieveResponse200, ) @@ -15,24 +16,21 @@ def _get_kwargs( pgh_slug: str, *, client: AuthenticatedClient, -) -> Dict[str, Any]: - url = "{}/osidb/api/v1/audit/{pgh_slug}".format( - client.base_url, - pgh_slug=pgh_slug, - ) - - headers: Dict[str, Any] = client.get_headers() - - return { - "url": url, - "headers": headers, +) -> dict[str, Any]: + _kwargs: dict[str, Any] = { + "url": f"{client.base_url}//osidb/api/v1/audit/{pgh_slug}".format( + pgh_slug=pgh_slug, + ), } + return _kwargs + def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[OsidbApiV1AuditRetrieveResponse200]: if response.status_code == 200: + # } _response_200 = response.json() response_200: OsidbApiV1AuditRetrieveResponse200 if isinstance(_response_200, Unset): @@ -41,17 +39,16 @@ def _parse_response( response_200 = OsidbApiV1AuditRetrieveResponse200.from_dict(_response_200) return response_200 - return None def _build_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[OsidbApiV1AuditRetrieveResponse200]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) @@ -60,6 +57,19 @@ def sync_detailed( *, client: AuthenticatedClient, ) -> Response[OsidbApiV1AuditRetrieveResponse200]: + """basic view of audit history events + + Args: + pgh_slug (str): The unique identifier across all event tables. + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1AuditRetrieveResponse200] + """ + kwargs = _get_kwargs( pgh_slug=pgh_slug, client=client, @@ -73,7 +83,7 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( @@ -81,7 +91,18 @@ def sync( *, client: AuthenticatedClient, ) -> Optional[OsidbApiV1AuditRetrieveResponse200]: - """basic view of audit history events""" + """basic view of audit history events + + Args: + pgh_slug (str): The unique identifier across all event tables. + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
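Because the async entry points are renamed from async_/async_detailed to asyncio/asyncio_detailed throughout, a sketch of calling the audit listing from async code follows; the filter values are assumptions.

from osidb_bindings.bindings.python_client.api.osidb import osidb_api_v1_audit_list


async def recent_flaw_audit_events(client):
    # client is an AuthenticatedClient; the pgh_* filters mirror the docstring above.
    return await osidb_api_v1_audit_list.asyncio(
        client=client,
        pgh_obj_model="osidb.Flaw",  # assumed model label
        limit=20,
    )


# events = asyncio.run(recent_flaw_audit_events(client))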
+ + Returns: + OsidbApiV1AuditRetrieveResponse200 + """ return sync_detailed( pgh_slug=pgh_slug, @@ -89,11 +110,24 @@ def sync( ).parsed -async def async_detailed( +async def asyncio_detailed( pgh_slug: str, *, client: AuthenticatedClient, ) -> Response[OsidbApiV1AuditRetrieveResponse200]: + """basic view of audit history events + + Args: + pgh_slug (str): The unique identifier across all event tables. + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1AuditRetrieveResponse200] + """ + kwargs = _get_kwargs( pgh_slug=pgh_slug, client=client, @@ -107,18 +141,29 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( +async def asyncio( pgh_slug: str, *, client: AuthenticatedClient, ) -> Optional[OsidbApiV1AuditRetrieveResponse200]: - """basic view of audit history events""" + """basic view of audit history events + + Args: + pgh_slug (str): The unique identifier across all event tables. + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + OsidbApiV1AuditRetrieveResponse200 + """ return ( - await async_detailed( + await asyncio_detailed( pgh_slug=pgh_slug, client=client, ) diff --git a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_audit_update.py b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_audit_update.py index 57788d5..5f8cd64 100644 --- a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_audit_update.py +++ b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_audit_update.py @@ -1,8 +1,9 @@ -from typing import Any, Dict, Optional +from http import HTTPStatus +from typing import Any, Optional, Union import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.audit import Audit from ...models.osidb_api_v1_audit_update_response_200 import ( OsidbApiV1AuditUpdateResponse200, @@ -10,6 +11,7 @@ from ...types import UNSET, Response, Unset QUERY_PARAMS = {} + REQUEST_BODY_TYPE = Audit @@ -17,36 +19,37 @@ def _get_kwargs( pgh_slug: str, *, client: AuthenticatedClient, - form_data: Audit, - multipart_data: Audit, - json_body: Audit, -) -> Dict[str, Any]: - url = "{}/osidb/api/v1/audit/{pgh_slug}".format( - client.base_url, - pgh_slug=pgh_slug, - ) + body: Union[ + Audit, + Audit, + Audit, + ], +) -> dict[str, Any]: + headers: dict[str, Any] = client.get_headers() + + _kwargs: dict[str, Any] = { + "url": f"{client.base_url}//osidb/api/v1/audit/{pgh_slug}".format( + pgh_slug=pgh_slug, + ), + } - headers: Dict[str, Any] = client.get_headers() + if isinstance(body, Audit): + _json_body: dict[str, Any] = UNSET + if not isinstance(body, Unset): + _json_body = body.to_dict() - json_json_body: Dict[str, Any] = UNSET - if not isinstance(json_body, Unset): - json_body.to_dict() + _kwargs["json"] = _json_body + headers["Content-Type"] = "application/json" - multipart_multipart_data: Dict[str, Any] = UNSET - if not isinstance(multipart_data, Unset): - multipart_data.to_multipart() - - return { - "url": url, - "headers": headers, - "json": form_data.to_dict(), - } + _kwargs["headers"] = 
headers + return _kwargs def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[OsidbApiV1AuditUpdateResponse200]: if response.status_code == 200: + # } _response_200 = response.json() response_200: OsidbApiV1AuditUpdateResponse200 if isinstance(_response_200, Unset): @@ -55,17 +58,16 @@ def _parse_response( response_200 = OsidbApiV1AuditUpdateResponse200.from_dict(_response_200) return response_200 - return None def _build_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[OsidbApiV1AuditUpdateResponse200]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) @@ -73,16 +75,32 @@ def sync_detailed( pgh_slug: str, *, client: AuthenticatedClient, - form_data: Audit, - multipart_data: Audit, - json_body: Audit, + body: Union[ + Audit, + Audit, + Audit, + ], ) -> Response[OsidbApiV1AuditUpdateResponse200]: + """basic view of audit history events + + Args: + pgh_slug (str): The unique identifier across all event tables. + body (Audit): + body (Audit): + body (Audit): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1AuditUpdateResponse200] + """ + kwargs = _get_kwargs( pgh_slug=pgh_slug, client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) response = requests.put( @@ -93,42 +111,72 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( pgh_slug: str, *, client: AuthenticatedClient, - form_data: Audit, - multipart_data: Audit, - json_body: Audit, + body: Union[ + Audit, + Audit, + Audit, + ], ) -> Optional[OsidbApiV1AuditUpdateResponse200]: - """basic view of audit history events""" + """basic view of audit history events + + Args: + pgh_slug (str): The unique identifier across all event tables. + body (Audit): + body (Audit): + body (Audit): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + OsidbApiV1AuditUpdateResponse200 + """ return sync_detailed( pgh_slug=pgh_slug, client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ).parsed -async def async_detailed( +async def asyncio_detailed( pgh_slug: str, *, client: AuthenticatedClient, - form_data: Audit, - multipart_data: Audit, - json_body: Audit, + body: Union[ + Audit, + Audit, + Audit, + ], ) -> Response[OsidbApiV1AuditUpdateResponse200]: + """basic view of audit history events + + Args: + pgh_slug (str): The unique identifier across all event tables. + body (Audit): + body (Audit): + body (Audit): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[OsidbApiV1AuditUpdateResponse200] + """ + kwargs = _get_kwargs( pgh_slug=pgh_slug, client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) async with client.get_async_session().put( @@ -139,25 +187,39 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( +async def asyncio( pgh_slug: str, *, client: AuthenticatedClient, - form_data: Audit, - multipart_data: Audit, - json_body: Audit, + body: Union[ + Audit, + Audit, + Audit, + ], ) -> Optional[OsidbApiV1AuditUpdateResponse200]: - """basic view of audit history events""" + """basic view of audit history events + + Args: + pgh_slug (str): The unique identifier across all event tables. + body (Audit): + body (Audit): + body (Audit): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + OsidbApiV1AuditUpdateResponse200 + """ return ( - await async_detailed( + await asyncio_detailed( pgh_slug=pgh_slug, client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) ).parsed diff --git a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_acknowledgments_create.py b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_acknowledgments_create.py index bcda2d6..966dc2e 100644 --- a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_acknowledgments_create.py +++ b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_acknowledgments_create.py @@ -1,8 +1,10 @@ -from typing import Any, Dict, Optional +from http import HTTPStatus +from typing import Any, Optional, Union +from uuid import UUID import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.flaw_acknowledgment_post import FlawAcknowledgmentPost from ...models.osidb_api_v1_flaws_acknowledgments_create_response_201 import ( OsidbApiV1FlawsAcknowledgmentsCreateResponse201, @@ -10,81 +12,96 @@ from ...types import UNSET, Response, Unset QUERY_PARAMS = {} + REQUEST_BODY_TYPE = FlawAcknowledgmentPost def _get_kwargs( - flaw_id: str, + flaw_id: UUID, *, client: AuthenticatedClient, - form_data: FlawAcknowledgmentPost, - multipart_data: FlawAcknowledgmentPost, - json_body: FlawAcknowledgmentPost, -) -> Dict[str, Any]: - url = "{}/osidb/api/v1/flaws/{flaw_id}/acknowledgments".format( - client.base_url, - flaw_id=flaw_id, - ) - - headers: Dict[str, Any] = client.get_headers() + body: Union[ + FlawAcknowledgmentPost, + FlawAcknowledgmentPost, + FlawAcknowledgmentPost, + ], +) -> dict[str, Any]: + headers: dict[str, Any] = client.get_headers() + + _kwargs: dict[str, Any] = { + "url": f"{client.base_url}//osidb/api/v1/flaws/{flaw_id}/acknowledgments".format( + flaw_id=flaw_id, + ), + } - json_json_body: Dict[str, Any] = UNSET - if not isinstance(json_body, Unset): - json_body.to_dict() + if isinstance(body, FlawAcknowledgmentPost): + _json_body: dict[str, Any] = UNSET + if not isinstance(body, Unset): + _json_body = body.to_dict() - multipart_multipart_data: Dict[str, Any] = UNSET - if not isinstance(multipart_data, Unset): - multipart_data.to_multipart() + _kwargs["json"] = _json_body + headers["Content-Type"] = "application/json" - return { - 
"url": url, - "headers": headers, - "json": form_data.to_dict(), - } + _kwargs["headers"] = headers + return _kwargs def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[OsidbApiV1FlawsAcknowledgmentsCreateResponse201]: if response.status_code == 201: + # } _response_201 = response.json() response_201: OsidbApiV1FlawsAcknowledgmentsCreateResponse201 if isinstance(_response_201, Unset): response_201 = UNSET else: - response_201 = OsidbApiV1FlawsAcknowledgmentsCreateResponse201.from_dict( - _response_201 - ) + response_201 = OsidbApiV1FlawsAcknowledgmentsCreateResponse201.from_dict(_response_201) return response_201 - return None def _build_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[OsidbApiV1FlawsAcknowledgmentsCreateResponse201]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) def sync_detailed( - flaw_id: str, + flaw_id: UUID, *, client: AuthenticatedClient, - form_data: FlawAcknowledgmentPost, - multipart_data: FlawAcknowledgmentPost, - json_body: FlawAcknowledgmentPost, + body: Union[ + FlawAcknowledgmentPost, + FlawAcknowledgmentPost, + FlawAcknowledgmentPost, + ], ) -> Response[OsidbApiV1FlawsAcknowledgmentsCreateResponse201]: + """ + Args: + flaw_id (UUID): + bugzilla_api_key (str): + body (FlawAcknowledgmentPost): FlawAcknowledgment serializer + body (FlawAcknowledgmentPost): FlawAcknowledgment serializer + body (FlawAcknowledgmentPost): FlawAcknowledgment serializer + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1FlawsAcknowledgmentsCreateResponse201] + """ + kwargs = _get_kwargs( flaw_id=flaw_id, client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) response = requests.post( @@ -95,42 +112,72 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( - flaw_id: str, + flaw_id: UUID, *, client: AuthenticatedClient, - form_data: FlawAcknowledgmentPost, - multipart_data: FlawAcknowledgmentPost, - json_body: FlawAcknowledgmentPost, + body: Union[ + FlawAcknowledgmentPost, + FlawAcknowledgmentPost, + FlawAcknowledgmentPost, + ], ) -> Optional[OsidbApiV1FlawsAcknowledgmentsCreateResponse201]: - """ """ + """ + Args: + flaw_id (UUID): + bugzilla_api_key (str): + body (FlawAcknowledgmentPost): FlawAcknowledgment serializer + body (FlawAcknowledgmentPost): FlawAcknowledgment serializer + body (FlawAcknowledgmentPost): FlawAcknowledgment serializer + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + OsidbApiV1FlawsAcknowledgmentsCreateResponse201 + """ return sync_detailed( flaw_id=flaw_id, client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ).parsed -async def async_detailed( - flaw_id: str, +async def asyncio_detailed( + flaw_id: UUID, *, client: AuthenticatedClient, - form_data: FlawAcknowledgmentPost, - multipart_data: FlawAcknowledgmentPost, - json_body: FlawAcknowledgmentPost, + body: Union[ + FlawAcknowledgmentPost, + FlawAcknowledgmentPost, + FlawAcknowledgmentPost, + ], ) -> Response[OsidbApiV1FlawsAcknowledgmentsCreateResponse201]: + """ + Args: + flaw_id (UUID): + bugzilla_api_key (str): + body (FlawAcknowledgmentPost): FlawAcknowledgment serializer + body (FlawAcknowledgmentPost): FlawAcknowledgment serializer + body (FlawAcknowledgmentPost): FlawAcknowledgment serializer + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1FlawsAcknowledgmentsCreateResponse201] + """ + kwargs = _get_kwargs( flaw_id=flaw_id, client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) async with client.get_async_session().post( @@ -141,25 +188,39 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( - flaw_id: str, +async def asyncio( + flaw_id: UUID, *, client: AuthenticatedClient, - form_data: FlawAcknowledgmentPost, - multipart_data: FlawAcknowledgmentPost, - json_body: FlawAcknowledgmentPost, + body: Union[ + FlawAcknowledgmentPost, + FlawAcknowledgmentPost, + FlawAcknowledgmentPost, + ], ) -> Optional[OsidbApiV1FlawsAcknowledgmentsCreateResponse201]: - """ """ + """ + Args: + flaw_id (UUID): + bugzilla_api_key (str): + body (FlawAcknowledgmentPost): FlawAcknowledgment serializer + body (FlawAcknowledgmentPost): FlawAcknowledgment serializer + body (FlawAcknowledgmentPost): FlawAcknowledgment serializer + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + OsidbApiV1FlawsAcknowledgmentsCreateResponse201 + """ return ( - await async_detailed( + await asyncio_detailed( flaw_id=flaw_id, client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) ).parsed diff --git a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_acknowledgments_destroy.py b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_acknowledgments_destroy.py index bc8d180..2644f8a 100644 --- a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_acknowledgments_destroy.py +++ b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_acknowledgments_destroy.py @@ -1,8 +1,10 @@ -from typing import Any, Dict, Optional +from http import HTTPStatus +from typing import Any, Optional, Union +from uuid import UUID import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.osidb_api_v1_flaws_acknowledgments_destroy_response_200 import ( OsidbApiV1FlawsAcknowledgmentsDestroyResponse200, ) @@ -12,59 +14,71 @@ def _get_kwargs( - flaw_id: str, + flaw_id: UUID, id: str, *, client: AuthenticatedClient, -) -> Dict[str, Any]: - url = "{}/osidb/api/v1/flaws/{flaw_id}/acknowledgments/{id}".format( - client.base_url, - flaw_id=flaw_id, - id=id, - ) +) -> dict[str, Any]: + headers: dict[str, Any] = client.get_headers() - headers: Dict[str, Any] = client.get_headers() - - return { - "url": url, - "headers": headers, + _kwargs: dict[str, Any] = { + "url": f"{client.base_url}//osidb/api/v1/flaws/{flaw_id}/acknowledgments/{id}".format( + flaw_id=flaw_id, + id=id, + ), } + _kwargs["headers"] = headers + return _kwargs + def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[OsidbApiV1FlawsAcknowledgmentsDestroyResponse200]: if response.status_code == 200: + # } _response_200 = response.json() response_200: OsidbApiV1FlawsAcknowledgmentsDestroyResponse200 if isinstance(_response_200, Unset): response_200 = UNSET else: - response_200 = OsidbApiV1FlawsAcknowledgmentsDestroyResponse200.from_dict( - _response_200 - ) + response_200 = OsidbApiV1FlawsAcknowledgmentsDestroyResponse200.from_dict(_response_200) return response_200 - return None def _build_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[OsidbApiV1FlawsAcknowledgmentsDestroyResponse200]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) def sync_detailed( - flaw_id: str, + flaw_id: UUID, id: str, *, client: AuthenticatedClient, ) -> Response[OsidbApiV1FlawsAcknowledgmentsDestroyResponse200]: + """Destroy the instance and proxy the delete to Bugzilla + + Args: + flaw_id (UUID): + id (str): + bugzilla_api_key (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[OsidbApiV1FlawsAcknowledgmentsDestroyResponse200] + """ + kwargs = _get_kwargs( flaw_id=flaw_id, id=id, @@ -79,16 +93,29 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( - flaw_id: str, + flaw_id: UUID, id: str, *, client: AuthenticatedClient, ) -> Optional[OsidbApiV1FlawsAcknowledgmentsDestroyResponse200]: - """Destroy the instance and proxy the delete to Bugzilla""" + """Destroy the instance and proxy the delete to Bugzilla + + Args: + flaw_id (UUID): + id (str): + bugzilla_api_key (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + OsidbApiV1FlawsAcknowledgmentsDestroyResponse200 + """ return sync_detailed( flaw_id=flaw_id, @@ -97,12 +124,27 @@ def sync( ).parsed -async def async_detailed( - flaw_id: str, +async def asyncio_detailed( + flaw_id: UUID, id: str, *, client: AuthenticatedClient, ) -> Response[OsidbApiV1FlawsAcknowledgmentsDestroyResponse200]: + """Destroy the instance and proxy the delete to Bugzilla + + Args: + flaw_id (UUID): + id (str): + bugzilla_api_key (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1FlawsAcknowledgmentsDestroyResponse200] + """ + kwargs = _get_kwargs( flaw_id=flaw_id, id=id, @@ -117,19 +159,32 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( - flaw_id: str, +async def asyncio( + flaw_id: UUID, id: str, *, client: AuthenticatedClient, ) -> Optional[OsidbApiV1FlawsAcknowledgmentsDestroyResponse200]: - """Destroy the instance and proxy the delete to Bugzilla""" + """Destroy the instance and proxy the delete to Bugzilla + + Args: + flaw_id (UUID): + id (str): + bugzilla_api_key (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + OsidbApiV1FlawsAcknowledgmentsDestroyResponse200 + """ return ( - await async_detailed( + await asyncio_detailed( flaw_id=flaw_id, id=id, client=client, diff --git a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_acknowledgments_list.py b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_acknowledgments_list.py index 3289b00..8fe5436 100644 --- a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_acknowledgments_list.py +++ b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_acknowledgments_list.py @@ -1,9 +1,11 @@ import datetime -from typing import Any, Dict, List, Optional, Union +from http import HTTPStatus +from typing import Any, Optional, Union +from uuid import UUID import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.osidb_api_v1_flaws_acknowledgments_list_response_200 import ( OsidbApiV1FlawsAcknowledgmentsListResponse200, ) @@ -19,9 +21,9 @@ "created_dt__gte": datetime.datetime, "created_dt__lt": datetime.datetime, "created_dt__lte": datetime.datetime, - "exclude_fields": List[str], + "exclude_fields": list[str], "from_upstream": bool, - "include_fields": List[str], + "include_fields": list[str], "limit": int, "name": str, "offset": int, @@ -33,224 +35,268 @@ "updated_dt__gte": datetime.datetime, "updated_dt__lt": datetime.datetime, "updated_dt__lte": datetime.datetime, - "uuid": str, + "uuid": UUID, } def _get_kwargs( - flaw_id: str, + flaw_id: UUID, *, client: AuthenticatedClient, - affiliation: Union[Unset, None, str] = UNSET, - created_dt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_date: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - from_upstream: Union[Unset, None, bool] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, - limit: Union[Unset, None, int] = UNSET, - name: Union[Unset, None, str] = UNSET, - offset: Union[Unset, None, int] = UNSET, - updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - uuid: Union[Unset, None, str] = UNSET, -) -> Dict[str, Any]: - url = "{}/osidb/api/v1/flaws/{flaw_id}/acknowledgments".format( - client.base_url, - flaw_id=flaw_id, - ) - - headers: Dict[str, Any] = client.get_headers() - - json_created_dt: Union[Unset, None, str] = UNSET + affiliation: Union[Unset, str] = UNSET, + created_dt: Union[Unset, datetime.datetime] = UNSET, + created_dt_date: Union[Unset, datetime.date] = UNSET, + created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + 
created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + from_upstream: Union[Unset, bool] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, + limit: Union[Unset, int] = UNSET, + name: Union[Unset, str] = UNSET, + offset: Union[Unset, int] = UNSET, + updated_dt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_date: Union[Unset, datetime.date] = UNSET, + updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + uuid: Union[Unset, UUID] = UNSET, +) -> dict[str, Any]: + params: dict[str, Any] = {} + + params["affiliation"] = affiliation + + json_created_dt: Union[Unset, str] = UNSET if not isinstance(created_dt, Unset): - json_created_dt = created_dt.isoformat() if created_dt else None + json_created_dt = created_dt.isoformat() + + params["created_dt"] = json_created_dt - json_created_dt_date: Union[Unset, None, str] = UNSET + json_created_dt_date: Union[Unset, str] = UNSET if not isinstance(created_dt_date, Unset): - json_created_dt_date = created_dt_date.isoformat() if created_dt_date else None + json_created_dt_date = created_dt_date.isoformat() - json_created_dt_date_gte: Union[Unset, None, str] = UNSET + params["created_dt__date"] = json_created_dt_date + + json_created_dt_date_gte: Union[Unset, str] = UNSET if not isinstance(created_dt_date_gte, Unset): - json_created_dt_date_gte = ( - created_dt_date_gte.isoformat() if created_dt_date_gte else None - ) + json_created_dt_date_gte = created_dt_date_gte.isoformat() + + params["created_dt__date__gte"] = json_created_dt_date_gte - json_created_dt_date_lte: Union[Unset, None, str] = UNSET + json_created_dt_date_lte: Union[Unset, str] = UNSET if not isinstance(created_dt_date_lte, Unset): - json_created_dt_date_lte = ( - created_dt_date_lte.isoformat() if created_dt_date_lte else None - ) + json_created_dt_date_lte = created_dt_date_lte.isoformat() + + params["created_dt__date__lte"] = json_created_dt_date_lte - json_created_dt_gt: Union[Unset, None, str] = UNSET + json_created_dt_gt: Union[Unset, str] = UNSET if not isinstance(created_dt_gt, Unset): - json_created_dt_gt = created_dt_gt.isoformat() if created_dt_gt else None + json_created_dt_gt = created_dt_gt.isoformat() - json_created_dt_gte: Union[Unset, None, str] = UNSET + params["created_dt__gt"] = json_created_dt_gt + + json_created_dt_gte: Union[Unset, str] = UNSET if not isinstance(created_dt_gte, Unset): - json_created_dt_gte = created_dt_gte.isoformat() if created_dt_gte else None + json_created_dt_gte = created_dt_gte.isoformat() + + params["created_dt__gte"] = json_created_dt_gte - json_created_dt_lt: Union[Unset, None, str] = UNSET + json_created_dt_lt: Union[Unset, str] = UNSET if not isinstance(created_dt_lt, Unset): - json_created_dt_lt = created_dt_lt.isoformat() if created_dt_lt else None + json_created_dt_lt = created_dt_lt.isoformat() + + params["created_dt__lt"] = json_created_dt_lt - json_created_dt_lte: Union[Unset, None, str] = UNSET + json_created_dt_lte: Union[Unset, str] = UNSET if not isinstance(created_dt_lte, Unset): - json_created_dt_lte = created_dt_lte.isoformat() if created_dt_lte 
else None + json_created_dt_lte = created_dt_lte.isoformat() - json_exclude_fields: Union[Unset, None, List[str]] = UNSET + params["created_dt__lte"] = json_created_dt_lte + + json_exclude_fields: Union[Unset, list[str]] = UNSET if not isinstance(exclude_fields, Unset): - if exclude_fields is None: - json_exclude_fields = None - else: - json_exclude_fields = exclude_fields + json_exclude_fields = exclude_fields - json_include_fields: Union[Unset, None, List[str]] = UNSET + params["exclude_fields"] = json_exclude_fields + + params["from_upstream"] = from_upstream + + json_include_fields: Union[Unset, list[str]] = UNSET if not isinstance(include_fields, Unset): - if include_fields is None: - json_include_fields = None - else: - json_include_fields = include_fields + json_include_fields = include_fields + + params["include_fields"] = json_include_fields + + params["limit"] = limit - json_updated_dt: Union[Unset, None, str] = UNSET + params["name"] = name + + params["offset"] = offset + + json_updated_dt: Union[Unset, str] = UNSET if not isinstance(updated_dt, Unset): - json_updated_dt = updated_dt.isoformat() if updated_dt else None + json_updated_dt = updated_dt.isoformat() + + params["updated_dt"] = json_updated_dt - json_updated_dt_date: Union[Unset, None, str] = UNSET + json_updated_dt_date: Union[Unset, str] = UNSET if not isinstance(updated_dt_date, Unset): - json_updated_dt_date = updated_dt_date.isoformat() if updated_dt_date else None + json_updated_dt_date = updated_dt_date.isoformat() - json_updated_dt_date_gte: Union[Unset, None, str] = UNSET + params["updated_dt__date"] = json_updated_dt_date + + json_updated_dt_date_gte: Union[Unset, str] = UNSET if not isinstance(updated_dt_date_gte, Unset): - json_updated_dt_date_gte = ( - updated_dt_date_gte.isoformat() if updated_dt_date_gte else None - ) + json_updated_dt_date_gte = updated_dt_date_gte.isoformat() - json_updated_dt_date_lte: Union[Unset, None, str] = UNSET + params["updated_dt__date__gte"] = json_updated_dt_date_gte + + json_updated_dt_date_lte: Union[Unset, str] = UNSET if not isinstance(updated_dt_date_lte, Unset): - json_updated_dt_date_lte = ( - updated_dt_date_lte.isoformat() if updated_dt_date_lte else None - ) + json_updated_dt_date_lte = updated_dt_date_lte.isoformat() + + params["updated_dt__date__lte"] = json_updated_dt_date_lte - json_updated_dt_gt: Union[Unset, None, str] = UNSET + json_updated_dt_gt: Union[Unset, str] = UNSET if not isinstance(updated_dt_gt, Unset): - json_updated_dt_gt = updated_dt_gt.isoformat() if updated_dt_gt else None + json_updated_dt_gt = updated_dt_gt.isoformat() - json_updated_dt_gte: Union[Unset, None, str] = UNSET + params["updated_dt__gt"] = json_updated_dt_gt + + json_updated_dt_gte: Union[Unset, str] = UNSET if not isinstance(updated_dt_gte, Unset): - json_updated_dt_gte = updated_dt_gte.isoformat() if updated_dt_gte else None + json_updated_dt_gte = updated_dt_gte.isoformat() + + params["updated_dt__gte"] = json_updated_dt_gte - json_updated_dt_lt: Union[Unset, None, str] = UNSET + json_updated_dt_lt: Union[Unset, str] = UNSET if not isinstance(updated_dt_lt, Unset): - json_updated_dt_lt = updated_dt_lt.isoformat() if updated_dt_lt else None + json_updated_dt_lt = updated_dt_lt.isoformat() - json_updated_dt_lte: Union[Unset, None, str] = UNSET + params["updated_dt__lt"] = json_updated_dt_lt + + json_updated_dt_lte: Union[Unset, str] = UNSET if not isinstance(updated_dt_lte, Unset): - json_updated_dt_lte = updated_dt_lte.isoformat() if updated_dt_lte else None - - params: 
Dict[str, Any] = { - "affiliation": affiliation, - "created_dt": json_created_dt, - "created_dt__date": json_created_dt_date, - "created_dt__date__gte": json_created_dt_date_gte, - "created_dt__date__lte": json_created_dt_date_lte, - "created_dt__gt": json_created_dt_gt, - "created_dt__gte": json_created_dt_gte, - "created_dt__lt": json_created_dt_lt, - "created_dt__lte": json_created_dt_lte, - "exclude_fields": json_exclude_fields, - "from_upstream": from_upstream, - "include_fields": json_include_fields, - "limit": limit, - "name": name, - "offset": offset, - "updated_dt": json_updated_dt, - "updated_dt__date": json_updated_dt_date, - "updated_dt__date__gte": json_updated_dt_date_gte, - "updated_dt__date__lte": json_updated_dt_date_lte, - "updated_dt__gt": json_updated_dt_gt, - "updated_dt__gte": json_updated_dt_gte, - "updated_dt__lt": json_updated_dt_lt, - "updated_dt__lte": json_updated_dt_lte, - "uuid": uuid, - } + json_updated_dt_lte = updated_dt_lte.isoformat() + + params["updated_dt__lte"] = json_updated_dt_lte + + json_uuid: Union[Unset, str] = UNSET + if not isinstance(uuid, Unset): + json_uuid = str(uuid) + + params["uuid"] = json_uuid + params = {k: v for k, v in params.items() if v is not UNSET and v is not None} - return { - "url": url, - "headers": headers, + _kwargs: dict[str, Any] = { + "url": f"{client.base_url}//osidb/api/v1/flaws/{flaw_id}/acknowledgments".format( + flaw_id=flaw_id, + ), "params": params, } + return _kwargs + def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[OsidbApiV1FlawsAcknowledgmentsListResponse200]: if response.status_code == 200: + # } _response_200 = response.json() response_200: OsidbApiV1FlawsAcknowledgmentsListResponse200 if isinstance(_response_200, Unset): response_200 = UNSET else: - response_200 = OsidbApiV1FlawsAcknowledgmentsListResponse200.from_dict( - _response_200 - ) + response_200 = OsidbApiV1FlawsAcknowledgmentsListResponse200.from_dict(_response_200) return response_200 - return None def _build_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[OsidbApiV1FlawsAcknowledgmentsListResponse200]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) def sync_detailed( - flaw_id: str, + flaw_id: UUID, *, client: AuthenticatedClient, - affiliation: Union[Unset, None, str] = UNSET, - created_dt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_date: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - from_upstream: Union[Unset, None, bool] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, - limit: Union[Unset, None, int] = UNSET, - name: Union[Unset, None, str] = UNSET, - offset: Union[Unset, None, int] = UNSET, - updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_date: 
Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - uuid: Union[Unset, None, str] = UNSET, + affiliation: Union[Unset, str] = UNSET, + created_dt: Union[Unset, datetime.datetime] = UNSET, + created_dt_date: Union[Unset, datetime.date] = UNSET, + created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + from_upstream: Union[Unset, bool] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, + limit: Union[Unset, int] = UNSET, + name: Union[Unset, str] = UNSET, + offset: Union[Unset, int] = UNSET, + updated_dt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_date: Union[Unset, datetime.date] = UNSET, + updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + uuid: Union[Unset, UUID] = UNSET, ) -> Response[OsidbApiV1FlawsAcknowledgmentsListResponse200]: + """ + Args: + flaw_id (UUID): + affiliation (Union[Unset, str]): + created_dt (Union[Unset, datetime.datetime]): + created_dt_date (Union[Unset, datetime.date]): + created_dt_date_gte (Union[Unset, datetime.date]): + created_dt_date_lte (Union[Unset, datetime.date]): + created_dt_gt (Union[Unset, datetime.datetime]): + created_dt_gte (Union[Unset, datetime.datetime]): + created_dt_lt (Union[Unset, datetime.datetime]): + created_dt_lte (Union[Unset, datetime.datetime]): + exclude_fields (Union[Unset, list[str]]): + from_upstream (Union[Unset, bool]): + include_fields (Union[Unset, list[str]]): + limit (Union[Unset, int]): + name (Union[Unset, str]): + offset (Union[Unset, int]): + updated_dt (Union[Unset, datetime.datetime]): + updated_dt_date (Union[Unset, datetime.date]): + updated_dt_date_gte (Union[Unset, datetime.date]): + updated_dt_date_lte (Union[Unset, datetime.date]): + updated_dt_gt (Union[Unset, datetime.datetime]): + updated_dt_gte (Union[Unset, datetime.datetime]): + updated_dt_lt (Union[Unset, datetime.datetime]): + updated_dt_lte (Union[Unset, datetime.datetime]): + uuid (Union[Unset, UUID]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[OsidbApiV1FlawsAcknowledgmentsListResponse200] + """ + kwargs = _get_kwargs( flaw_id=flaw_id, client=client, @@ -288,39 +334,73 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( - flaw_id: str, + flaw_id: UUID, *, client: AuthenticatedClient, - affiliation: Union[Unset, None, str] = UNSET, - created_dt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_date: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - from_upstream: Union[Unset, None, bool] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, - limit: Union[Unset, None, int] = UNSET, - name: Union[Unset, None, str] = UNSET, - offset: Union[Unset, None, int] = UNSET, - updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - uuid: Union[Unset, None, str] = UNSET, + affiliation: Union[Unset, str] = UNSET, + created_dt: Union[Unset, datetime.datetime] = UNSET, + created_dt_date: Union[Unset, datetime.date] = UNSET, + created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + from_upstream: Union[Unset, bool] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, + limit: Union[Unset, int] = UNSET, + name: Union[Unset, str] = UNSET, + offset: Union[Unset, int] = UNSET, + updated_dt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_date: Union[Unset, datetime.date] = UNSET, + updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + uuid: Union[Unset, UUID] = UNSET, ) -> Optional[OsidbApiV1FlawsAcknowledgmentsListResponse200]: - """ """ + """ + Args: + flaw_id (UUID): + affiliation (Union[Unset, str]): + created_dt (Union[Unset, datetime.datetime]): + created_dt_date (Union[Unset, datetime.date]): + created_dt_date_gte (Union[Unset, datetime.date]): + created_dt_date_lte (Union[Unset, datetime.date]): + created_dt_gt (Union[Unset, datetime.datetime]): + created_dt_gte (Union[Unset, datetime.datetime]): + created_dt_lt (Union[Unset, datetime.datetime]): + 
created_dt_lte (Union[Unset, datetime.datetime]): + exclude_fields (Union[Unset, list[str]]): + from_upstream (Union[Unset, bool]): + include_fields (Union[Unset, list[str]]): + limit (Union[Unset, int]): + name (Union[Unset, str]): + offset (Union[Unset, int]): + updated_dt (Union[Unset, datetime.datetime]): + updated_dt_date (Union[Unset, datetime.date]): + updated_dt_date_gte (Union[Unset, datetime.date]): + updated_dt_date_lte (Union[Unset, datetime.date]): + updated_dt_gt (Union[Unset, datetime.datetime]): + updated_dt_gte (Union[Unset, datetime.datetime]): + updated_dt_lt (Union[Unset, datetime.datetime]): + updated_dt_lte (Union[Unset, datetime.datetime]): + uuid (Union[Unset, UUID]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + OsidbApiV1FlawsAcknowledgmentsListResponse200 + """ return sync_detailed( flaw_id=flaw_id, @@ -352,35 +432,71 @@ def sync( ).parsed -async def async_detailed( - flaw_id: str, +async def asyncio_detailed( + flaw_id: UUID, *, client: AuthenticatedClient, - affiliation: Union[Unset, None, str] = UNSET, - created_dt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_date: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - from_upstream: Union[Unset, None, bool] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, - limit: Union[Unset, None, int] = UNSET, - name: Union[Unset, None, str] = UNSET, - offset: Union[Unset, None, int] = UNSET, - updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - uuid: Union[Unset, None, str] = UNSET, + affiliation: Union[Unset, str] = UNSET, + created_dt: Union[Unset, datetime.datetime] = UNSET, + created_dt_date: Union[Unset, datetime.date] = UNSET, + created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + from_upstream: Union[Unset, bool] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, + limit: Union[Unset, int] = UNSET, + name: Union[Unset, str] = UNSET, + offset: Union[Unset, int] = UNSET, + updated_dt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_date: Union[Unset, datetime.date] = UNSET, + updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + updated_dt_date_lte: Union[Unset, 
datetime.date] = UNSET, + updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + uuid: Union[Unset, UUID] = UNSET, ) -> Response[OsidbApiV1FlawsAcknowledgmentsListResponse200]: + """ + Args: + flaw_id (UUID): + affiliation (Union[Unset, str]): + created_dt (Union[Unset, datetime.datetime]): + created_dt_date (Union[Unset, datetime.date]): + created_dt_date_gte (Union[Unset, datetime.date]): + created_dt_date_lte (Union[Unset, datetime.date]): + created_dt_gt (Union[Unset, datetime.datetime]): + created_dt_gte (Union[Unset, datetime.datetime]): + created_dt_lt (Union[Unset, datetime.datetime]): + created_dt_lte (Union[Unset, datetime.datetime]): + exclude_fields (Union[Unset, list[str]]): + from_upstream (Union[Unset, bool]): + include_fields (Union[Unset, list[str]]): + limit (Union[Unset, int]): + name (Union[Unset, str]): + offset (Union[Unset, int]): + updated_dt (Union[Unset, datetime.datetime]): + updated_dt_date (Union[Unset, datetime.date]): + updated_dt_date_gte (Union[Unset, datetime.date]): + updated_dt_date_lte (Union[Unset, datetime.date]): + updated_dt_gt (Union[Unset, datetime.datetime]): + updated_dt_gte (Union[Unset, datetime.datetime]): + updated_dt_lt (Union[Unset, datetime.datetime]): + updated_dt_lte (Union[Unset, datetime.datetime]): + uuid (Union[Unset, UUID]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1FlawsAcknowledgmentsListResponse200] + """ + kwargs = _get_kwargs( flaw_id=flaw_id, client=client, @@ -418,42 +534,76 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( - flaw_id: str, +async def asyncio( + flaw_id: UUID, *, client: AuthenticatedClient, - affiliation: Union[Unset, None, str] = UNSET, - created_dt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_date: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - from_upstream: Union[Unset, None, bool] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, - limit: Union[Unset, None, int] = UNSET, - name: Union[Unset, None, str] = UNSET, - offset: Union[Unset, None, int] = UNSET, - updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - uuid: Union[Unset, None, str] = UNSET, + affiliation: 
Union[Unset, str] = UNSET, + created_dt: Union[Unset, datetime.datetime] = UNSET, + created_dt_date: Union[Unset, datetime.date] = UNSET, + created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + from_upstream: Union[Unset, bool] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, + limit: Union[Unset, int] = UNSET, + name: Union[Unset, str] = UNSET, + offset: Union[Unset, int] = UNSET, + updated_dt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_date: Union[Unset, datetime.date] = UNSET, + updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + uuid: Union[Unset, UUID] = UNSET, ) -> Optional[OsidbApiV1FlawsAcknowledgmentsListResponse200]: - """ """ + """ + Args: + flaw_id (UUID): + affiliation (Union[Unset, str]): + created_dt (Union[Unset, datetime.datetime]): + created_dt_date (Union[Unset, datetime.date]): + created_dt_date_gte (Union[Unset, datetime.date]): + created_dt_date_lte (Union[Unset, datetime.date]): + created_dt_gt (Union[Unset, datetime.datetime]): + created_dt_gte (Union[Unset, datetime.datetime]): + created_dt_lt (Union[Unset, datetime.datetime]): + created_dt_lte (Union[Unset, datetime.datetime]): + exclude_fields (Union[Unset, list[str]]): + from_upstream (Union[Unset, bool]): + include_fields (Union[Unset, list[str]]): + limit (Union[Unset, int]): + name (Union[Unset, str]): + offset (Union[Unset, int]): + updated_dt (Union[Unset, datetime.datetime]): + updated_dt_date (Union[Unset, datetime.date]): + updated_dt_date_gte (Union[Unset, datetime.date]): + updated_dt_date_lte (Union[Unset, datetime.date]): + updated_dt_gt (Union[Unset, datetime.datetime]): + updated_dt_gte (Union[Unset, datetime.datetime]): + updated_dt_lt (Union[Unset, datetime.datetime]): + updated_dt_lte (Union[Unset, datetime.datetime]): + uuid (Union[Unset, UUID]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + OsidbApiV1FlawsAcknowledgmentsListResponse200 + """ return ( - await async_detailed( + await asyncio_detailed( flaw_id=flaw_id, client=client, affiliation=affiliation, diff --git a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_acknowledgments_retrieve.py b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_acknowledgments_retrieve.py index c1c926b..1eee9b8 100644 --- a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_acknowledgments_retrieve.py +++ b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_acknowledgments_retrieve.py @@ -1,98 +1,105 @@ -from typing import Any, Dict, List, Optional, Union +from http import HTTPStatus +from typing import Any, Optional, Union +from uuid import UUID import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.osidb_api_v1_flaws_acknowledgments_retrieve_response_200 import ( OsidbApiV1FlawsAcknowledgmentsRetrieveResponse200, ) from ...types import UNSET, Response, Unset QUERY_PARAMS = { - "exclude_fields": List[str], - "include_fields": List[str], + "exclude_fields": list[str], + "include_fields": list[str], } def _get_kwargs( - flaw_id: str, + flaw_id: UUID, id: str, *, client: AuthenticatedClient, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, -) -> Dict[str, Any]: - url = "{}/osidb/api/v1/flaws/{flaw_id}/acknowledgments/{id}".format( - client.base_url, - flaw_id=flaw_id, - id=id, - ) + exclude_fields: Union[Unset, list[str]] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, +) -> dict[str, Any]: + params: dict[str, Any] = {} - headers: Dict[str, Any] = client.get_headers() - - json_exclude_fields: Union[Unset, None, List[str]] = UNSET + json_exclude_fields: Union[Unset, list[str]] = UNSET if not isinstance(exclude_fields, Unset): - if exclude_fields is None: - json_exclude_fields = None - else: - json_exclude_fields = exclude_fields + json_exclude_fields = exclude_fields + + params["exclude_fields"] = json_exclude_fields - json_include_fields: Union[Unset, None, List[str]] = UNSET + json_include_fields: Union[Unset, list[str]] = UNSET if not isinstance(include_fields, Unset): - if include_fields is None: - json_include_fields = None - else: - json_include_fields = include_fields + json_include_fields = include_fields + + params["include_fields"] = json_include_fields - params: Dict[str, Any] = { - "exclude_fields": json_exclude_fields, - "include_fields": json_include_fields, - } params = {k: v for k, v in params.items() if v is not UNSET and v is not None} - return { - "url": url, - "headers": headers, + _kwargs: dict[str, Any] = { + "url": f"{client.base_url}//osidb/api/v1/flaws/{flaw_id}/acknowledgments/{id}".format( + flaw_id=flaw_id, + id=id, + ), "params": params, } + return _kwargs + def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[OsidbApiV1FlawsAcknowledgmentsRetrieveResponse200]: if response.status_code == 200: + # } _response_200 = response.json() response_200: OsidbApiV1FlawsAcknowledgmentsRetrieveResponse200 if isinstance(_response_200, Unset): response_200 = UNSET else: - response_200 = OsidbApiV1FlawsAcknowledgmentsRetrieveResponse200.from_dict( - _response_200 - ) + response_200 = OsidbApiV1FlawsAcknowledgmentsRetrieveResponse200.from_dict(_response_200) return response_200 - return None def _build_response( - *, 
response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[OsidbApiV1FlawsAcknowledgmentsRetrieveResponse200]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) def sync_detailed( - flaw_id: str, + flaw_id: UUID, id: str, *, client: AuthenticatedClient, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, ) -> Response[OsidbApiV1FlawsAcknowledgmentsRetrieveResponse200]: + """ + Args: + flaw_id (UUID): + id (str): + exclude_fields (Union[Unset, list[str]]): + include_fields (Union[Unset, list[str]]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1FlawsAcknowledgmentsRetrieveResponse200] + """ + kwargs = _get_kwargs( flaw_id=flaw_id, id=id, @@ -109,18 +116,31 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( - flaw_id: str, + flaw_id: UUID, id: str, *, client: AuthenticatedClient, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, ) -> Optional[OsidbApiV1FlawsAcknowledgmentsRetrieveResponse200]: - """ """ + """ + Args: + flaw_id (UUID): + id (str): + exclude_fields (Union[Unset, list[str]]): + include_fields (Union[Unset, list[str]]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + OsidbApiV1FlawsAcknowledgmentsRetrieveResponse200 + """ return sync_detailed( flaw_id=flaw_id, @@ -131,14 +151,29 @@ def sync( ).parsed -async def async_detailed( - flaw_id: str, +async def asyncio_detailed( + flaw_id: UUID, id: str, *, client: AuthenticatedClient, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, ) -> Response[OsidbApiV1FlawsAcknowledgmentsRetrieveResponse200]: + """ + Args: + flaw_id (UUID): + id (str): + exclude_fields (Union[Unset, list[str]]): + include_fields (Union[Unset, list[str]]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[OsidbApiV1FlawsAcknowledgmentsRetrieveResponse200] + """ + kwargs = _get_kwargs( flaw_id=flaw_id, id=id, @@ -155,21 +190,34 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( - flaw_id: str, +async def asyncio( + flaw_id: UUID, id: str, *, client: AuthenticatedClient, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, ) -> Optional[OsidbApiV1FlawsAcknowledgmentsRetrieveResponse200]: - """ """ + """ + Args: + flaw_id (UUID): + id (str): + exclude_fields (Union[Unset, list[str]]): + include_fields (Union[Unset, list[str]]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + OsidbApiV1FlawsAcknowledgmentsRetrieveResponse200 + """ return ( - await async_detailed( + await asyncio_detailed( flaw_id=flaw_id, id=id, client=client, diff --git a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_acknowledgments_update.py b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_acknowledgments_update.py index 02d8536..139fa86 100644 --- a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_acknowledgments_update.py +++ b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_acknowledgments_update.py @@ -1,8 +1,10 @@ -from typing import Any, Dict, Optional +from http import HTTPStatus +from typing import Any, Optional, Union +from uuid import UUID import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.flaw_acknowledgment_put import FlawAcknowledgmentPut from ...models.osidb_api_v1_flaws_acknowledgments_update_response_200 import ( OsidbApiV1FlawsAcknowledgmentsUpdateResponse200, @@ -10,85 +12,101 @@ from ...types import UNSET, Response, Unset QUERY_PARAMS = {} + REQUEST_BODY_TYPE = FlawAcknowledgmentPut def _get_kwargs( - flaw_id: str, + flaw_id: UUID, id: str, *, client: AuthenticatedClient, - form_data: FlawAcknowledgmentPut, - multipart_data: FlawAcknowledgmentPut, - json_body: FlawAcknowledgmentPut, -) -> Dict[str, Any]: - url = "{}/osidb/api/v1/flaws/{flaw_id}/acknowledgments/{id}".format( - client.base_url, - flaw_id=flaw_id, - id=id, - ) - - headers: Dict[str, Any] = client.get_headers() + body: Union[ + FlawAcknowledgmentPut, + FlawAcknowledgmentPut, + FlawAcknowledgmentPut, + ], +) -> dict[str, Any]: + headers: dict[str, Any] = client.get_headers() + + _kwargs: dict[str, Any] = { + "url": f"{client.base_url}//osidb/api/v1/flaws/{flaw_id}/acknowledgments/{id}".format( + flaw_id=flaw_id, + id=id, + ), + } - json_json_body: Dict[str, Any] = UNSET - if not isinstance(json_body, Unset): - json_body.to_dict() + if isinstance(body, FlawAcknowledgmentPut): + _json_body: dict[str, Any] = UNSET + if not isinstance(body, Unset): + _json_body = body.to_dict() - multipart_multipart_data: Dict[str, Any] = UNSET - if not isinstance(multipart_data, Unset): - multipart_data.to_multipart() + _kwargs["json"] = _json_body + headers["Content-Type"] = "application/json" - return { - "url": url, - "headers": headers, - "json": form_data.to_dict(), - } + _kwargs["headers"] = headers + 
return _kwargs def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[OsidbApiV1FlawsAcknowledgmentsUpdateResponse200]: if response.status_code == 200: + # } _response_200 = response.json() response_200: OsidbApiV1FlawsAcknowledgmentsUpdateResponse200 if isinstance(_response_200, Unset): response_200 = UNSET else: - response_200 = OsidbApiV1FlawsAcknowledgmentsUpdateResponse200.from_dict( - _response_200 - ) + response_200 = OsidbApiV1FlawsAcknowledgmentsUpdateResponse200.from_dict(_response_200) return response_200 - return None def _build_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[OsidbApiV1FlawsAcknowledgmentsUpdateResponse200]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) def sync_detailed( - flaw_id: str, + flaw_id: UUID, id: str, *, client: AuthenticatedClient, - form_data: FlawAcknowledgmentPut, - multipart_data: FlawAcknowledgmentPut, - json_body: FlawAcknowledgmentPut, + body: Union[ + FlawAcknowledgmentPut, + FlawAcknowledgmentPut, + FlawAcknowledgmentPut, + ], ) -> Response[OsidbApiV1FlawsAcknowledgmentsUpdateResponse200]: + """ + Args: + flaw_id (UUID): + id (str): + bugzilla_api_key (str): + body (FlawAcknowledgmentPut): FlawAcknowledgment serializer + body (FlawAcknowledgmentPut): FlawAcknowledgment serializer + body (FlawAcknowledgmentPut): FlawAcknowledgment serializer + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1FlawsAcknowledgmentsUpdateResponse200] + """ + kwargs = _get_kwargs( flaw_id=flaw_id, id=id, client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) response = requests.put( @@ -99,46 +117,78 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( - flaw_id: str, + flaw_id: UUID, id: str, *, client: AuthenticatedClient, - form_data: FlawAcknowledgmentPut, - multipart_data: FlawAcknowledgmentPut, - json_body: FlawAcknowledgmentPut, + body: Union[ + FlawAcknowledgmentPut, + FlawAcknowledgmentPut, + FlawAcknowledgmentPut, + ], ) -> Optional[OsidbApiV1FlawsAcknowledgmentsUpdateResponse200]: - """ """ + """ + Args: + flaw_id (UUID): + id (str): + bugzilla_api_key (str): + body (FlawAcknowledgmentPut): FlawAcknowledgment serializer + body (FlawAcknowledgmentPut): FlawAcknowledgment serializer + body (FlawAcknowledgmentPut): FlawAcknowledgment serializer + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + OsidbApiV1FlawsAcknowledgmentsUpdateResponse200 + """ return sync_detailed( flaw_id=flaw_id, id=id, client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ).parsed -async def async_detailed( - flaw_id: str, +async def asyncio_detailed( + flaw_id: UUID, id: str, *, client: AuthenticatedClient, - form_data: FlawAcknowledgmentPut, - multipart_data: FlawAcknowledgmentPut, - json_body: FlawAcknowledgmentPut, + body: Union[ + FlawAcknowledgmentPut, + FlawAcknowledgmentPut, + FlawAcknowledgmentPut, + ], ) -> Response[OsidbApiV1FlawsAcknowledgmentsUpdateResponse200]: + """ + Args: + flaw_id (UUID): + id (str): + bugzilla_api_key (str): + body (FlawAcknowledgmentPut): FlawAcknowledgment serializer + body (FlawAcknowledgmentPut): FlawAcknowledgment serializer + body (FlawAcknowledgmentPut): FlawAcknowledgment serializer + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1FlawsAcknowledgmentsUpdateResponse200] + """ + kwargs = _get_kwargs( flaw_id=flaw_id, id=id, client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) async with client.get_async_session().put( @@ -149,27 +199,42 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( - flaw_id: str, +async def asyncio( + flaw_id: UUID, id: str, *, client: AuthenticatedClient, - form_data: FlawAcknowledgmentPut, - multipart_data: FlawAcknowledgmentPut, - json_body: FlawAcknowledgmentPut, + body: Union[ + FlawAcknowledgmentPut, + FlawAcknowledgmentPut, + FlawAcknowledgmentPut, + ], ) -> Optional[OsidbApiV1FlawsAcknowledgmentsUpdateResponse200]: - """ """ + """ + Args: + flaw_id (UUID): + id (str): + bugzilla_api_key (str): + body (FlawAcknowledgmentPut): FlawAcknowledgment serializer + body (FlawAcknowledgmentPut): FlawAcknowledgment serializer + body (FlawAcknowledgmentPut): FlawAcknowledgment serializer + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + OsidbApiV1FlawsAcknowledgmentsUpdateResponse200 + """ return ( - await async_detailed( + await asyncio_detailed( flaw_id=flaw_id, id=id, client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) ).parsed diff --git a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_comments_create.py b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_comments_create.py index 4533ee3..00c2206 100644 --- a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_comments_create.py +++ b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_comments_create.py @@ -1,8 +1,9 @@ -from typing import Any, Dict, Optional +from http import HTTPStatus +from typing import Any, Optional, Union import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.flaw_comment_post import FlawCommentPost from ...models.osidb_api_v1_flaws_comments_create_response_201 import ( OsidbApiV1FlawsCommentsCreateResponse201, @@ -10,6 +11,7 @@ from ...types import UNSET, Response, Unset QUERY_PARAMS = {} + REQUEST_BODY_TYPE = FlawCommentPost @@ -17,57 +19,55 @@ def _get_kwargs( flaw_id: str, *, client: AuthenticatedClient, - form_data: FlawCommentPost, - multipart_data: FlawCommentPost, - json_body: FlawCommentPost, -) -> Dict[str, Any]: - url = "{}/osidb/api/v1/flaws/{flaw_id}/comments".format( - client.base_url, - flaw_id=flaw_id, - ) + body: Union[ + FlawCommentPost, + FlawCommentPost, + FlawCommentPost, + ], +) -> dict[str, Any]: + headers: dict[str, Any] = client.get_headers() + + _kwargs: dict[str, Any] = { + "url": f"{client.base_url}//osidb/api/v1/flaws/{flaw_id}/comments".format( + flaw_id=flaw_id, + ), + } - headers: Dict[str, Any] = client.get_headers() + if isinstance(body, FlawCommentPost): + _json_body: dict[str, Any] = UNSET + if not isinstance(body, Unset): + _json_body = body.to_dict() - json_json_body: Dict[str, Any] = UNSET - if not isinstance(json_body, Unset): - json_body.to_dict() + _kwargs["json"] = _json_body + headers["Content-Type"] = "application/json" - multipart_multipart_data: Dict[str, Any] = UNSET - if not isinstance(multipart_data, Unset): - multipart_data.to_multipart() - - return { - "url": url, - "headers": headers, - "json": form_data.to_dict(), - } + _kwargs["headers"] = headers + return _kwargs def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[OsidbApiV1FlawsCommentsCreateResponse201]: if response.status_code == 201: + # } _response_201 = response.json() response_201: OsidbApiV1FlawsCommentsCreateResponse201 if isinstance(_response_201, Unset): response_201 = UNSET else: - response_201 = OsidbApiV1FlawsCommentsCreateResponse201.from_dict( - _response_201 - ) + response_201 = OsidbApiV1FlawsCommentsCreateResponse201.from_dict(_response_201) return response_201 - return None def _build_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[OsidbApiV1FlawsCommentsCreateResponse201]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) @@ -75,16 +75,34 @@ def sync_detailed( flaw_id: str, *, client: AuthenticatedClient, - form_data: 
FlawCommentPost, - multipart_data: FlawCommentPost, - json_body: FlawCommentPost, + body: Union[ + FlawCommentPost, + FlawCommentPost, + FlawCommentPost, + ], ) -> Response[OsidbApiV1FlawsCommentsCreateResponse201]: + """Create a new comment for a given flaw. Beware that freshly created comments are not guaranteed to + keep their original UUIDs, especially if multiple comments are created simultaneously. + + Args: + flaw_id (str): + bugzilla_api_key (str): + body (FlawCommentPost): FlawComment serializer for use by flaw_comments endpoint + body (FlawCommentPost): FlawComment serializer for use by flaw_comments endpoint + body (FlawCommentPost): FlawComment serializer for use by flaw_comments endpoint + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1FlawsCommentsCreateResponse201] + """ + kwargs = _get_kwargs( flaw_id=flaw_id, client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) response = requests.post( @@ -95,42 +113,76 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( flaw_id: str, *, client: AuthenticatedClient, - form_data: FlawCommentPost, - multipart_data: FlawCommentPost, - json_body: FlawCommentPost, + body: Union[ + FlawCommentPost, + FlawCommentPost, + FlawCommentPost, + ], ) -> Optional[OsidbApiV1FlawsCommentsCreateResponse201]: - """Create a new comment for a given flaw. Beware that freshly created comments are not guaranteed to keep their original UUIDs, especially if multiple comments are created simultaneously.""" + """Create a new comment for a given flaw. Beware that freshly created comments are not guaranteed to + keep their original UUIDs, especially if multiple comments are created simultaneously. + + Args: + flaw_id (str): + bugzilla_api_key (str): + body (FlawCommentPost): FlawComment serializer for use by flaw_comments endpoint + body (FlawCommentPost): FlawComment serializer for use by flaw_comments endpoint + body (FlawCommentPost): FlawComment serializer for use by flaw_comments endpoint + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + OsidbApiV1FlawsCommentsCreateResponse201 + """ return sync_detailed( flaw_id=flaw_id, client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ).parsed -async def async_detailed( +async def asyncio_detailed( flaw_id: str, *, client: AuthenticatedClient, - form_data: FlawCommentPost, - multipart_data: FlawCommentPost, - json_body: FlawCommentPost, + body: Union[ + FlawCommentPost, + FlawCommentPost, + FlawCommentPost, + ], ) -> Response[OsidbApiV1FlawsCommentsCreateResponse201]: + """Create a new comment for a given flaw. Beware that freshly created comments are not guaranteed to + keep their original UUIDs, especially if multiple comments are created simultaneously. 
+ + Args: + flaw_id (str): + bugzilla_api_key (str): + body (FlawCommentPost): FlawComment serializer for use by flaw_comments endpoint + body (FlawCommentPost): FlawComment serializer for use by flaw_comments endpoint + body (FlawCommentPost): FlawComment serializer for use by flaw_comments endpoint + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1FlawsCommentsCreateResponse201] + """ + kwargs = _get_kwargs( flaw_id=flaw_id, client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) async with client.get_async_session().post( @@ -141,25 +193,41 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( +async def asyncio( flaw_id: str, *, client: AuthenticatedClient, - form_data: FlawCommentPost, - multipart_data: FlawCommentPost, - json_body: FlawCommentPost, + body: Union[ + FlawCommentPost, + FlawCommentPost, + FlawCommentPost, + ], ) -> Optional[OsidbApiV1FlawsCommentsCreateResponse201]: - """Create a new comment for a given flaw. Beware that freshly created comments are not guaranteed to keep their original UUIDs, especially if multiple comments are created simultaneously.""" + """Create a new comment for a given flaw. Beware that freshly created comments are not guaranteed to + keep their original UUIDs, especially if multiple comments are created simultaneously. + + Args: + flaw_id (str): + bugzilla_api_key (str): + body (FlawCommentPost): FlawComment serializer for use by flaw_comments endpoint + body (FlawCommentPost): FlawComment serializer for use by flaw_comments endpoint + body (FlawCommentPost): FlawComment serializer for use by flaw_comments endpoint + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + OsidbApiV1FlawsCommentsCreateResponse201 + """ return ( - await async_detailed( + await asyncio_detailed( flaw_id=flaw_id, client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) ).parsed diff --git a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_comments_list.py b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_comments_list.py index 0d1c9b2..5ad4eb6 100644 --- a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_comments_list.py +++ b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_comments_list.py @@ -1,8 +1,10 @@ -from typing import Any, Dict, List, Optional, Union +from http import HTTPStatus +from typing import Any, Optional, Union +from uuid import UUID import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.osidb_api_v1_flaws_comments_list_response_200 import ( OsidbApiV1FlawsCommentsListResponse200, ) @@ -10,13 +12,13 @@ QUERY_PARAMS = { "creator": str, - "exclude_fields": List[str], + "exclude_fields": list[str], "external_system_id": str, - "include_fields": List[str], + "include_fields": list[str], "limit": int, "offset": int, "order": int, - "uuid": str, + "uuid": UUID, } @@ -24,80 +26,80 @@ def _get_kwargs( flaw_id: str, *, client: AuthenticatedClient, - creator: Union[Unset, None, str] = UNSET, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - external_system_id: Union[Unset, None, str] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, - limit: Union[Unset, None, int] = UNSET, - offset: Union[Unset, None, int] = UNSET, - order: Union[Unset, None, int] = UNSET, - uuid: Union[Unset, None, str] = UNSET, -) -> Dict[str, Any]: - url = "{}/osidb/api/v1/flaws/{flaw_id}/comments".format( - client.base_url, - flaw_id=flaw_id, - ) + creator: Union[Unset, str] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + external_system_id: Union[Unset, str] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, + limit: Union[Unset, int] = UNSET, + offset: Union[Unset, int] = UNSET, + order: Union[Unset, int] = UNSET, + uuid: Union[Unset, UUID] = UNSET, +) -> dict[str, Any]: + params: dict[str, Any] = {} - headers: Dict[str, Any] = client.get_headers() + params["creator"] = creator - json_exclude_fields: Union[Unset, None, List[str]] = UNSET + json_exclude_fields: Union[Unset, list[str]] = UNSET if not isinstance(exclude_fields, Unset): - if exclude_fields is None: - json_exclude_fields = None - else: - json_exclude_fields = exclude_fields + json_exclude_fields = exclude_fields + + params["exclude_fields"] = json_exclude_fields - json_include_fields: Union[Unset, None, List[str]] = UNSET + params["external_system_id"] = external_system_id + + json_include_fields: Union[Unset, list[str]] = UNSET if not isinstance(include_fields, Unset): - if include_fields is None: - json_include_fields = None - else: - json_include_fields = include_fields - - params: Dict[str, Any] = { - "creator": creator, - "exclude_fields": json_exclude_fields, - "external_system_id": external_system_id, - "include_fields": json_include_fields, - "limit": limit, - "offset": offset, - "order": order, - "uuid": uuid, - } + json_include_fields = include_fields + + params["include_fields"] = json_include_fields + + params["limit"] = limit + + params["offset"] = offset + + params["order"] = order + + json_uuid: Union[Unset, str] = UNSET + if not isinstance(uuid, Unset): + json_uuid 
= str(uuid) + + params["uuid"] = json_uuid + params = {k: v for k, v in params.items() if v is not UNSET and v is not None} - return { - "url": url, - "headers": headers, + _kwargs: dict[str, Any] = { + "url": f"{client.base_url}//osidb/api/v1/flaws/{flaw_id}/comments".format( + flaw_id=flaw_id, + ), "params": params, } + return _kwargs + def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[OsidbApiV1FlawsCommentsListResponse200]: if response.status_code == 200: + # } _response_200 = response.json() response_200: OsidbApiV1FlawsCommentsListResponse200 if isinstance(_response_200, Unset): response_200 = UNSET else: - response_200 = OsidbApiV1FlawsCommentsListResponse200.from_dict( - _response_200 - ) + response_200 = OsidbApiV1FlawsCommentsListResponse200.from_dict(_response_200) return response_200 - return None def _build_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[OsidbApiV1FlawsCommentsListResponse200]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) @@ -105,15 +107,37 @@ def sync_detailed( flaw_id: str, *, client: AuthenticatedClient, - creator: Union[Unset, None, str] = UNSET, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - external_system_id: Union[Unset, None, str] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, - limit: Union[Unset, None, int] = UNSET, - offset: Union[Unset, None, int] = UNSET, - order: Union[Unset, None, int] = UNSET, - uuid: Union[Unset, None, str] = UNSET, + creator: Union[Unset, str] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + external_system_id: Union[Unset, str] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, + limit: Union[Unset, int] = UNSET, + offset: Union[Unset, int] = UNSET, + order: Union[Unset, int] = UNSET, + uuid: Union[Unset, UUID] = UNSET, ) -> Response[OsidbApiV1FlawsCommentsListResponse200]: + """List existing comments for a given flaw. Beware that freshly created comments are not guaranteed to + keep their original UUIDs, especially if multiple comments are created simultaneously. + + Args: + flaw_id (str): + creator (Union[Unset, str]): + exclude_fields (Union[Unset, list[str]]): + external_system_id (Union[Unset, str]): + include_fields (Union[Unset, list[str]]): + limit (Union[Unset, int]): + offset (Union[Unset, int]): + order (Union[Unset, int]): + uuid (Union[Unset, UUID]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[OsidbApiV1FlawsCommentsListResponse200] + """ + kwargs = _get_kwargs( flaw_id=flaw_id, client=client, @@ -135,23 +159,43 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( flaw_id: str, *, client: AuthenticatedClient, - creator: Union[Unset, None, str] = UNSET, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - external_system_id: Union[Unset, None, str] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, - limit: Union[Unset, None, int] = UNSET, - offset: Union[Unset, None, int] = UNSET, - order: Union[Unset, None, int] = UNSET, - uuid: Union[Unset, None, str] = UNSET, + creator: Union[Unset, str] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + external_system_id: Union[Unset, str] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, + limit: Union[Unset, int] = UNSET, + offset: Union[Unset, int] = UNSET, + order: Union[Unset, int] = UNSET, + uuid: Union[Unset, UUID] = UNSET, ) -> Optional[OsidbApiV1FlawsCommentsListResponse200]: - """List existing comments for a given flaw. Beware that freshly created comments are not guaranteed to keep their original UUIDs, especially if multiple comments are created simultaneously.""" + """List existing comments for a given flaw. Beware that freshly created comments are not guaranteed to + keep their original UUIDs, especially if multiple comments are created simultaneously. + + Args: + flaw_id (str): + creator (Union[Unset, str]): + exclude_fields (Union[Unset, list[str]]): + external_system_id (Union[Unset, str]): + include_fields (Union[Unset, list[str]]): + limit (Union[Unset, int]): + offset (Union[Unset, int]): + order (Union[Unset, int]): + uuid (Union[Unset, UUID]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + OsidbApiV1FlawsCommentsListResponse200 + """ return sync_detailed( flaw_id=flaw_id, @@ -167,19 +211,41 @@ def sync( ).parsed -async def async_detailed( +async def asyncio_detailed( flaw_id: str, *, client: AuthenticatedClient, - creator: Union[Unset, None, str] = UNSET, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - external_system_id: Union[Unset, None, str] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, - limit: Union[Unset, None, int] = UNSET, - offset: Union[Unset, None, int] = UNSET, - order: Union[Unset, None, int] = UNSET, - uuid: Union[Unset, None, str] = UNSET, + creator: Union[Unset, str] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + external_system_id: Union[Unset, str] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, + limit: Union[Unset, int] = UNSET, + offset: Union[Unset, int] = UNSET, + order: Union[Unset, int] = UNSET, + uuid: Union[Unset, UUID] = UNSET, ) -> Response[OsidbApiV1FlawsCommentsListResponse200]: + """List existing comments for a given flaw. Beware that freshly created comments are not guaranteed to + keep their original UUIDs, especially if multiple comments are created simultaneously. 
+ + Args: + flaw_id (str): + creator (Union[Unset, str]): + exclude_fields (Union[Unset, list[str]]): + external_system_id (Union[Unset, str]): + include_fields (Union[Unset, list[str]]): + limit (Union[Unset, int]): + offset (Union[Unset, int]): + order (Union[Unset, int]): + uuid (Union[Unset, UUID]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1FlawsCommentsListResponse200] + """ + kwargs = _get_kwargs( flaw_id=flaw_id, client=client, @@ -201,26 +267,46 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( +async def asyncio( flaw_id: str, *, client: AuthenticatedClient, - creator: Union[Unset, None, str] = UNSET, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - external_system_id: Union[Unset, None, str] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, - limit: Union[Unset, None, int] = UNSET, - offset: Union[Unset, None, int] = UNSET, - order: Union[Unset, None, int] = UNSET, - uuid: Union[Unset, None, str] = UNSET, + creator: Union[Unset, str] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + external_system_id: Union[Unset, str] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, + limit: Union[Unset, int] = UNSET, + offset: Union[Unset, int] = UNSET, + order: Union[Unset, int] = UNSET, + uuid: Union[Unset, UUID] = UNSET, ) -> Optional[OsidbApiV1FlawsCommentsListResponse200]: - """List existing comments for a given flaw. Beware that freshly created comments are not guaranteed to keep their original UUIDs, especially if multiple comments are created simultaneously.""" + """List existing comments for a given flaw. Beware that freshly created comments are not guaranteed to + keep their original UUIDs, especially if multiple comments are created simultaneously. + + Args: + flaw_id (str): + creator (Union[Unset, str]): + exclude_fields (Union[Unset, list[str]]): + external_system_id (Union[Unset, str]): + include_fields (Union[Unset, list[str]]): + limit (Union[Unset, int]): + offset (Union[Unset, int]): + order (Union[Unset, int]): + uuid (Union[Unset, UUID]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + OsidbApiV1FlawsCommentsListResponse200 + """ return ( - await async_detailed( + await asyncio_detailed( flaw_id=flaw_id, client=client, creator=creator, diff --git a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_comments_retrieve.py b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_comments_retrieve.py index 28f3da9..d0f658c 100644 --- a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_comments_retrieve.py +++ b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_comments_retrieve.py @@ -1,16 +1,17 @@ -from typing import Any, Dict, List, Optional, Union +from http import HTTPStatus +from typing import Any, Optional, Union import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.osidb_api_v1_flaws_comments_retrieve_response_200 import ( OsidbApiV1FlawsCommentsRetrieveResponse200, ) from ...types import UNSET, Response, Unset QUERY_PARAMS = { - "exclude_fields": List[str], - "include_fields": List[str], + "exclude_fields": list[str], + "include_fields": list[str], } @@ -19,69 +20,59 @@ def _get_kwargs( comment_id: str, *, client: AuthenticatedClient, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, -) -> Dict[str, Any]: - url = "{}/osidb/api/v1/flaws/{flaw_id}/comments/{comment_id}".format( - client.base_url, - flaw_id=flaw_id, - comment_id=comment_id, - ) - - headers: Dict[str, Any] = client.get_headers() + exclude_fields: Union[Unset, list[str]] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, +) -> dict[str, Any]: + params: dict[str, Any] = {} - json_exclude_fields: Union[Unset, None, List[str]] = UNSET + json_exclude_fields: Union[Unset, list[str]] = UNSET if not isinstance(exclude_fields, Unset): - if exclude_fields is None: - json_exclude_fields = None - else: - json_exclude_fields = exclude_fields + json_exclude_fields = exclude_fields + + params["exclude_fields"] = json_exclude_fields - json_include_fields: Union[Unset, None, List[str]] = UNSET + json_include_fields: Union[Unset, list[str]] = UNSET if not isinstance(include_fields, Unset): - if include_fields is None: - json_include_fields = None - else: - json_include_fields = include_fields + json_include_fields = include_fields + + params["include_fields"] = json_include_fields - params: Dict[str, Any] = { - "exclude_fields": json_exclude_fields, - "include_fields": json_include_fields, - } params = {k: v for k, v in params.items() if v is not UNSET and v is not None} - return { - "url": url, - "headers": headers, + _kwargs: dict[str, Any] = { + "url": f"{client.base_url}//osidb/api/v1/flaws/{flaw_id}/comments/{comment_id}".format( + flaw_id=flaw_id, + comment_id=comment_id, + ), "params": params, } + return _kwargs + def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[OsidbApiV1FlawsCommentsRetrieveResponse200]: if response.status_code == 200: + # } _response_200 = response.json() response_200: OsidbApiV1FlawsCommentsRetrieveResponse200 if isinstance(_response_200, Unset): response_200 = UNSET else: - response_200 = OsidbApiV1FlawsCommentsRetrieveResponse200.from_dict( - _response_200 - ) + response_200 = OsidbApiV1FlawsCommentsRetrieveResponse200.from_dict(_response_200) return response_200 - return None def _build_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], 
response: requests.Response ) -> Response[OsidbApiV1FlawsCommentsRetrieveResponse200]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) @@ -90,9 +81,26 @@ def sync_detailed( comment_id: str, *, client: AuthenticatedClient, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, ) -> Response[OsidbApiV1FlawsCommentsRetrieveResponse200]: + """Retrieve a single existing comments for a given flaw. Beware that freshly created comments are not + guaranteed to keep their original UUIDs, especially if multiple comments are created simultaneously. + + Args: + flaw_id (str): + comment_id (str): + exclude_fields (Union[Unset, list[str]]): + include_fields (Union[Unset, list[str]]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1FlawsCommentsRetrieveResponse200] + """ + kwargs = _get_kwargs( flaw_id=flaw_id, comment_id=comment_id, @@ -109,7 +117,7 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( @@ -117,10 +125,25 @@ def sync( comment_id: str, *, client: AuthenticatedClient, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, ) -> Optional[OsidbApiV1FlawsCommentsRetrieveResponse200]: - """Retrieve a single existing comments for a given flaw. Beware that freshly created comments are not guaranteed to keep their original UUIDs, especially if multiple comments are created simultaneously.""" + """Retrieve a single existing comments for a given flaw. Beware that freshly created comments are not + guaranteed to keep their original UUIDs, especially if multiple comments are created simultaneously. + + Args: + flaw_id (str): + comment_id (str): + exclude_fields (Union[Unset, list[str]]): + include_fields (Union[Unset, list[str]]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + OsidbApiV1FlawsCommentsRetrieveResponse200 + """ return sync_detailed( flaw_id=flaw_id, @@ -131,14 +154,31 @@ def sync( ).parsed -async def async_detailed( +async def asyncio_detailed( flaw_id: str, comment_id: str, *, client: AuthenticatedClient, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, ) -> Response[OsidbApiV1FlawsCommentsRetrieveResponse200]: + """Retrieve a single existing comments for a given flaw. Beware that freshly created comments are not + guaranteed to keep their original UUIDs, especially if multiple comments are created simultaneously. 
+ + Args: + flaw_id (str): + comment_id (str): + exclude_fields (Union[Unset, list[str]]): + include_fields (Union[Unset, list[str]]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1FlawsCommentsRetrieveResponse200] + """ + kwargs = _get_kwargs( flaw_id=flaw_id, comment_id=comment_id, @@ -155,21 +195,36 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( +async def asyncio( flaw_id: str, comment_id: str, *, client: AuthenticatedClient, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, ) -> Optional[OsidbApiV1FlawsCommentsRetrieveResponse200]: - """Retrieve a single existing comments for a given flaw. Beware that freshly created comments are not guaranteed to keep their original UUIDs, especially if multiple comments are created simultaneously.""" + """Retrieve a single existing comments for a given flaw. Beware that freshly created comments are not + guaranteed to keep their original UUIDs, especially if multiple comments are created simultaneously. + + Args: + flaw_id (str): + comment_id (str): + exclude_fields (Union[Unset, list[str]]): + include_fields (Union[Unset, list[str]]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + OsidbApiV1FlawsCommentsRetrieveResponse200 + """ return ( - await async_detailed( + await asyncio_detailed( flaw_id=flaw_id, comment_id=comment_id, client=client, diff --git a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_create.py b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_create.py index 348fc75..0f00c8a 100644 --- a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_create.py +++ b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_create.py @@ -1,8 +1,9 @@ -from typing import Any, Dict, Optional +from http import HTTPStatus +from typing import Any, Optional, Union import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.flaw_post import FlawPost from ...models.osidb_api_v1_flaws_create_response_201 import ( OsidbApiV1FlawsCreateResponse201, @@ -10,41 +11,42 @@ from ...types import UNSET, Response, Unset QUERY_PARAMS = {} + REQUEST_BODY_TYPE = FlawPost def _get_kwargs( *, client: AuthenticatedClient, - form_data: FlawPost, - multipart_data: FlawPost, - json_body: FlawPost, -) -> Dict[str, Any]: - url = "{}/osidb/api/v1/flaws".format( - client.base_url, - ) + body: Union[ + FlawPost, + FlawPost, + FlawPost, + ], +) -> dict[str, Any]: + headers: dict[str, Any] = client.get_headers() + + _kwargs: dict[str, Any] = { + "url": f"{client.base_url}/osidb/api/v1/flaws", + } - headers: Dict[str, Any] = client.get_headers() + if isinstance(body, FlawPost): + _json_body: dict[str, Any] = UNSET + if not isinstance(body, Unset): + _json_body = body.to_dict() - json_json_body: Dict[str, Any] = UNSET - if not isinstance(json_body, Unset): - json_body.to_dict() + _kwargs["json"] = _json_body + headers["Content-Type"] = "application/json" - multipart_multipart_data: Dict[str, Any] = UNSET - if not isinstance(multipart_data, Unset): - multipart_data.to_multipart() - - return { - "url": url, - "headers": headers, - "json": form_data.to_dict(), - } + _kwargs["headers"] = headers + return _kwargs def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[OsidbApiV1FlawsCreateResponse201]: if response.status_code == 201: + # } _response_201 = response.json() response_201: OsidbApiV1FlawsCreateResponse201 if isinstance(_response_201, Unset): @@ -53,32 +55,47 @@ def _parse_response( response_201 = OsidbApiV1FlawsCreateResponse201.from_dict(_response_201) return response_201 - return None def _build_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[OsidbApiV1FlawsCreateResponse201]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) def sync_detailed( *, client: AuthenticatedClient, - form_data: FlawPost, - multipart_data: FlawPost, - json_body: FlawPost, + body: Union[ + FlawPost, + FlawPost, + FlawPost, + ], ) -> Response[OsidbApiV1FlawsCreateResponse201]: + """ + Args: + bugzilla_api_key (str): + jira_api_key (str): + body (FlawPost): serialize flaw model + body (FlawPost): serialize flaw model + body (FlawPost): serialize flaw model + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. 
+ httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1FlawsCreateResponse201] + """ + kwargs = _get_kwargs( client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) response = requests.post( @@ -89,38 +106,68 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( *, client: AuthenticatedClient, - form_data: FlawPost, - multipart_data: FlawPost, - json_body: FlawPost, + body: Union[ + FlawPost, + FlawPost, + FlawPost, + ], ) -> Optional[OsidbApiV1FlawsCreateResponse201]: - """ """ + """ + Args: + bugzilla_api_key (str): + jira_api_key (str): + body (FlawPost): serialize flaw model + body (FlawPost): serialize flaw model + body (FlawPost): serialize flaw model + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + OsidbApiV1FlawsCreateResponse201 + """ return sync_detailed( client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ).parsed -async def async_detailed( +async def asyncio_detailed( *, client: AuthenticatedClient, - form_data: FlawPost, - multipart_data: FlawPost, - json_body: FlawPost, + body: Union[ + FlawPost, + FlawPost, + FlawPost, + ], ) -> Response[OsidbApiV1FlawsCreateResponse201]: + """ + Args: + bugzilla_api_key (str): + jira_api_key (str): + body (FlawPost): serialize flaw model + body (FlawPost): serialize flaw model + body (FlawPost): serialize flaw model + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1FlawsCreateResponse201] + """ + kwargs = _get_kwargs( client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) async with client.get_async_session().post( @@ -131,23 +178,37 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( +async def asyncio( *, client: AuthenticatedClient, - form_data: FlawPost, - multipart_data: FlawPost, - json_body: FlawPost, + body: Union[ + FlawPost, + FlawPost, + FlawPost, + ], ) -> Optional[OsidbApiV1FlawsCreateResponse201]: - """ """ + """ + Args: + bugzilla_api_key (str): + jira_api_key (str): + body (FlawPost): serialize flaw model + body (FlawPost): serialize flaw model + body (FlawPost): serialize flaw model + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + OsidbApiV1FlawsCreateResponse201 + """ return ( - await async_detailed( + await asyncio_detailed( client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) ).parsed diff --git a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_cvss_scores_create.py b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_cvss_scores_create.py index 775e844..8e46c92 100644 --- a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_cvss_scores_create.py +++ b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_cvss_scores_create.py @@ -1,8 +1,10 @@ -from typing import Any, Dict, Optional +from http import HTTPStatus +from typing import Any, Optional, Union +from uuid import UUID import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.flaw_cvss_post import FlawCVSSPost from ...models.osidb_api_v1_flaws_cvss_scores_create_response_201 import ( OsidbApiV1FlawsCvssScoresCreateResponse201, @@ -10,81 +12,96 @@ from ...types import UNSET, Response, Unset QUERY_PARAMS = {} + REQUEST_BODY_TYPE = FlawCVSSPost def _get_kwargs( - flaw_id: str, + flaw_id: UUID, *, client: AuthenticatedClient, - form_data: FlawCVSSPost, - multipart_data: FlawCVSSPost, - json_body: FlawCVSSPost, -) -> Dict[str, Any]: - url = "{}/osidb/api/v1/flaws/{flaw_id}/cvss_scores".format( - client.base_url, - flaw_id=flaw_id, - ) - - headers: Dict[str, Any] = client.get_headers() + body: Union[ + FlawCVSSPost, + FlawCVSSPost, + FlawCVSSPost, + ], +) -> dict[str, Any]: + headers: dict[str, Any] = client.get_headers() + + _kwargs: dict[str, Any] = { + "url": f"{client.base_url}//osidb/api/v1/flaws/{flaw_id}/cvss_scores".format( + flaw_id=flaw_id, + ), + } - json_json_body: Dict[str, Any] = UNSET - if not isinstance(json_body, Unset): - json_body.to_dict() + if isinstance(body, FlawCVSSPost): + _json_body: dict[str, Any] = UNSET + if not isinstance(body, Unset): + _json_body = body.to_dict() - multipart_multipart_data: Dict[str, Any] = UNSET - if not isinstance(multipart_data, Unset): - multipart_data.to_multipart() + _kwargs["json"] = _json_body + headers["Content-Type"] = "application/json" - return { - "url": url, - "headers": headers, - "json": form_data.to_dict(), - } + _kwargs["headers"] = headers + return _kwargs def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[OsidbApiV1FlawsCvssScoresCreateResponse201]: if response.status_code == 201: + # } _response_201 = response.json() response_201: OsidbApiV1FlawsCvssScoresCreateResponse201 if isinstance(_response_201, Unset): response_201 = UNSET else: - response_201 = OsidbApiV1FlawsCvssScoresCreateResponse201.from_dict( - _response_201 - ) + response_201 = OsidbApiV1FlawsCvssScoresCreateResponse201.from_dict(_response_201) return response_201 - return None def _build_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[OsidbApiV1FlawsCvssScoresCreateResponse201]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) def sync_detailed( - flaw_id: str, + flaw_id: UUID, *, client: AuthenticatedClient, - form_data: FlawCVSSPost, - 
multipart_data: FlawCVSSPost, - json_body: FlawCVSSPost, + body: Union[ + FlawCVSSPost, + FlawCVSSPost, + FlawCVSSPost, + ], ) -> Response[OsidbApiV1FlawsCvssScoresCreateResponse201]: + """ + Args: + flaw_id (UUID): + bugzilla_api_key (str): + body (FlawCVSSPost): FlawCVSS serializer + body (FlawCVSSPost): FlawCVSS serializer + body (FlawCVSSPost): FlawCVSS serializer + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1FlawsCvssScoresCreateResponse201] + """ + kwargs = _get_kwargs( flaw_id=flaw_id, client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) response = requests.post( @@ -95,42 +112,72 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( - flaw_id: str, + flaw_id: UUID, *, client: AuthenticatedClient, - form_data: FlawCVSSPost, - multipart_data: FlawCVSSPost, - json_body: FlawCVSSPost, + body: Union[ + FlawCVSSPost, + FlawCVSSPost, + FlawCVSSPost, + ], ) -> Optional[OsidbApiV1FlawsCvssScoresCreateResponse201]: - """ """ + """ + Args: + flaw_id (UUID): + bugzilla_api_key (str): + body (FlawCVSSPost): FlawCVSS serializer + body (FlawCVSSPost): FlawCVSS serializer + body (FlawCVSSPost): FlawCVSS serializer + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + OsidbApiV1FlawsCvssScoresCreateResponse201 + """ return sync_detailed( flaw_id=flaw_id, client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ).parsed -async def async_detailed( - flaw_id: str, +async def asyncio_detailed( + flaw_id: UUID, *, client: AuthenticatedClient, - form_data: FlawCVSSPost, - multipart_data: FlawCVSSPost, - json_body: FlawCVSSPost, + body: Union[ + FlawCVSSPost, + FlawCVSSPost, + FlawCVSSPost, + ], ) -> Response[OsidbApiV1FlawsCvssScoresCreateResponse201]: + """ + Args: + flaw_id (UUID): + bugzilla_api_key (str): + body (FlawCVSSPost): FlawCVSS serializer + body (FlawCVSSPost): FlawCVSS serializer + body (FlawCVSSPost): FlawCVSS serializer + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[OsidbApiV1FlawsCvssScoresCreateResponse201] + """ + kwargs = _get_kwargs( flaw_id=flaw_id, client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) async with client.get_async_session().post( @@ -141,25 +188,39 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( - flaw_id: str, +async def asyncio( + flaw_id: UUID, *, client: AuthenticatedClient, - form_data: FlawCVSSPost, - multipart_data: FlawCVSSPost, - json_body: FlawCVSSPost, + body: Union[ + FlawCVSSPost, + FlawCVSSPost, + FlawCVSSPost, + ], ) -> Optional[OsidbApiV1FlawsCvssScoresCreateResponse201]: - """ """ + """ + Args: + flaw_id (UUID): + bugzilla_api_key (str): + body (FlawCVSSPost): FlawCVSS serializer + body (FlawCVSSPost): FlawCVSS serializer + body (FlawCVSSPost): FlawCVSS serializer + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + OsidbApiV1FlawsCvssScoresCreateResponse201 + """ return ( - await async_detailed( + await asyncio_detailed( flaw_id=flaw_id, client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) ).parsed diff --git a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_cvss_scores_destroy.py b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_cvss_scores_destroy.py index f60930c..aa27231 100644 --- a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_cvss_scores_destroy.py +++ b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_cvss_scores_destroy.py @@ -1,8 +1,10 @@ -from typing import Any, Dict, Optional +from http import HTTPStatus +from typing import Any, Optional, Union +from uuid import UUID import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.osidb_api_v1_flaws_cvss_scores_destroy_response_200 import ( OsidbApiV1FlawsCvssScoresDestroyResponse200, ) @@ -12,59 +14,71 @@ def _get_kwargs( - flaw_id: str, + flaw_id: UUID, id: str, *, client: AuthenticatedClient, -) -> Dict[str, Any]: - url = "{}/osidb/api/v1/flaws/{flaw_id}/cvss_scores/{id}".format( - client.base_url, - flaw_id=flaw_id, - id=id, - ) +) -> dict[str, Any]: + headers: dict[str, Any] = client.get_headers() - headers: Dict[str, Any] = client.get_headers() - - return { - "url": url, - "headers": headers, + _kwargs: dict[str, Any] = { + "url": f"{client.base_url}//osidb/api/v1/flaws/{flaw_id}/cvss_scores/{id}".format( + flaw_id=flaw_id, + id=id, + ), } + _kwargs["headers"] = headers + return _kwargs + def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[OsidbApiV1FlawsCvssScoresDestroyResponse200]: if response.status_code == 200: + # } _response_200 = response.json() response_200: OsidbApiV1FlawsCvssScoresDestroyResponse200 if isinstance(_response_200, Unset): response_200 = UNSET else: - response_200 = OsidbApiV1FlawsCvssScoresDestroyResponse200.from_dict( - _response_200 - ) + response_200 = OsidbApiV1FlawsCvssScoresDestroyResponse200.from_dict(_response_200) return response_200 - return None def _build_response( - *, response: requests.Response + *, client: 
Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[OsidbApiV1FlawsCvssScoresDestroyResponse200]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) def sync_detailed( - flaw_id: str, + flaw_id: UUID, id: str, *, client: AuthenticatedClient, ) -> Response[OsidbApiV1FlawsCvssScoresDestroyResponse200]: + """Destroy the instance and proxy the delete to Bugzilla + + Args: + flaw_id (UUID): + id (str): + bugzilla_api_key (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1FlawsCvssScoresDestroyResponse200] + """ + kwargs = _get_kwargs( flaw_id=flaw_id, id=id, @@ -79,16 +93,29 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( - flaw_id: str, + flaw_id: UUID, id: str, *, client: AuthenticatedClient, ) -> Optional[OsidbApiV1FlawsCvssScoresDestroyResponse200]: - """Destroy the instance and proxy the delete to Bugzilla""" + """Destroy the instance and proxy the delete to Bugzilla + + Args: + flaw_id (UUID): + id (str): + bugzilla_api_key (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + OsidbApiV1FlawsCvssScoresDestroyResponse200 + """ return sync_detailed( flaw_id=flaw_id, @@ -97,12 +124,27 @@ def sync( ).parsed -async def async_detailed( - flaw_id: str, +async def asyncio_detailed( + flaw_id: UUID, id: str, *, client: AuthenticatedClient, ) -> Response[OsidbApiV1FlawsCvssScoresDestroyResponse200]: + """Destroy the instance and proxy the delete to Bugzilla + + Args: + flaw_id (UUID): + id (str): + bugzilla_api_key (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1FlawsCvssScoresDestroyResponse200] + """ + kwargs = _get_kwargs( flaw_id=flaw_id, id=id, @@ -117,19 +159,32 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( - flaw_id: str, +async def asyncio( + flaw_id: UUID, id: str, *, client: AuthenticatedClient, ) -> Optional[OsidbApiV1FlawsCvssScoresDestroyResponse200]: - """Destroy the instance and proxy the delete to Bugzilla""" + """Destroy the instance and proxy the delete to Bugzilla + + Args: + flaw_id (UUID): + id (str): + bugzilla_api_key (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + OsidbApiV1FlawsCvssScoresDestroyResponse200 + """ return ( - await async_detailed( + await asyncio_detailed( flaw_id=flaw_id, id=id, client=client, diff --git a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_cvss_scores_list.py b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_cvss_scores_list.py index 7a584ba..66a97dd 100644 --- a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_cvss_scores_list.py +++ b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_cvss_scores_list.py @@ -1,9 +1,11 @@ import datetime -from typing import Any, Dict, List, Optional, Union +from http import HTTPStatus +from typing import Any, Optional, Union +from uuid import UUID import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.osidb_api_v1_flaws_cvss_scores_list_issuer import ( OsidbApiV1FlawsCvssScoresListIssuer, ) @@ -23,8 +25,8 @@ "created_dt__lt": datetime.datetime, "created_dt__lte": datetime.datetime, "cvss_version": str, - "exclude_fields": List[str], - "include_fields": List[str], + "exclude_fields": list[str], + "include_fields": list[str], "issuer": OsidbApiV1FlawsCvssScoresListIssuer, "limit": int, "offset": int, @@ -37,238 +39,283 @@ "updated_dt__gte": datetime.datetime, "updated_dt__lt": datetime.datetime, "updated_dt__lte": datetime.datetime, - "uuid": str, + "uuid": UUID, "vector": str, } def _get_kwargs( - flaw_id: str, + flaw_id: UUID, *, client: AuthenticatedClient, - comment: Union[Unset, None, str] = UNSET, - created_dt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_date: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - cvss_version: Union[Unset, None, str] = UNSET, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, - issuer: Union[Unset, None, OsidbApiV1FlawsCvssScoresListIssuer] = UNSET, - limit: Union[Unset, None, int] = UNSET, - offset: Union[Unset, None, int] = UNSET, - score: Union[Unset, None, float] = UNSET, - updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - uuid: Union[Unset, None, str] = UNSET, - vector: Union[Unset, None, str] = UNSET, -) -> Dict[str, Any]: - url = "{}/osidb/api/v1/flaws/{flaw_id}/cvss_scores".format( - client.base_url, - flaw_id=flaw_id, - ) - - headers: Dict[str, Any] = client.get_headers() - - json_created_dt: Union[Unset, None, str] = UNSET + comment: Union[Unset, str] = UNSET, + created_dt: Union[Unset, datetime.datetime] = UNSET, + created_dt_date: Union[Unset, datetime.date] = UNSET, + created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + created_dt_date_lte: Union[Unset, datetime.date] = 
UNSET, + created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + cvss_version: Union[Unset, str] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, + issuer: Union[Unset, OsidbApiV1FlawsCvssScoresListIssuer] = UNSET, + limit: Union[Unset, int] = UNSET, + offset: Union[Unset, int] = UNSET, + score: Union[Unset, float] = UNSET, + updated_dt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_date: Union[Unset, datetime.date] = UNSET, + updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + uuid: Union[Unset, UUID] = UNSET, + vector: Union[Unset, str] = UNSET, +) -> dict[str, Any]: + params: dict[str, Any] = {} + + params["comment"] = comment + + json_created_dt: Union[Unset, str] = UNSET if not isinstance(created_dt, Unset): - json_created_dt = created_dt.isoformat() if created_dt else None + json_created_dt = created_dt.isoformat() + + params["created_dt"] = json_created_dt - json_created_dt_date: Union[Unset, None, str] = UNSET + json_created_dt_date: Union[Unset, str] = UNSET if not isinstance(created_dt_date, Unset): - json_created_dt_date = created_dt_date.isoformat() if created_dt_date else None + json_created_dt_date = created_dt_date.isoformat() + + params["created_dt__date"] = json_created_dt_date - json_created_dt_date_gte: Union[Unset, None, str] = UNSET + json_created_dt_date_gte: Union[Unset, str] = UNSET if not isinstance(created_dt_date_gte, Unset): - json_created_dt_date_gte = ( - created_dt_date_gte.isoformat() if created_dt_date_gte else None - ) + json_created_dt_date_gte = created_dt_date_gte.isoformat() + + params["created_dt__date__gte"] = json_created_dt_date_gte - json_created_dt_date_lte: Union[Unset, None, str] = UNSET + json_created_dt_date_lte: Union[Unset, str] = UNSET if not isinstance(created_dt_date_lte, Unset): - json_created_dt_date_lte = ( - created_dt_date_lte.isoformat() if created_dt_date_lte else None - ) + json_created_dt_date_lte = created_dt_date_lte.isoformat() - json_created_dt_gt: Union[Unset, None, str] = UNSET + params["created_dt__date__lte"] = json_created_dt_date_lte + + json_created_dt_gt: Union[Unset, str] = UNSET if not isinstance(created_dt_gt, Unset): - json_created_dt_gt = created_dt_gt.isoformat() if created_dt_gt else None + json_created_dt_gt = created_dt_gt.isoformat() + + params["created_dt__gt"] = json_created_dt_gt - json_created_dt_gte: Union[Unset, None, str] = UNSET + json_created_dt_gte: Union[Unset, str] = UNSET if not isinstance(created_dt_gte, Unset): - json_created_dt_gte = created_dt_gte.isoformat() if created_dt_gte else None + json_created_dt_gte = created_dt_gte.isoformat() - json_created_dt_lt: Union[Unset, None, str] = UNSET + params["created_dt__gte"] = json_created_dt_gte + + json_created_dt_lt: Union[Unset, str] = UNSET if not isinstance(created_dt_lt, Unset): - json_created_dt_lt = created_dt_lt.isoformat() if created_dt_lt else None + json_created_dt_lt = created_dt_lt.isoformat() + + params["created_dt__lt"] = json_created_dt_lt - json_created_dt_lte: Union[Unset, None, str] = 
UNSET + json_created_dt_lte: Union[Unset, str] = UNSET if not isinstance(created_dt_lte, Unset): - json_created_dt_lte = created_dt_lte.isoformat() if created_dt_lte else None + json_created_dt_lte = created_dt_lte.isoformat() + + params["created_dt__lte"] = json_created_dt_lte - json_exclude_fields: Union[Unset, None, List[str]] = UNSET + params["cvss_version"] = cvss_version + + json_exclude_fields: Union[Unset, list[str]] = UNSET if not isinstance(exclude_fields, Unset): - if exclude_fields is None: - json_exclude_fields = None - else: - json_exclude_fields = exclude_fields + json_exclude_fields = exclude_fields - json_include_fields: Union[Unset, None, List[str]] = UNSET + params["exclude_fields"] = json_exclude_fields + + json_include_fields: Union[Unset, list[str]] = UNSET if not isinstance(include_fields, Unset): - if include_fields is None: - json_include_fields = None - else: - json_include_fields = include_fields + json_include_fields = include_fields + + params["include_fields"] = json_include_fields - json_issuer: Union[Unset, None, str] = UNSET + json_issuer: Union[Unset, str] = UNSET if not isinstance(issuer, Unset): + json_issuer = OsidbApiV1FlawsCvssScoresListIssuer(issuer).value - json_issuer = ( - OsidbApiV1FlawsCvssScoresListIssuer(issuer).value if issuer else None - ) + params["issuer"] = json_issuer + + params["limit"] = limit - json_updated_dt: Union[Unset, None, str] = UNSET + params["offset"] = offset + + params["score"] = score + + json_updated_dt: Union[Unset, str] = UNSET if not isinstance(updated_dt, Unset): - json_updated_dt = updated_dt.isoformat() if updated_dt else None + json_updated_dt = updated_dt.isoformat() + + params["updated_dt"] = json_updated_dt - json_updated_dt_date: Union[Unset, None, str] = UNSET + json_updated_dt_date: Union[Unset, str] = UNSET if not isinstance(updated_dt_date, Unset): - json_updated_dt_date = updated_dt_date.isoformat() if updated_dt_date else None + json_updated_dt_date = updated_dt_date.isoformat() - json_updated_dt_date_gte: Union[Unset, None, str] = UNSET + params["updated_dt__date"] = json_updated_dt_date + + json_updated_dt_date_gte: Union[Unset, str] = UNSET if not isinstance(updated_dt_date_gte, Unset): - json_updated_dt_date_gte = ( - updated_dt_date_gte.isoformat() if updated_dt_date_gte else None - ) + json_updated_dt_date_gte = updated_dt_date_gte.isoformat() + + params["updated_dt__date__gte"] = json_updated_dt_date_gte - json_updated_dt_date_lte: Union[Unset, None, str] = UNSET + json_updated_dt_date_lte: Union[Unset, str] = UNSET if not isinstance(updated_dt_date_lte, Unset): - json_updated_dt_date_lte = ( - updated_dt_date_lte.isoformat() if updated_dt_date_lte else None - ) + json_updated_dt_date_lte = updated_dt_date_lte.isoformat() + + params["updated_dt__date__lte"] = json_updated_dt_date_lte - json_updated_dt_gt: Union[Unset, None, str] = UNSET + json_updated_dt_gt: Union[Unset, str] = UNSET if not isinstance(updated_dt_gt, Unset): - json_updated_dt_gt = updated_dt_gt.isoformat() if updated_dt_gt else None + json_updated_dt_gt = updated_dt_gt.isoformat() + + params["updated_dt__gt"] = json_updated_dt_gt - json_updated_dt_gte: Union[Unset, None, str] = UNSET + json_updated_dt_gte: Union[Unset, str] = UNSET if not isinstance(updated_dt_gte, Unset): - json_updated_dt_gte = updated_dt_gte.isoformat() if updated_dt_gte else None + json_updated_dt_gte = updated_dt_gte.isoformat() - json_updated_dt_lt: Union[Unset, None, str] = UNSET + params["updated_dt__gte"] = json_updated_dt_gte + + json_updated_dt_lt: 
Union[Unset, str] = UNSET if not isinstance(updated_dt_lt, Unset): - json_updated_dt_lt = updated_dt_lt.isoformat() if updated_dt_lt else None + json_updated_dt_lt = updated_dt_lt.isoformat() + + params["updated_dt__lt"] = json_updated_dt_lt - json_updated_dt_lte: Union[Unset, None, str] = UNSET + json_updated_dt_lte: Union[Unset, str] = UNSET if not isinstance(updated_dt_lte, Unset): - json_updated_dt_lte = updated_dt_lte.isoformat() if updated_dt_lte else None - - params: Dict[str, Any] = { - "comment": comment, - "created_dt": json_created_dt, - "created_dt__date": json_created_dt_date, - "created_dt__date__gte": json_created_dt_date_gte, - "created_dt__date__lte": json_created_dt_date_lte, - "created_dt__gt": json_created_dt_gt, - "created_dt__gte": json_created_dt_gte, - "created_dt__lt": json_created_dt_lt, - "created_dt__lte": json_created_dt_lte, - "cvss_version": cvss_version, - "exclude_fields": json_exclude_fields, - "include_fields": json_include_fields, - "issuer": json_issuer, - "limit": limit, - "offset": offset, - "score": score, - "updated_dt": json_updated_dt, - "updated_dt__date": json_updated_dt_date, - "updated_dt__date__gte": json_updated_dt_date_gte, - "updated_dt__date__lte": json_updated_dt_date_lte, - "updated_dt__gt": json_updated_dt_gt, - "updated_dt__gte": json_updated_dt_gte, - "updated_dt__lt": json_updated_dt_lt, - "updated_dt__lte": json_updated_dt_lte, - "uuid": uuid, - "vector": vector, - } + json_updated_dt_lte = updated_dt_lte.isoformat() + + params["updated_dt__lte"] = json_updated_dt_lte + + json_uuid: Union[Unset, str] = UNSET + if not isinstance(uuid, Unset): + json_uuid = str(uuid) + + params["uuid"] = json_uuid + + params["vector"] = vector + params = {k: v for k, v in params.items() if v is not UNSET and v is not None} - return { - "url": url, - "headers": headers, + _kwargs: dict[str, Any] = { + "url": f"{client.base_url}//osidb/api/v1/flaws/{flaw_id}/cvss_scores".format( + flaw_id=flaw_id, + ), "params": params, } + return _kwargs + def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[OsidbApiV1FlawsCvssScoresListResponse200]: if response.status_code == 200: + # } _response_200 = response.json() response_200: OsidbApiV1FlawsCvssScoresListResponse200 if isinstance(_response_200, Unset): response_200 = UNSET else: - response_200 = OsidbApiV1FlawsCvssScoresListResponse200.from_dict( - _response_200 - ) + response_200 = OsidbApiV1FlawsCvssScoresListResponse200.from_dict(_response_200) return response_200 - return None def _build_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[OsidbApiV1FlawsCvssScoresListResponse200]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) def sync_detailed( - flaw_id: str, + flaw_id: UUID, *, client: AuthenticatedClient, - comment: Union[Unset, None, str] = UNSET, - created_dt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_date: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_gte: Union[Unset, None, datetime.datetime] = 
UNSET, - created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - cvss_version: Union[Unset, None, str] = UNSET, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, - issuer: Union[Unset, None, OsidbApiV1FlawsCvssScoresListIssuer] = UNSET, - limit: Union[Unset, None, int] = UNSET, - offset: Union[Unset, None, int] = UNSET, - score: Union[Unset, None, float] = UNSET, - updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - uuid: Union[Unset, None, str] = UNSET, - vector: Union[Unset, None, str] = UNSET, + comment: Union[Unset, str] = UNSET, + created_dt: Union[Unset, datetime.datetime] = UNSET, + created_dt_date: Union[Unset, datetime.date] = UNSET, + created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + cvss_version: Union[Unset, str] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, + issuer: Union[Unset, OsidbApiV1FlawsCvssScoresListIssuer] = UNSET, + limit: Union[Unset, int] = UNSET, + offset: Union[Unset, int] = UNSET, + score: Union[Unset, float] = UNSET, + updated_dt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_date: Union[Unset, datetime.date] = UNSET, + updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + uuid: Union[Unset, UUID] = UNSET, + vector: Union[Unset, str] = UNSET, ) -> Response[OsidbApiV1FlawsCvssScoresListResponse200]: + """ + Args: + flaw_id (UUID): + comment (Union[Unset, str]): + created_dt (Union[Unset, datetime.datetime]): + created_dt_date (Union[Unset, datetime.date]): + created_dt_date_gte (Union[Unset, datetime.date]): + created_dt_date_lte (Union[Unset, datetime.date]): + created_dt_gt (Union[Unset, datetime.datetime]): + created_dt_gte (Union[Unset, datetime.datetime]): + created_dt_lt (Union[Unset, datetime.datetime]): + created_dt_lte (Union[Unset, datetime.datetime]): + cvss_version (Union[Unset, str]): + exclude_fields (Union[Unset, list[str]]): + include_fields (Union[Unset, list[str]]): + issuer (Union[Unset, OsidbApiV1FlawsCvssScoresListIssuer]): + limit (Union[Unset, int]): + offset (Union[Unset, int]): + score (Union[Unset, float]): + updated_dt (Union[Unset, datetime.datetime]): + updated_dt_date (Union[Unset, datetime.date]): + updated_dt_date_gte (Union[Unset, datetime.date]): + updated_dt_date_lte (Union[Unset, datetime.date]): + updated_dt_gt (Union[Unset, datetime.datetime]): + updated_dt_gte 
(Union[Unset, datetime.datetime]): + updated_dt_lt (Union[Unset, datetime.datetime]): + updated_dt_lte (Union[Unset, datetime.datetime]): + uuid (Union[Unset, UUID]): + vector (Union[Unset, str]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1FlawsCvssScoresListResponse200] + """ + kwargs = _get_kwargs( flaw_id=flaw_id, client=client, @@ -308,41 +355,77 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( - flaw_id: str, + flaw_id: UUID, *, client: AuthenticatedClient, - comment: Union[Unset, None, str] = UNSET, - created_dt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_date: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - cvss_version: Union[Unset, None, str] = UNSET, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, - issuer: Union[Unset, None, OsidbApiV1FlawsCvssScoresListIssuer] = UNSET, - limit: Union[Unset, None, int] = UNSET, - offset: Union[Unset, None, int] = UNSET, - score: Union[Unset, None, float] = UNSET, - updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - uuid: Union[Unset, None, str] = UNSET, - vector: Union[Unset, None, str] = UNSET, + comment: Union[Unset, str] = UNSET, + created_dt: Union[Unset, datetime.datetime] = UNSET, + created_dt_date: Union[Unset, datetime.date] = UNSET, + created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + cvss_version: Union[Unset, str] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, + issuer: Union[Unset, OsidbApiV1FlawsCvssScoresListIssuer] = UNSET, + limit: Union[Unset, int] = UNSET, + offset: Union[Unset, int] = UNSET, + score: Union[Unset, float] = UNSET, + updated_dt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_date: Union[Unset, datetime.date] = UNSET, + updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + 
updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + uuid: Union[Unset, UUID] = UNSET, + vector: Union[Unset, str] = UNSET, ) -> Optional[OsidbApiV1FlawsCvssScoresListResponse200]: - """ """ + """ + Args: + flaw_id (UUID): + comment (Union[Unset, str]): + created_dt (Union[Unset, datetime.datetime]): + created_dt_date (Union[Unset, datetime.date]): + created_dt_date_gte (Union[Unset, datetime.date]): + created_dt_date_lte (Union[Unset, datetime.date]): + created_dt_gt (Union[Unset, datetime.datetime]): + created_dt_gte (Union[Unset, datetime.datetime]): + created_dt_lt (Union[Unset, datetime.datetime]): + created_dt_lte (Union[Unset, datetime.datetime]): + cvss_version (Union[Unset, str]): + exclude_fields (Union[Unset, list[str]]): + include_fields (Union[Unset, list[str]]): + issuer (Union[Unset, OsidbApiV1FlawsCvssScoresListIssuer]): + limit (Union[Unset, int]): + offset (Union[Unset, int]): + score (Union[Unset, float]): + updated_dt (Union[Unset, datetime.datetime]): + updated_dt_date (Union[Unset, datetime.date]): + updated_dt_date_gte (Union[Unset, datetime.date]): + updated_dt_date_lte (Union[Unset, datetime.date]): + updated_dt_gt (Union[Unset, datetime.datetime]): + updated_dt_gte (Union[Unset, datetime.datetime]): + updated_dt_lt (Union[Unset, datetime.datetime]): + updated_dt_lte (Union[Unset, datetime.datetime]): + uuid (Union[Unset, UUID]): + vector (Union[Unset, str]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + OsidbApiV1FlawsCvssScoresListResponse200 + """ return sync_detailed( flaw_id=flaw_id, @@ -376,37 +459,75 @@ def sync( ).parsed -async def async_detailed( - flaw_id: str, +async def asyncio_detailed( + flaw_id: UUID, *, client: AuthenticatedClient, - comment: Union[Unset, None, str] = UNSET, - created_dt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_date: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - cvss_version: Union[Unset, None, str] = UNSET, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, - issuer: Union[Unset, None, OsidbApiV1FlawsCvssScoresListIssuer] = UNSET, - limit: Union[Unset, None, int] = UNSET, - offset: Union[Unset, None, int] = UNSET, - score: Union[Unset, None, float] = UNSET, - updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - uuid: Union[Unset, None, str] = UNSET, - vector: Union[Unset, None, str] = UNSET, + comment: Union[Unset, str] = UNSET, + created_dt: Union[Unset, datetime.datetime] = UNSET, + created_dt_date: Union[Unset, datetime.date] = 
UNSET, + created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + cvss_version: Union[Unset, str] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, + issuer: Union[Unset, OsidbApiV1FlawsCvssScoresListIssuer] = UNSET, + limit: Union[Unset, int] = UNSET, + offset: Union[Unset, int] = UNSET, + score: Union[Unset, float] = UNSET, + updated_dt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_date: Union[Unset, datetime.date] = UNSET, + updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + uuid: Union[Unset, UUID] = UNSET, + vector: Union[Unset, str] = UNSET, ) -> Response[OsidbApiV1FlawsCvssScoresListResponse200]: + """ + Args: + flaw_id (UUID): + comment (Union[Unset, str]): + created_dt (Union[Unset, datetime.datetime]): + created_dt_date (Union[Unset, datetime.date]): + created_dt_date_gte (Union[Unset, datetime.date]): + created_dt_date_lte (Union[Unset, datetime.date]): + created_dt_gt (Union[Unset, datetime.datetime]): + created_dt_gte (Union[Unset, datetime.datetime]): + created_dt_lt (Union[Unset, datetime.datetime]): + created_dt_lte (Union[Unset, datetime.datetime]): + cvss_version (Union[Unset, str]): + exclude_fields (Union[Unset, list[str]]): + include_fields (Union[Unset, list[str]]): + issuer (Union[Unset, OsidbApiV1FlawsCvssScoresListIssuer]): + limit (Union[Unset, int]): + offset (Union[Unset, int]): + score (Union[Unset, float]): + updated_dt (Union[Unset, datetime.datetime]): + updated_dt_date (Union[Unset, datetime.date]): + updated_dt_date_gte (Union[Unset, datetime.date]): + updated_dt_date_lte (Union[Unset, datetime.date]): + updated_dt_gt (Union[Unset, datetime.datetime]): + updated_dt_gte (Union[Unset, datetime.datetime]): + updated_dt_lt (Union[Unset, datetime.datetime]): + updated_dt_lte (Union[Unset, datetime.datetime]): + uuid (Union[Unset, UUID]): + vector (Union[Unset, str]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
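The list endpoint regenerated above drops None from its optional parameter types (Union[Unset, X] instead of Union[Unset, None, X]), takes flaw_id as a UUID, and builds params incrementally before stripping UNSET values; note that the regenerated docstrings mention errors.UnexpectedStatus and httpx.TimeoutException even though the calls here still go through requests and the client's async session. A minimal usage sketch, assuming the package layout mirrors the file paths in this patch and that an AuthenticatedClient is configured elsewhere (its construction is not part of this hunk); the cvss_version and limit values are illustrative:

# Hedged sketch: only the regenerated signature is exercised here; the
# AuthenticatedClient setup is assumed to happen elsewhere.
from uuid import UUID

from osidb_bindings.bindings.python_client.api.osidb import (
    osidb_api_v1_flaws_cvss_scores_list,
)
from osidb_bindings.bindings.python_client.client import AuthenticatedClient


def list_cvss_scores(client: AuthenticatedClient, flaw_id: UUID):
    # Filters left at their UNSET defaults are dropped from the query string
    # by _get_kwargs(); explicit None values are no longer part of the API.
    return osidb_api_v1_flaws_cvss_scores_list.sync(
        flaw_id=flaw_id,
        client=client,
        cvss_version="3.1",  # illustrative filter value
        limit=10,
    )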
+ + Returns: + Response[OsidbApiV1FlawsCvssScoresListResponse200] + """ + kwargs = _get_kwargs( flaw_id=flaw_id, client=client, @@ -446,44 +567,80 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( - flaw_id: str, +async def asyncio( + flaw_id: UUID, *, client: AuthenticatedClient, - comment: Union[Unset, None, str] = UNSET, - created_dt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_date: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - cvss_version: Union[Unset, None, str] = UNSET, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, - issuer: Union[Unset, None, OsidbApiV1FlawsCvssScoresListIssuer] = UNSET, - limit: Union[Unset, None, int] = UNSET, - offset: Union[Unset, None, int] = UNSET, - score: Union[Unset, None, float] = UNSET, - updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - uuid: Union[Unset, None, str] = UNSET, - vector: Union[Unset, None, str] = UNSET, + comment: Union[Unset, str] = UNSET, + created_dt: Union[Unset, datetime.datetime] = UNSET, + created_dt_date: Union[Unset, datetime.date] = UNSET, + created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + cvss_version: Union[Unset, str] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, + issuer: Union[Unset, OsidbApiV1FlawsCvssScoresListIssuer] = UNSET, + limit: Union[Unset, int] = UNSET, + offset: Union[Unset, int] = UNSET, + score: Union[Unset, float] = UNSET, + updated_dt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_date: Union[Unset, datetime.date] = UNSET, + updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + uuid: Union[Unset, UUID] = UNSET, + vector: Union[Unset, str] = UNSET, ) -> Optional[OsidbApiV1FlawsCvssScoresListResponse200]: - """ """ + """ + Args: + flaw_id (UUID): + comment (Union[Unset, str]): + created_dt (Union[Unset, datetime.datetime]): + created_dt_date (Union[Unset, datetime.date]): + 
created_dt_date_gte (Union[Unset, datetime.date]): + created_dt_date_lte (Union[Unset, datetime.date]): + created_dt_gt (Union[Unset, datetime.datetime]): + created_dt_gte (Union[Unset, datetime.datetime]): + created_dt_lt (Union[Unset, datetime.datetime]): + created_dt_lte (Union[Unset, datetime.datetime]): + cvss_version (Union[Unset, str]): + exclude_fields (Union[Unset, list[str]]): + include_fields (Union[Unset, list[str]]): + issuer (Union[Unset, OsidbApiV1FlawsCvssScoresListIssuer]): + limit (Union[Unset, int]): + offset (Union[Unset, int]): + score (Union[Unset, float]): + updated_dt (Union[Unset, datetime.datetime]): + updated_dt_date (Union[Unset, datetime.date]): + updated_dt_date_gte (Union[Unset, datetime.date]): + updated_dt_date_lte (Union[Unset, datetime.date]): + updated_dt_gt (Union[Unset, datetime.datetime]): + updated_dt_gte (Union[Unset, datetime.datetime]): + updated_dt_lt (Union[Unset, datetime.datetime]): + updated_dt_lte (Union[Unset, datetime.datetime]): + uuid (Union[Unset, UUID]): + vector (Union[Unset, str]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + OsidbApiV1FlawsCvssScoresListResponse200 + """ return ( - await async_detailed( + await asyncio_detailed( flaw_id=flaw_id, client=client, comment=comment, diff --git a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_cvss_scores_retrieve.py b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_cvss_scores_retrieve.py index d8311d3..c66a770 100644 --- a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_cvss_scores_retrieve.py +++ b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_cvss_scores_retrieve.py @@ -1,98 +1,105 @@ -from typing import Any, Dict, List, Optional, Union +from http import HTTPStatus +from typing import Any, Optional, Union +from uuid import UUID import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.osidb_api_v1_flaws_cvss_scores_retrieve_response_200 import ( OsidbApiV1FlawsCvssScoresRetrieveResponse200, ) from ...types import UNSET, Response, Unset QUERY_PARAMS = { - "exclude_fields": List[str], - "include_fields": List[str], + "exclude_fields": list[str], + "include_fields": list[str], } def _get_kwargs( - flaw_id: str, + flaw_id: UUID, id: str, *, client: AuthenticatedClient, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, -) -> Dict[str, Any]: - url = "{}/osidb/api/v1/flaws/{flaw_id}/cvss_scores/{id}".format( - client.base_url, - flaw_id=flaw_id, - id=id, - ) + exclude_fields: Union[Unset, list[str]] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, +) -> dict[str, Any]: + params: dict[str, Any] = {} - headers: Dict[str, Any] = client.get_headers() - - json_exclude_fields: Union[Unset, None, List[str]] = UNSET + json_exclude_fields: Union[Unset, list[str]] = UNSET if not isinstance(exclude_fields, Unset): - if exclude_fields is None: - json_exclude_fields = None - else: - json_exclude_fields = exclude_fields + json_exclude_fields = exclude_fields + + params["exclude_fields"] = json_exclude_fields - json_include_fields: Union[Unset, None, List[str]] = UNSET + json_include_fields: Union[Unset, list[str]] = UNSET if not isinstance(include_fields, Unset): - if include_fields is None: - json_include_fields 
= None - else: - json_include_fields = include_fields + json_include_fields = include_fields + + params["include_fields"] = json_include_fields - params: Dict[str, Any] = { - "exclude_fields": json_exclude_fields, - "include_fields": json_include_fields, - } params = {k: v for k, v in params.items() if v is not UNSET and v is not None} - return { - "url": url, - "headers": headers, + _kwargs: dict[str, Any] = { + "url": f"{client.base_url}//osidb/api/v1/flaws/{flaw_id}/cvss_scores/{id}".format( + flaw_id=flaw_id, + id=id, + ), "params": params, } + return _kwargs + def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[OsidbApiV1FlawsCvssScoresRetrieveResponse200]: if response.status_code == 200: + # } _response_200 = response.json() response_200: OsidbApiV1FlawsCvssScoresRetrieveResponse200 if isinstance(_response_200, Unset): response_200 = UNSET else: - response_200 = OsidbApiV1FlawsCvssScoresRetrieveResponse200.from_dict( - _response_200 - ) + response_200 = OsidbApiV1FlawsCvssScoresRetrieveResponse200.from_dict(_response_200) return response_200 - return None def _build_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[OsidbApiV1FlawsCvssScoresRetrieveResponse200]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) def sync_detailed( - flaw_id: str, + flaw_id: UUID, id: str, *, client: AuthenticatedClient, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, ) -> Response[OsidbApiV1FlawsCvssScoresRetrieveResponse200]: + """ + Args: + flaw_id (UUID): + id (str): + exclude_fields (Union[Unset, list[str]]): + include_fields (Union[Unset, list[str]]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1FlawsCvssScoresRetrieveResponse200] + """ + kwargs = _get_kwargs( flaw_id=flaw_id, id=id, @@ -109,18 +116,31 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( - flaw_id: str, + flaw_id: UUID, id: str, *, client: AuthenticatedClient, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, ) -> Optional[OsidbApiV1FlawsCvssScoresRetrieveResponse200]: - """ """ + """ + Args: + flaw_id (UUID): + id (str): + exclude_fields (Union[Unset, list[str]]): + include_fields (Union[Unset, list[str]]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + OsidbApiV1FlawsCvssScoresRetrieveResponse200 + """ return sync_detailed( flaw_id=flaw_id, @@ -131,14 +151,29 @@ def sync( ).parsed -async def async_detailed( - flaw_id: str, +async def asyncio_detailed( + flaw_id: UUID, id: str, *, client: AuthenticatedClient, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, ) -> Response[OsidbApiV1FlawsCvssScoresRetrieveResponse200]: + """ + Args: + flaw_id (UUID): + id (str): + exclude_fields (Union[Unset, list[str]]): + include_fields (Union[Unset, list[str]]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1FlawsCvssScoresRetrieveResponse200] + """ + kwargs = _get_kwargs( flaw_id=flaw_id, id=id, @@ -155,21 +190,34 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( - flaw_id: str, +async def asyncio( + flaw_id: UUID, id: str, *, client: AuthenticatedClient, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, ) -> Optional[OsidbApiV1FlawsCvssScoresRetrieveResponse200]: - """ """ + """ + Args: + flaw_id (UUID): + id (str): + exclude_fields (Union[Unset, list[str]]): + include_fields (Union[Unset, list[str]]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
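For async callers of the retrieve endpoint, the entry points are renamed from async_detailed/async_ to asyncio_detailed/asyncio, and exclude_fields/include_fields become plain list[str] filters. A hedged sketch of the renamed coroutine, again assuming a configured AuthenticatedClient; the field names passed to include_fields are illustrative:

# Hedged sketch of the renamed async entry point; client setup is assumed.
from uuid import UUID

from osidb_bindings.bindings.python_client.api.osidb import (
    osidb_api_v1_flaws_cvss_scores_retrieve,
)
from osidb_bindings.bindings.python_client.client import AuthenticatedClient


async def get_cvss_score(client: AuthenticatedClient, flaw_id: UUID, score_id: str):
    # Formerly `async_(...)`; the keyword-only filters no longer accept None.
    return await osidb_api_v1_flaws_cvss_scores_retrieve.asyncio(
        flaw_id=flaw_id,
        id=score_id,
        client=client,
        include_fields=["uuid", "vector", "score"],  # illustrative field names
    )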
+ + Returns: + OsidbApiV1FlawsCvssScoresRetrieveResponse200 + """ return ( - await async_detailed( + await asyncio_detailed( flaw_id=flaw_id, id=id, client=client, diff --git a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_cvss_scores_update.py b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_cvss_scores_update.py index a2ce149..9da03e4 100644 --- a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_cvss_scores_update.py +++ b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_cvss_scores_update.py @@ -1,8 +1,10 @@ -from typing import Any, Dict, Optional +from http import HTTPStatus +from typing import Any, Optional, Union +from uuid import UUID import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.flaw_cvss_put import FlawCVSSPut from ...models.osidb_api_v1_flaws_cvss_scores_update_response_200 import ( OsidbApiV1FlawsCvssScoresUpdateResponse200, @@ -10,85 +12,101 @@ from ...types import UNSET, Response, Unset QUERY_PARAMS = {} + REQUEST_BODY_TYPE = FlawCVSSPut def _get_kwargs( - flaw_id: str, + flaw_id: UUID, id: str, *, client: AuthenticatedClient, - form_data: FlawCVSSPut, - multipart_data: FlawCVSSPut, - json_body: FlawCVSSPut, -) -> Dict[str, Any]: - url = "{}/osidb/api/v1/flaws/{flaw_id}/cvss_scores/{id}".format( - client.base_url, - flaw_id=flaw_id, - id=id, - ) - - headers: Dict[str, Any] = client.get_headers() + body: Union[ + FlawCVSSPut, + FlawCVSSPut, + FlawCVSSPut, + ], +) -> dict[str, Any]: + headers: dict[str, Any] = client.get_headers() + + _kwargs: dict[str, Any] = { + "url": f"{client.base_url}//osidb/api/v1/flaws/{flaw_id}/cvss_scores/{id}".format( + flaw_id=flaw_id, + id=id, + ), + } - json_json_body: Dict[str, Any] = UNSET - if not isinstance(json_body, Unset): - json_body.to_dict() + if isinstance(body, FlawCVSSPut): + _json_body: dict[str, Any] = UNSET + if not isinstance(body, Unset): + _json_body = body.to_dict() - multipart_multipart_data: Dict[str, Any] = UNSET - if not isinstance(multipart_data, Unset): - multipart_data.to_multipart() + _kwargs["json"] = _json_body + headers["Content-Type"] = "application/json" - return { - "url": url, - "headers": headers, - "json": form_data.to_dict(), - } + _kwargs["headers"] = headers + return _kwargs def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[OsidbApiV1FlawsCvssScoresUpdateResponse200]: if response.status_code == 200: + # } _response_200 = response.json() response_200: OsidbApiV1FlawsCvssScoresUpdateResponse200 if isinstance(_response_200, Unset): response_200 = UNSET else: - response_200 = OsidbApiV1FlawsCvssScoresUpdateResponse200.from_dict( - _response_200 - ) + response_200 = OsidbApiV1FlawsCvssScoresUpdateResponse200.from_dict(_response_200) return response_200 - return None def _build_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[OsidbApiV1FlawsCvssScoresUpdateResponse200]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) def sync_detailed( - flaw_id: str, + flaw_id: UUID, id: str, *, client: AuthenticatedClient, - form_data: FlawCVSSPut, - multipart_data: FlawCVSSPut, - json_body: 
FlawCVSSPut, + body: Union[ + FlawCVSSPut, + FlawCVSSPut, + FlawCVSSPut, + ], ) -> Response[OsidbApiV1FlawsCvssScoresUpdateResponse200]: + """ + Args: + flaw_id (UUID): + id (str): + bugzilla_api_key (str): + body (FlawCVSSPut): FlawCVSS serializer + body (FlawCVSSPut): FlawCVSS serializer + body (FlawCVSSPut): FlawCVSS serializer + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1FlawsCvssScoresUpdateResponse200] + """ + kwargs = _get_kwargs( flaw_id=flaw_id, id=id, client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) response = requests.put( @@ -99,46 +117,78 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( - flaw_id: str, + flaw_id: UUID, id: str, *, client: AuthenticatedClient, - form_data: FlawCVSSPut, - multipart_data: FlawCVSSPut, - json_body: FlawCVSSPut, + body: Union[ + FlawCVSSPut, + FlawCVSSPut, + FlawCVSSPut, + ], ) -> Optional[OsidbApiV1FlawsCvssScoresUpdateResponse200]: - """ """ + """ + Args: + flaw_id (UUID): + id (str): + bugzilla_api_key (str): + body (FlawCVSSPut): FlawCVSS serializer + body (FlawCVSSPut): FlawCVSS serializer + body (FlawCVSSPut): FlawCVSS serializer + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + OsidbApiV1FlawsCvssScoresUpdateResponse200 + """ return sync_detailed( flaw_id=flaw_id, id=id, client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ).parsed -async def async_detailed( - flaw_id: str, +async def asyncio_detailed( + flaw_id: UUID, id: str, *, client: AuthenticatedClient, - form_data: FlawCVSSPut, - multipart_data: FlawCVSSPut, - json_body: FlawCVSSPut, + body: Union[ + FlawCVSSPut, + FlawCVSSPut, + FlawCVSSPut, + ], ) -> Response[OsidbApiV1FlawsCvssScoresUpdateResponse200]: + """ + Args: + flaw_id (UUID): + id (str): + bugzilla_api_key (str): + body (FlawCVSSPut): FlawCVSS serializer + body (FlawCVSSPut): FlawCVSS serializer + body (FlawCVSSPut): FlawCVSS serializer + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
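The update endpoint carries the main call-site change in this group: the separate form_data, multipart_data, and json_body arguments collapse into a single body, which _get_kwargs serializes with to_dict() and sends as JSON with an explicit Content-Type header. A hedged migration sketch, assuming the FlawCVSSPut payload is constructed by the caller:

# Hedged migration sketch; how FlawCVSSPut is populated is up to the caller
# and is not spelled out in this hunk.
from uuid import UUID

from osidb_bindings.bindings.python_client.api.osidb import (
    osidb_api_v1_flaws_cvss_scores_update,
)
from osidb_bindings.bindings.python_client.client import AuthenticatedClient
from osidb_bindings.bindings.python_client.models.flaw_cvss_put import FlawCVSSPut


def update_cvss_score(
    client: AuthenticatedClient, flaw_id: UUID, score_id: str, payload: FlawCVSSPut
):
    # Before: sync(..., form_data=payload, multipart_data=payload, json_body=payload)
    # After:  a single body= argument that _get_kwargs sends as JSON.
    return osidb_api_v1_flaws_cvss_scores_update.sync(
        flaw_id=flaw_id,
        id=score_id,
        client=client,
        body=payload,
    )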
+ + Returns: + Response[OsidbApiV1FlawsCvssScoresUpdateResponse200] + """ + kwargs = _get_kwargs( flaw_id=flaw_id, id=id, client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) async with client.get_async_session().put( @@ -149,27 +199,42 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( - flaw_id: str, +async def asyncio( + flaw_id: UUID, id: str, *, client: AuthenticatedClient, - form_data: FlawCVSSPut, - multipart_data: FlawCVSSPut, - json_body: FlawCVSSPut, + body: Union[ + FlawCVSSPut, + FlawCVSSPut, + FlawCVSSPut, + ], ) -> Optional[OsidbApiV1FlawsCvssScoresUpdateResponse200]: - """ """ + """ + Args: + flaw_id (UUID): + id (str): + bugzilla_api_key (str): + body (FlawCVSSPut): FlawCVSS serializer + body (FlawCVSSPut): FlawCVSS serializer + body (FlawCVSSPut): FlawCVSS serializer + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + OsidbApiV1FlawsCvssScoresUpdateResponse200 + """ return ( - await async_detailed( + await asyncio_detailed( flaw_id=flaw_id, id=id, client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) ).parsed diff --git a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_list.py b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_list.py index a13bc00..897be29 100644 --- a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_list.py +++ b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_list.py @@ -1,9 +1,11 @@ import datetime -from typing import Any, Dict, List, Optional, Union +from http import HTTPStatus +from typing import Any, Optional, Union +from uuid import UUID import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.osidb_api_v1_flaws_list_affects_affectedness import ( OsidbApiV1FlawsListAffectsAffectedness, ) @@ -62,7 +64,7 @@ "acknowledgments__updated_dt__gte": datetime.datetime, "acknowledgments__updated_dt__lt": datetime.datetime, "acknowledgments__updated_dt__lte": datetime.datetime, - "acknowledgments__uuid": str, + "acknowledgments__uuid": UUID, "affects__affectedness": OsidbApiV1FlawsListAffectsAffectedness, "affects__created_dt": datetime.datetime, "affects__created_dt__date": datetime.date, @@ -109,7 +111,7 @@ "affects__trackers__updated_dt__gte": datetime.datetime, "affects__trackers__updated_dt__lt": datetime.datetime, "affects__trackers__updated_dt__lte": datetime.datetime, - "affects__trackers__uuid": str, + "affects__trackers__uuid": UUID, "affects__updated_dt": datetime.datetime, "affects__updated_dt__date": datetime.date, "affects__updated_dt__date__gte": datetime.date, @@ -118,12 +120,12 @@ "affects__updated_dt__gte": datetime.datetime, "affects__updated_dt__lt": datetime.datetime, "affects__updated_dt__lte": datetime.datetime, - "affects__uuid": str, + "affects__uuid": UUID, "bz_id": float, "changed_after": datetime.datetime, "changed_before": datetime.datetime, "comment_zero": str, - "components": List[str], + "components": list[str], "created_dt": datetime.datetime, "created_dt__date": datetime.date, "created_dt__date__gte": datetime.date, @@ -134,7 +136,7 @@ "created_dt__lte": 
datetime.datetime, "cve_description": str, "cve_description__isempty": bool, - "cve_id": List[str], + "cve_id": list[str], "cve_id__isempty": bool, "cvss2_nist__isempty": bool, "cvss2_rh__isempty": bool, @@ -162,15 +164,15 @@ "cvss_scores__updated_dt__gte": datetime.datetime, "cvss_scores__updated_dt__lt": datetime.datetime, "cvss_scores__updated_dt__lte": datetime.datetime, - "cvss_scores__uuid": str, + "cvss_scores__uuid": UUID, "cvss_scores__vector": str, "cwe_id": str, "cwe_id__isempty": bool, "embargoed": bool, - "exclude_fields": List[str], + "exclude_fields": list[str], "impact": OsidbApiV1FlawsListImpact, - "include_fields": List[str], - "include_meta_attr": List[str], + "include_fields": list[str], + "include_meta_attr": list[str], "limit": int, "major_incident_start_dt": datetime.datetime, "major_incident_start_dt__date": datetime.date, @@ -184,7 +186,7 @@ "mitigation__isempty": bool, "nist_cvss_validation": OsidbApiV1FlawsListNistCvssValidation, "offset": int, - "order": List[OsidbApiV1FlawsListOrderItem], + "order": list[OsidbApiV1FlawsListOrderItem], "owner": str, "owner__isempty": bool, "query": str, @@ -207,7 +209,7 @@ "references__updated_dt__lt": datetime.datetime, "references__updated_dt__lte": datetime.datetime, "references__url": str, - "references__uuid": str, + "references__uuid": UUID, "reported_dt": datetime.datetime, "reported_dt__date": datetime.date, "reported_dt__date__gte": datetime.date, @@ -223,7 +225,7 @@ "statement__isempty": bool, "team_id": str, "title": str, - "tracker_ids": List[str], + "tracker_ids": list[str], "unembargo_dt": datetime.datetime, "updated_dt": datetime.datetime, "updated_dt__date": datetime.date, @@ -233,1441 +235,1212 @@ "updated_dt__gte": datetime.datetime, "updated_dt__lt": datetime.datetime, "updated_dt__lte": datetime.datetime, - "uuid": str, - "workflow_state": List[OsidbApiV1FlawsListWorkflowStateItem], + "uuid": UUID, + "workflow_state": list[OsidbApiV1FlawsListWorkflowStateItem], } def _get_kwargs( *, client: AuthenticatedClient, - acknowledgments_affiliation: Union[Unset, None, str] = UNSET, - acknowledgments_created_dt: Union[Unset, None, datetime.datetime] = UNSET, - acknowledgments_created_dt_date: Union[Unset, None, datetime.date] = UNSET, - acknowledgments_created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - acknowledgments_created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - acknowledgments_created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - acknowledgments_created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - acknowledgments_created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - acknowledgments_created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - acknowledgments_from_upstream: Union[Unset, None, bool] = UNSET, - acknowledgments_name: Union[Unset, None, str] = UNSET, - acknowledgments_updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - acknowledgments_updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - acknowledgments_updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - acknowledgments_updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - acknowledgments_updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - acknowledgments_updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - acknowledgments_updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - acknowledgments_updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - acknowledgments_uuid: Union[Unset, None, str] = UNSET, 
- affects_affectedness: Union[ - Unset, None, OsidbApiV1FlawsListAffectsAffectedness - ] = UNSET, - affects_created_dt: Union[Unset, None, datetime.datetime] = UNSET, - affects_created_dt_date: Union[Unset, None, datetime.date] = UNSET, - affects_created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - affects_created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - affects_created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - affects_created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - affects_created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - affects_created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - affects_embargoed: Union[Unset, None, bool] = UNSET, - affects_impact: Union[Unset, None, OsidbApiV1FlawsListAffectsImpact] = UNSET, - affects_ps_component: Union[Unset, None, str] = UNSET, - affects_ps_module: Union[Unset, None, str] = UNSET, - affects_resolution: Union[ - Unset, None, OsidbApiV1FlawsListAffectsResolution - ] = UNSET, - affects_trackers_created_dt: Union[Unset, None, datetime.datetime] = UNSET, - affects_trackers_created_dt_date: Union[Unset, None, datetime.date] = UNSET, - affects_trackers_created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - affects_trackers_created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - affects_trackers_created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - affects_trackers_created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - affects_trackers_created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - affects_trackers_created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - affects_trackers_embargoed: Union[Unset, None, bool] = UNSET, - affects_trackers_errata_advisory_name: Union[Unset, None, str] = UNSET, - affects_trackers_errata_et_id: Union[Unset, None, int] = UNSET, - affects_trackers_errata_shipped_dt: Union[Unset, None, datetime.datetime] = UNSET, - affects_trackers_errata_shipped_dt_date: Union[Unset, None, datetime.date] = UNSET, - affects_trackers_errata_shipped_dt_date_gte: Union[ - Unset, None, datetime.date - ] = UNSET, - affects_trackers_errata_shipped_dt_date_lte: Union[ - Unset, None, datetime.date - ] = UNSET, - affects_trackers_errata_shipped_dt_gt: Union[ - Unset, None, datetime.datetime - ] = UNSET, - affects_trackers_errata_shipped_dt_gte: Union[ - Unset, None, datetime.datetime - ] = UNSET, - affects_trackers_errata_shipped_dt_lt: Union[ - Unset, None, datetime.datetime - ] = UNSET, - affects_trackers_errata_shipped_dt_lte: Union[ - Unset, None, datetime.datetime - ] = UNSET, - affects_trackers_external_system_id: Union[Unset, None, str] = UNSET, - affects_trackers_ps_update_stream: Union[Unset, None, str] = UNSET, - affects_trackers_resolution: Union[Unset, None, str] = UNSET, - affects_trackers_status: Union[Unset, None, str] = UNSET, - affects_trackers_type: Union[ - Unset, None, OsidbApiV1FlawsListAffectsTrackersType - ] = UNSET, - affects_trackers_updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - affects_trackers_updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - affects_trackers_updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - affects_trackers_updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - affects_trackers_updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - affects_trackers_updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - affects_trackers_updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - 
affects_trackers_updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - affects_trackers_uuid: Union[Unset, None, str] = UNSET, - affects_updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - affects_updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - affects_updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - affects_updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - affects_updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - affects_updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - affects_updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - affects_updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - affects_uuid: Union[Unset, None, str] = UNSET, - bz_id: Union[Unset, None, float] = UNSET, - changed_after: Union[Unset, None, datetime.datetime] = UNSET, - changed_before: Union[Unset, None, datetime.datetime] = UNSET, - comment_zero: Union[Unset, None, str] = UNSET, - components: Union[Unset, None, List[str]] = UNSET, - created_dt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_date: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - cve_description: Union[Unset, None, str] = UNSET, - cve_description_isempty: Union[Unset, None, bool] = UNSET, - cve_id: Union[Unset, None, List[str]] = UNSET, - cve_id_isempty: Union[Unset, None, bool] = UNSET, - cvss2_nist_isempty: Union[Unset, None, bool] = UNSET, - cvss2_rh_isempty: Union[Unset, None, bool] = UNSET, - cvss3_nist_isempty: Union[Unset, None, bool] = UNSET, - cvss3_rh_isempty: Union[Unset, None, bool] = UNSET, - cvss4_nist_isempty: Union[Unset, None, bool] = UNSET, - cvss4_rh_isempty: Union[Unset, None, bool] = UNSET, - cvss_scores_comment: Union[Unset, None, str] = UNSET, - cvss_scores_created_dt: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_created_dt_date: Union[Unset, None, datetime.date] = UNSET, - cvss_scores_created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - cvss_scores_created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - cvss_scores_created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_cvss_version: Union[Unset, None, str] = UNSET, - cvss_scores_issuer: Union[Unset, None, OsidbApiV1FlawsListCvssScoresIssuer] = UNSET, - cvss_scores_score: Union[Unset, None, float] = UNSET, - cvss_scores_updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - cvss_scores_updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - cvss_scores_updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - cvss_scores_updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_updated_dt_lte: Union[Unset, None, 
datetime.datetime] = UNSET, - cvss_scores_uuid: Union[Unset, None, str] = UNSET, - cvss_scores_vector: Union[Unset, None, str] = UNSET, - cwe_id: Union[Unset, None, str] = UNSET, - cwe_id_isempty: Union[Unset, None, bool] = UNSET, - embargoed: Union[Unset, None, bool] = UNSET, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - impact: Union[Unset, None, OsidbApiV1FlawsListImpact] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, - include_meta_attr: Union[Unset, None, List[str]] = UNSET, - limit: Union[Unset, None, int] = UNSET, - major_incident_start_dt: Union[Unset, None, datetime.datetime] = UNSET, - major_incident_start_dt_date: Union[Unset, None, datetime.date] = UNSET, - major_incident_start_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - major_incident_start_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - major_incident_start_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - major_incident_start_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - major_incident_start_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - major_incident_start_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - major_incident_state: Union[ - Unset, None, OsidbApiV1FlawsListMajorIncidentState - ] = UNSET, - mitigation_isempty: Union[Unset, None, bool] = UNSET, - nist_cvss_validation: Union[ - Unset, None, OsidbApiV1FlawsListNistCvssValidation - ] = UNSET, - offset: Union[Unset, None, int] = UNSET, - order: Union[Unset, None, List[OsidbApiV1FlawsListOrderItem]] = UNSET, - owner: Union[Unset, None, str] = UNSET, - owner_isempty: Union[Unset, None, bool] = UNSET, - query: Union[Unset, None, str] = UNSET, - references_created_dt: Union[Unset, None, datetime.datetime] = UNSET, - references_created_dt_date: Union[Unset, None, datetime.date] = UNSET, - references_created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - references_created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - references_created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - references_created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - references_created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - references_created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - references_description: Union[Unset, None, str] = UNSET, - references_type: Union[Unset, None, OsidbApiV1FlawsListReferencesType] = UNSET, - references_updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - references_updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - references_updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - references_updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - references_updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - references_updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - references_updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - references_updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - references_url: Union[Unset, None, str] = UNSET, - references_uuid: Union[Unset, None, str] = UNSET, - reported_dt: Union[Unset, None, datetime.datetime] = UNSET, - reported_dt_date: Union[Unset, None, datetime.date] = UNSET, - reported_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - reported_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - reported_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - reported_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - reported_dt_lt: Union[Unset, None, 
datetime.datetime] = UNSET, - reported_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - requires_cve_description: Union[ - Unset, None, OsidbApiV1FlawsListRequiresCveDescription - ] = UNSET, - search: Union[Unset, None, str] = UNSET, - source: Union[Unset, None, OsidbApiV1FlawsListSource] = UNSET, - statement: Union[Unset, None, str] = UNSET, - statement_isempty: Union[Unset, None, bool] = UNSET, - team_id: Union[Unset, None, str] = UNSET, - title: Union[Unset, None, str] = UNSET, - tracker_ids: Union[Unset, None, List[str]] = UNSET, - unembargo_dt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - uuid: Union[Unset, None, str] = UNSET, - workflow_state: Union[ - Unset, None, List[OsidbApiV1FlawsListWorkflowStateItem] - ] = UNSET, -) -> Dict[str, Any]: - url = "{}/osidb/api/v1/flaws".format( - client.base_url, - ) - - headers: Dict[str, Any] = client.get_headers() - - json_acknowledgments_created_dt: Union[Unset, None, str] = UNSET + acknowledgments_affiliation: Union[Unset, str] = UNSET, + acknowledgments_created_dt: Union[Unset, datetime.datetime] = UNSET, + acknowledgments_created_dt_date: Union[Unset, datetime.date] = UNSET, + acknowledgments_created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + acknowledgments_created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + acknowledgments_created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + acknowledgments_created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + acknowledgments_created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + acknowledgments_created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + acknowledgments_from_upstream: Union[Unset, bool] = UNSET, + acknowledgments_name: Union[Unset, str] = UNSET, + acknowledgments_updated_dt: Union[Unset, datetime.datetime] = UNSET, + acknowledgments_updated_dt_date: Union[Unset, datetime.date] = UNSET, + acknowledgments_updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + acknowledgments_updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + acknowledgments_updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + acknowledgments_updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + acknowledgments_updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + acknowledgments_updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + acknowledgments_uuid: Union[Unset, UUID] = UNSET, + affects_affectedness: Union[Unset, OsidbApiV1FlawsListAffectsAffectedness] = UNSET, + affects_created_dt: Union[Unset, datetime.datetime] = UNSET, + affects_created_dt_date: Union[Unset, datetime.date] = UNSET, + affects_created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + affects_created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + affects_created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + affects_created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + affects_created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + affects_created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + affects_embargoed: Union[Unset, bool] = UNSET, + 
affects_impact: Union[Unset, OsidbApiV1FlawsListAffectsImpact] = UNSET, + affects_ps_component: Union[Unset, str] = UNSET, + affects_ps_module: Union[Unset, str] = UNSET, + affects_resolution: Union[Unset, OsidbApiV1FlawsListAffectsResolution] = UNSET, + affects_trackers_created_dt: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_created_dt_date: Union[Unset, datetime.date] = UNSET, + affects_trackers_created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + affects_trackers_created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + affects_trackers_created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_embargoed: Union[Unset, bool] = UNSET, + affects_trackers_errata_advisory_name: Union[Unset, str] = UNSET, + affects_trackers_errata_et_id: Union[Unset, int] = UNSET, + affects_trackers_errata_shipped_dt: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_errata_shipped_dt_date: Union[Unset, datetime.date] = UNSET, + affects_trackers_errata_shipped_dt_date_gte: Union[Unset, datetime.date] = UNSET, + affects_trackers_errata_shipped_dt_date_lte: Union[Unset, datetime.date] = UNSET, + affects_trackers_errata_shipped_dt_gt: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_errata_shipped_dt_gte: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_errata_shipped_dt_lt: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_errata_shipped_dt_lte: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_external_system_id: Union[Unset, str] = UNSET, + affects_trackers_ps_update_stream: Union[Unset, str] = UNSET, + affects_trackers_resolution: Union[Unset, str] = UNSET, + affects_trackers_status: Union[Unset, str] = UNSET, + affects_trackers_type: Union[Unset, OsidbApiV1FlawsListAffectsTrackersType] = UNSET, + affects_trackers_updated_dt: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_updated_dt_date: Union[Unset, datetime.date] = UNSET, + affects_trackers_updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + affects_trackers_updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + affects_trackers_updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_uuid: Union[Unset, UUID] = UNSET, + affects_updated_dt: Union[Unset, datetime.datetime] = UNSET, + affects_updated_dt_date: Union[Unset, datetime.date] = UNSET, + affects_updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + affects_updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + affects_updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + affects_updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + affects_updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + affects_updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + affects_uuid: Union[Unset, UUID] = UNSET, + bz_id: Union[Unset, float] = UNSET, + changed_after: Union[Unset, datetime.datetime] = UNSET, + changed_before: Union[Unset, datetime.datetime] = UNSET, + comment_zero: Union[Unset, str] = UNSET, + components: Union[Unset, list[str]] = UNSET, + created_dt: Union[Unset, datetime.datetime] = UNSET, + 
created_dt_date: Union[Unset, datetime.date] = UNSET, + created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + cve_description: Union[Unset, str] = UNSET, + cve_description_isempty: Union[Unset, bool] = UNSET, + cve_id: Union[Unset, list[str]] = UNSET, + cve_id_isempty: Union[Unset, bool] = UNSET, + cvss2_nist_isempty: Union[Unset, bool] = UNSET, + cvss2_rh_isempty: Union[Unset, bool] = UNSET, + cvss3_nist_isempty: Union[Unset, bool] = UNSET, + cvss3_rh_isempty: Union[Unset, bool] = UNSET, + cvss4_nist_isempty: Union[Unset, bool] = UNSET, + cvss4_rh_isempty: Union[Unset, bool] = UNSET, + cvss_scores_comment: Union[Unset, str] = UNSET, + cvss_scores_created_dt: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_created_dt_date: Union[Unset, datetime.date] = UNSET, + cvss_scores_created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + cvss_scores_created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + cvss_scores_created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_cvss_version: Union[Unset, str] = UNSET, + cvss_scores_issuer: Union[Unset, OsidbApiV1FlawsListCvssScoresIssuer] = UNSET, + cvss_scores_score: Union[Unset, float] = UNSET, + cvss_scores_updated_dt: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_updated_dt_date: Union[Unset, datetime.date] = UNSET, + cvss_scores_updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + cvss_scores_updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + cvss_scores_updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_uuid: Union[Unset, UUID] = UNSET, + cvss_scores_vector: Union[Unset, str] = UNSET, + cwe_id: Union[Unset, str] = UNSET, + cwe_id_isempty: Union[Unset, bool] = UNSET, + embargoed: Union[Unset, bool] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + impact: Union[Unset, OsidbApiV1FlawsListImpact] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, + include_meta_attr: Union[Unset, list[str]] = UNSET, + limit: Union[Unset, int] = UNSET, + major_incident_start_dt: Union[Unset, datetime.datetime] = UNSET, + major_incident_start_dt_date: Union[Unset, datetime.date] = UNSET, + major_incident_start_dt_date_gte: Union[Unset, datetime.date] = UNSET, + major_incident_start_dt_date_lte: Union[Unset, datetime.date] = UNSET, + major_incident_start_dt_gt: Union[Unset, datetime.datetime] = UNSET, + major_incident_start_dt_gte: Union[Unset, datetime.datetime] = UNSET, + major_incident_start_dt_lt: Union[Unset, datetime.datetime] = UNSET, + major_incident_start_dt_lte: Union[Unset, datetime.datetime] = UNSET, + major_incident_state: Union[Unset, OsidbApiV1FlawsListMajorIncidentState] = UNSET, + mitigation_isempty: Union[Unset, bool] = UNSET, + nist_cvss_validation: Union[Unset, OsidbApiV1FlawsListNistCvssValidation] = UNSET, + offset: Union[Unset, int] = UNSET, + 
+    order: Union[Unset, list[OsidbApiV1FlawsListOrderItem]] = UNSET,
+    owner: Union[Unset, str] = UNSET,
+    owner_isempty: Union[Unset, bool] = UNSET,
+    query: Union[Unset, str] = UNSET,
+    references_created_dt: Union[Unset, datetime.datetime] = UNSET,
+    references_created_dt_date: Union[Unset, datetime.date] = UNSET,
+    references_created_dt_date_gte: Union[Unset, datetime.date] = UNSET,
+    references_created_dt_date_lte: Union[Unset, datetime.date] = UNSET,
+    references_created_dt_gt: Union[Unset, datetime.datetime] = UNSET,
+    references_created_dt_gte: Union[Unset, datetime.datetime] = UNSET,
+    references_created_dt_lt: Union[Unset, datetime.datetime] = UNSET,
+    references_created_dt_lte: Union[Unset, datetime.datetime] = UNSET,
+    references_description: Union[Unset, str] = UNSET,
+    references_type: Union[Unset, OsidbApiV1FlawsListReferencesType] = UNSET,
+    references_updated_dt: Union[Unset, datetime.datetime] = UNSET,
+    references_updated_dt_date: Union[Unset, datetime.date] = UNSET,
+    references_updated_dt_date_gte: Union[Unset, datetime.date] = UNSET,
+    references_updated_dt_date_lte: Union[Unset, datetime.date] = UNSET,
+    references_updated_dt_gt: Union[Unset, datetime.datetime] = UNSET,
+    references_updated_dt_gte: Union[Unset, datetime.datetime] = UNSET,
+    references_updated_dt_lt: Union[Unset, datetime.datetime] = UNSET,
+    references_updated_dt_lte: Union[Unset, datetime.datetime] = UNSET,
+    references_url: Union[Unset, str] = UNSET,
+    references_uuid: Union[Unset, UUID] = UNSET,
+    reported_dt: Union[Unset, datetime.datetime] = UNSET,
+    reported_dt_date: Union[Unset, datetime.date] = UNSET,
+    reported_dt_date_gte: Union[Unset, datetime.date] = UNSET,
+    reported_dt_date_lte: Union[Unset, datetime.date] = UNSET,
+    reported_dt_gt: Union[Unset, datetime.datetime] = UNSET,
+    reported_dt_gte: Union[Unset, datetime.datetime] = UNSET,
+    reported_dt_lt: Union[Unset, datetime.datetime] = UNSET,
+    reported_dt_lte: Union[Unset, datetime.datetime] = UNSET,
+    requires_cve_description: Union[Unset, OsidbApiV1FlawsListRequiresCveDescription] = UNSET,
+    search: Union[Unset, str] = UNSET,
+    source: Union[Unset, OsidbApiV1FlawsListSource] = UNSET,
+    statement: Union[Unset, str] = UNSET,
+    statement_isempty: Union[Unset, bool] = UNSET,
+    team_id: Union[Unset, str] = UNSET,
+    title: Union[Unset, str] = UNSET,
+    tracker_ids: Union[Unset, list[str]] = UNSET,
+    unembargo_dt: Union[Unset, datetime.datetime] = UNSET,
+    updated_dt: Union[Unset, datetime.datetime] = UNSET,
+    updated_dt_date: Union[Unset, datetime.date] = UNSET,
+    updated_dt_date_gte: Union[Unset, datetime.date] = UNSET,
+    updated_dt_date_lte: Union[Unset, datetime.date] = UNSET,
+    updated_dt_gt: Union[Unset, datetime.datetime] = UNSET,
+    updated_dt_gte: Union[Unset, datetime.datetime] = UNSET,
+    updated_dt_lt: Union[Unset, datetime.datetime] = UNSET,
+    updated_dt_lte: Union[Unset, datetime.datetime] = UNSET,
+    uuid: Union[Unset, UUID] = UNSET,
+    workflow_state: Union[Unset, list[OsidbApiV1FlawsListWorkflowStateItem]] = UNSET,
+) -> dict[str, Any]:
+    params: dict[str, Any] = {}
+
+    params["acknowledgments__affiliation"] = acknowledgments_affiliation
+
+    json_acknowledgments_created_dt: Union[Unset, str] = UNSET
     if not isinstance(acknowledgments_created_dt, Unset):
-        json_acknowledgments_created_dt = (
-            acknowledgments_created_dt.isoformat()
-            if acknowledgments_created_dt
-            else None
-        )
+        json_acknowledgments_created_dt = acknowledgments_created_dt.isoformat()
+
+    params["acknowledgments__created_dt"] = json_acknowledgments_created_dt
-
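
The hunk above repeats, with different field names, for every filter parameter in this module: the serialized value is typed Union[Unset, str] instead of Union[Unset, None, str], the per-parameter "if value ... else None" fallback is gone, and the params["..."] assignment now sits directly after its serializer. Below is a minimal, self-contained sketch of that pattern, illustrative only and not part of the generated patch; the Unset stand-in class, the build_params helper, and the final UNSET-filtering step are assumptions made for the sketch.

import datetime
from typing import Any, Union


class Unset:
    """Stand-in for the generated client's Unset sentinel type (assumption for this sketch)."""


UNSET = Unset()


def build_params(created_dt_gte: Union[Unset, datetime.datetime] = UNSET) -> dict[str, Any]:
    # Mirrors the regenerated pattern: once a value is known to be set,
    # it is serialized unconditionally, with no `else None` branch.
    params: dict[str, Any] = {}

    json_created_dt_gte: Union[Unset, str] = UNSET
    if not isinstance(created_dt_gte, Unset):
        json_created_dt_gte = created_dt_gte.isoformat()

    params["created_dt__gte"] = json_created_dt_gte

    # Hypothetical final step: drop UNSET entries before issuing the request
    # (the generated module handles this outside the hunks shown here).
    return {k: v for k, v in params.items() if not isinstance(v, Unset)}


print(build_params(created_dt_gte=datetime.datetime(2024, 12, 18, 18, 15)))  # {'created_dt__gte': '2024-12-18T18:15:00'}
print(build_params())  # {}

Called with a datetime the helper yields the isoformat query value; called with no arguments it yields an empty dict, which is the behaviour the simplified hunks appear to rely on once UNSET entries are dropped.
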
json_acknowledgments_created_dt_date: Union[Unset, None, str] = UNSET + json_acknowledgments_created_dt_date: Union[Unset, str] = UNSET if not isinstance(acknowledgments_created_dt_date, Unset): - json_acknowledgments_created_dt_date = ( - acknowledgments_created_dt_date.isoformat() - if acknowledgments_created_dt_date - else None - ) + json_acknowledgments_created_dt_date = acknowledgments_created_dt_date.isoformat() + + params["acknowledgments__created_dt__date"] = json_acknowledgments_created_dt_date - json_acknowledgments_created_dt_date_gte: Union[Unset, None, str] = UNSET + json_acknowledgments_created_dt_date_gte: Union[Unset, str] = UNSET if not isinstance(acknowledgments_created_dt_date_gte, Unset): - json_acknowledgments_created_dt_date_gte = ( - acknowledgments_created_dt_date_gte.isoformat() - if acknowledgments_created_dt_date_gte - else None - ) + json_acknowledgments_created_dt_date_gte = acknowledgments_created_dt_date_gte.isoformat() - json_acknowledgments_created_dt_date_lte: Union[Unset, None, str] = UNSET + params["acknowledgments__created_dt__date__gte"] = json_acknowledgments_created_dt_date_gte + + json_acknowledgments_created_dt_date_lte: Union[Unset, str] = UNSET if not isinstance(acknowledgments_created_dt_date_lte, Unset): - json_acknowledgments_created_dt_date_lte = ( - acknowledgments_created_dt_date_lte.isoformat() - if acknowledgments_created_dt_date_lte - else None - ) + json_acknowledgments_created_dt_date_lte = acknowledgments_created_dt_date_lte.isoformat() - json_acknowledgments_created_dt_gt: Union[Unset, None, str] = UNSET + params["acknowledgments__created_dt__date__lte"] = json_acknowledgments_created_dt_date_lte + + json_acknowledgments_created_dt_gt: Union[Unset, str] = UNSET if not isinstance(acknowledgments_created_dt_gt, Unset): - json_acknowledgments_created_dt_gt = ( - acknowledgments_created_dt_gt.isoformat() - if acknowledgments_created_dt_gt - else None - ) + json_acknowledgments_created_dt_gt = acknowledgments_created_dt_gt.isoformat() + + params["acknowledgments__created_dt__gt"] = json_acknowledgments_created_dt_gt - json_acknowledgments_created_dt_gte: Union[Unset, None, str] = UNSET + json_acknowledgments_created_dt_gte: Union[Unset, str] = UNSET if not isinstance(acknowledgments_created_dt_gte, Unset): - json_acknowledgments_created_dt_gte = ( - acknowledgments_created_dt_gte.isoformat() - if acknowledgments_created_dt_gte - else None - ) + json_acknowledgments_created_dt_gte = acknowledgments_created_dt_gte.isoformat() + + params["acknowledgments__created_dt__gte"] = json_acknowledgments_created_dt_gte - json_acknowledgments_created_dt_lt: Union[Unset, None, str] = UNSET + json_acknowledgments_created_dt_lt: Union[Unset, str] = UNSET if not isinstance(acknowledgments_created_dt_lt, Unset): - json_acknowledgments_created_dt_lt = ( - acknowledgments_created_dt_lt.isoformat() - if acknowledgments_created_dt_lt - else None - ) + json_acknowledgments_created_dt_lt = acknowledgments_created_dt_lt.isoformat() + + params["acknowledgments__created_dt__lt"] = json_acknowledgments_created_dt_lt - json_acknowledgments_created_dt_lte: Union[Unset, None, str] = UNSET + json_acknowledgments_created_dt_lte: Union[Unset, str] = UNSET if not isinstance(acknowledgments_created_dt_lte, Unset): - json_acknowledgments_created_dt_lte = ( - acknowledgments_created_dt_lte.isoformat() - if acknowledgments_created_dt_lte - else None - ) + json_acknowledgments_created_dt_lte = acknowledgments_created_dt_lte.isoformat() + + 
params["acknowledgments__created_dt__lte"] = json_acknowledgments_created_dt_lte + + params["acknowledgments__from_upstream"] = acknowledgments_from_upstream - json_acknowledgments_updated_dt: Union[Unset, None, str] = UNSET + params["acknowledgments__name"] = acknowledgments_name + + json_acknowledgments_updated_dt: Union[Unset, str] = UNSET if not isinstance(acknowledgments_updated_dt, Unset): - json_acknowledgments_updated_dt = ( - acknowledgments_updated_dt.isoformat() - if acknowledgments_updated_dt - else None - ) + json_acknowledgments_updated_dt = acknowledgments_updated_dt.isoformat() + + params["acknowledgments__updated_dt"] = json_acknowledgments_updated_dt - json_acknowledgments_updated_dt_date: Union[Unset, None, str] = UNSET + json_acknowledgments_updated_dt_date: Union[Unset, str] = UNSET if not isinstance(acknowledgments_updated_dt_date, Unset): - json_acknowledgments_updated_dt_date = ( - acknowledgments_updated_dt_date.isoformat() - if acknowledgments_updated_dt_date - else None - ) + json_acknowledgments_updated_dt_date = acknowledgments_updated_dt_date.isoformat() + + params["acknowledgments__updated_dt__date"] = json_acknowledgments_updated_dt_date - json_acknowledgments_updated_dt_date_gte: Union[Unset, None, str] = UNSET + json_acknowledgments_updated_dt_date_gte: Union[Unset, str] = UNSET if not isinstance(acknowledgments_updated_dt_date_gte, Unset): - json_acknowledgments_updated_dt_date_gte = ( - acknowledgments_updated_dt_date_gte.isoformat() - if acknowledgments_updated_dt_date_gte - else None - ) + json_acknowledgments_updated_dt_date_gte = acknowledgments_updated_dt_date_gte.isoformat() + + params["acknowledgments__updated_dt__date__gte"] = json_acknowledgments_updated_dt_date_gte - json_acknowledgments_updated_dt_date_lte: Union[Unset, None, str] = UNSET + json_acknowledgments_updated_dt_date_lte: Union[Unset, str] = UNSET if not isinstance(acknowledgments_updated_dt_date_lte, Unset): - json_acknowledgments_updated_dt_date_lte = ( - acknowledgments_updated_dt_date_lte.isoformat() - if acknowledgments_updated_dt_date_lte - else None - ) + json_acknowledgments_updated_dt_date_lte = acknowledgments_updated_dt_date_lte.isoformat() - json_acknowledgments_updated_dt_gt: Union[Unset, None, str] = UNSET + params["acknowledgments__updated_dt__date__lte"] = json_acknowledgments_updated_dt_date_lte + + json_acknowledgments_updated_dt_gt: Union[Unset, str] = UNSET if not isinstance(acknowledgments_updated_dt_gt, Unset): - json_acknowledgments_updated_dt_gt = ( - acknowledgments_updated_dt_gt.isoformat() - if acknowledgments_updated_dt_gt - else None - ) + json_acknowledgments_updated_dt_gt = acknowledgments_updated_dt_gt.isoformat() - json_acknowledgments_updated_dt_gte: Union[Unset, None, str] = UNSET + params["acknowledgments__updated_dt__gt"] = json_acknowledgments_updated_dt_gt + + json_acknowledgments_updated_dt_gte: Union[Unset, str] = UNSET if not isinstance(acknowledgments_updated_dt_gte, Unset): - json_acknowledgments_updated_dt_gte = ( - acknowledgments_updated_dt_gte.isoformat() - if acknowledgments_updated_dt_gte - else None - ) + json_acknowledgments_updated_dt_gte = acknowledgments_updated_dt_gte.isoformat() + + params["acknowledgments__updated_dt__gte"] = json_acknowledgments_updated_dt_gte - json_acknowledgments_updated_dt_lt: Union[Unset, None, str] = UNSET + json_acknowledgments_updated_dt_lt: Union[Unset, str] = UNSET if not isinstance(acknowledgments_updated_dt_lt, Unset): - json_acknowledgments_updated_dt_lt = ( - 
acknowledgments_updated_dt_lt.isoformat() - if acknowledgments_updated_dt_lt - else None - ) + json_acknowledgments_updated_dt_lt = acknowledgments_updated_dt_lt.isoformat() + + params["acknowledgments__updated_dt__lt"] = json_acknowledgments_updated_dt_lt - json_acknowledgments_updated_dt_lte: Union[Unset, None, str] = UNSET + json_acknowledgments_updated_dt_lte: Union[Unset, str] = UNSET if not isinstance(acknowledgments_updated_dt_lte, Unset): - json_acknowledgments_updated_dt_lte = ( - acknowledgments_updated_dt_lte.isoformat() - if acknowledgments_updated_dt_lte - else None - ) + json_acknowledgments_updated_dt_lte = acknowledgments_updated_dt_lte.isoformat() + + params["acknowledgments__updated_dt__lte"] = json_acknowledgments_updated_dt_lte + + json_acknowledgments_uuid: Union[Unset, str] = UNSET + if not isinstance(acknowledgments_uuid, Unset): + json_acknowledgments_uuid = str(acknowledgments_uuid) - json_affects_affectedness: Union[Unset, None, str] = UNSET + params["acknowledgments__uuid"] = json_acknowledgments_uuid + + json_affects_affectedness: Union[Unset, str] = UNSET if not isinstance(affects_affectedness, Unset): + json_affects_affectedness = OsidbApiV1FlawsListAffectsAffectedness(affects_affectedness).value - json_affects_affectedness = ( - OsidbApiV1FlawsListAffectsAffectedness(affects_affectedness).value - if affects_affectedness - else None - ) + params["affects__affectedness"] = json_affects_affectedness - json_affects_created_dt: Union[Unset, None, str] = UNSET + json_affects_created_dt: Union[Unset, str] = UNSET if not isinstance(affects_created_dt, Unset): - json_affects_created_dt = ( - affects_created_dt.isoformat() if affects_created_dt else None - ) + json_affects_created_dt = affects_created_dt.isoformat() + + params["affects__created_dt"] = json_affects_created_dt - json_affects_created_dt_date: Union[Unset, None, str] = UNSET + json_affects_created_dt_date: Union[Unset, str] = UNSET if not isinstance(affects_created_dt_date, Unset): - json_affects_created_dt_date = ( - affects_created_dt_date.isoformat() if affects_created_dt_date else None - ) + json_affects_created_dt_date = affects_created_dt_date.isoformat() - json_affects_created_dt_date_gte: Union[Unset, None, str] = UNSET + params["affects__created_dt__date"] = json_affects_created_dt_date + + json_affects_created_dt_date_gte: Union[Unset, str] = UNSET if not isinstance(affects_created_dt_date_gte, Unset): - json_affects_created_dt_date_gte = ( - affects_created_dt_date_gte.isoformat() - if affects_created_dt_date_gte - else None - ) + json_affects_created_dt_date_gte = affects_created_dt_date_gte.isoformat() - json_affects_created_dt_date_lte: Union[Unset, None, str] = UNSET + params["affects__created_dt__date__gte"] = json_affects_created_dt_date_gte + + json_affects_created_dt_date_lte: Union[Unset, str] = UNSET if not isinstance(affects_created_dt_date_lte, Unset): - json_affects_created_dt_date_lte = ( - affects_created_dt_date_lte.isoformat() - if affects_created_dt_date_lte - else None - ) + json_affects_created_dt_date_lte = affects_created_dt_date_lte.isoformat() + + params["affects__created_dt__date__lte"] = json_affects_created_dt_date_lte - json_affects_created_dt_gt: Union[Unset, None, str] = UNSET + json_affects_created_dt_gt: Union[Unset, str] = UNSET if not isinstance(affects_created_dt_gt, Unset): - json_affects_created_dt_gt = ( - affects_created_dt_gt.isoformat() if affects_created_dt_gt else None - ) + json_affects_created_dt_gt = affects_created_dt_gt.isoformat() + + 
params["affects__created_dt__gt"] = json_affects_created_dt_gt - json_affects_created_dt_gte: Union[Unset, None, str] = UNSET + json_affects_created_dt_gte: Union[Unset, str] = UNSET if not isinstance(affects_created_dt_gte, Unset): - json_affects_created_dt_gte = ( - affects_created_dt_gte.isoformat() if affects_created_dt_gte else None - ) + json_affects_created_dt_gte = affects_created_dt_gte.isoformat() + + params["affects__created_dt__gte"] = json_affects_created_dt_gte - json_affects_created_dt_lt: Union[Unset, None, str] = UNSET + json_affects_created_dt_lt: Union[Unset, str] = UNSET if not isinstance(affects_created_dt_lt, Unset): - json_affects_created_dt_lt = ( - affects_created_dt_lt.isoformat() if affects_created_dt_lt else None - ) + json_affects_created_dt_lt = affects_created_dt_lt.isoformat() - json_affects_created_dt_lte: Union[Unset, None, str] = UNSET + params["affects__created_dt__lt"] = json_affects_created_dt_lt + + json_affects_created_dt_lte: Union[Unset, str] = UNSET if not isinstance(affects_created_dt_lte, Unset): - json_affects_created_dt_lte = ( - affects_created_dt_lte.isoformat() if affects_created_dt_lte else None - ) + json_affects_created_dt_lte = affects_created_dt_lte.isoformat() - json_affects_impact: Union[Unset, None, str] = UNSET + params["affects__created_dt__lte"] = json_affects_created_dt_lte + + params["affects__embargoed"] = affects_embargoed + + json_affects_impact: Union[Unset, str] = UNSET if not isinstance(affects_impact, Unset): + json_affects_impact = OsidbApiV1FlawsListAffectsImpact(affects_impact).value - json_affects_impact = ( - OsidbApiV1FlawsListAffectsImpact(affects_impact).value - if affects_impact - else None - ) + params["affects__impact"] = json_affects_impact + + params["affects__ps_component"] = affects_ps_component + + params["affects__ps_module"] = affects_ps_module - json_affects_resolution: Union[Unset, None, str] = UNSET + json_affects_resolution: Union[Unset, str] = UNSET if not isinstance(affects_resolution, Unset): + json_affects_resolution = OsidbApiV1FlawsListAffectsResolution(affects_resolution).value - json_affects_resolution = ( - OsidbApiV1FlawsListAffectsResolution(affects_resolution).value - if affects_resolution - else None - ) + params["affects__resolution"] = json_affects_resolution - json_affects_trackers_created_dt: Union[Unset, None, str] = UNSET + json_affects_trackers_created_dt: Union[Unset, str] = UNSET if not isinstance(affects_trackers_created_dt, Unset): - json_affects_trackers_created_dt = ( - affects_trackers_created_dt.isoformat() - if affects_trackers_created_dt - else None - ) + json_affects_trackers_created_dt = affects_trackers_created_dt.isoformat() + + params["affects__trackers__created_dt"] = json_affects_trackers_created_dt - json_affects_trackers_created_dt_date: Union[Unset, None, str] = UNSET + json_affects_trackers_created_dt_date: Union[Unset, str] = UNSET if not isinstance(affects_trackers_created_dt_date, Unset): - json_affects_trackers_created_dt_date = ( - affects_trackers_created_dt_date.isoformat() - if affects_trackers_created_dt_date - else None - ) + json_affects_trackers_created_dt_date = affects_trackers_created_dt_date.isoformat() + + params["affects__trackers__created_dt__date"] = json_affects_trackers_created_dt_date - json_affects_trackers_created_dt_date_gte: Union[Unset, None, str] = UNSET + json_affects_trackers_created_dt_date_gte: Union[Unset, str] = UNSET if not isinstance(affects_trackers_created_dt_date_gte, Unset): - json_affects_trackers_created_dt_date_gte 
= ( - affects_trackers_created_dt_date_gte.isoformat() - if affects_trackers_created_dt_date_gte - else None - ) + json_affects_trackers_created_dt_date_gte = affects_trackers_created_dt_date_gte.isoformat() + + params["affects__trackers__created_dt__date__gte"] = json_affects_trackers_created_dt_date_gte - json_affects_trackers_created_dt_date_lte: Union[Unset, None, str] = UNSET + json_affects_trackers_created_dt_date_lte: Union[Unset, str] = UNSET if not isinstance(affects_trackers_created_dt_date_lte, Unset): - json_affects_trackers_created_dt_date_lte = ( - affects_trackers_created_dt_date_lte.isoformat() - if affects_trackers_created_dt_date_lte - else None - ) + json_affects_trackers_created_dt_date_lte = affects_trackers_created_dt_date_lte.isoformat() - json_affects_trackers_created_dt_gt: Union[Unset, None, str] = UNSET + params["affects__trackers__created_dt__date__lte"] = json_affects_trackers_created_dt_date_lte + + json_affects_trackers_created_dt_gt: Union[Unset, str] = UNSET if not isinstance(affects_trackers_created_dt_gt, Unset): - json_affects_trackers_created_dt_gt = ( - affects_trackers_created_dt_gt.isoformat() - if affects_trackers_created_dt_gt - else None - ) + json_affects_trackers_created_dt_gt = affects_trackers_created_dt_gt.isoformat() - json_affects_trackers_created_dt_gte: Union[Unset, None, str] = UNSET + params["affects__trackers__created_dt__gt"] = json_affects_trackers_created_dt_gt + + json_affects_trackers_created_dt_gte: Union[Unset, str] = UNSET if not isinstance(affects_trackers_created_dt_gte, Unset): - json_affects_trackers_created_dt_gte = ( - affects_trackers_created_dt_gte.isoformat() - if affects_trackers_created_dt_gte - else None - ) + json_affects_trackers_created_dt_gte = affects_trackers_created_dt_gte.isoformat() + + params["affects__trackers__created_dt__gte"] = json_affects_trackers_created_dt_gte - json_affects_trackers_created_dt_lt: Union[Unset, None, str] = UNSET + json_affects_trackers_created_dt_lt: Union[Unset, str] = UNSET if not isinstance(affects_trackers_created_dt_lt, Unset): - json_affects_trackers_created_dt_lt = ( - affects_trackers_created_dt_lt.isoformat() - if affects_trackers_created_dt_lt - else None - ) + json_affects_trackers_created_dt_lt = affects_trackers_created_dt_lt.isoformat() + + params["affects__trackers__created_dt__lt"] = json_affects_trackers_created_dt_lt - json_affects_trackers_created_dt_lte: Union[Unset, None, str] = UNSET + json_affects_trackers_created_dt_lte: Union[Unset, str] = UNSET if not isinstance(affects_trackers_created_dt_lte, Unset): - json_affects_trackers_created_dt_lte = ( - affects_trackers_created_dt_lte.isoformat() - if affects_trackers_created_dt_lte - else None - ) + json_affects_trackers_created_dt_lte = affects_trackers_created_dt_lte.isoformat() + + params["affects__trackers__created_dt__lte"] = json_affects_trackers_created_dt_lte + + params["affects__trackers__embargoed"] = affects_trackers_embargoed + + params["affects__trackers__errata__advisory_name"] = affects_trackers_errata_advisory_name - json_affects_trackers_errata_shipped_dt: Union[Unset, None, str] = UNSET + params["affects__trackers__errata__et_id"] = affects_trackers_errata_et_id + + json_affects_trackers_errata_shipped_dt: Union[Unset, str] = UNSET if not isinstance(affects_trackers_errata_shipped_dt, Unset): - json_affects_trackers_errata_shipped_dt = ( - affects_trackers_errata_shipped_dt.isoformat() - if affects_trackers_errata_shipped_dt - else None - ) + json_affects_trackers_errata_shipped_dt = 
affects_trackers_errata_shipped_dt.isoformat() + + params["affects__trackers__errata__shipped_dt"] = json_affects_trackers_errata_shipped_dt - json_affects_trackers_errata_shipped_dt_date: Union[Unset, None, str] = UNSET + json_affects_trackers_errata_shipped_dt_date: Union[Unset, str] = UNSET if not isinstance(affects_trackers_errata_shipped_dt_date, Unset): - json_affects_trackers_errata_shipped_dt_date = ( - affects_trackers_errata_shipped_dt_date.isoformat() - if affects_trackers_errata_shipped_dt_date - else None - ) + json_affects_trackers_errata_shipped_dt_date = affects_trackers_errata_shipped_dt_date.isoformat() + + params["affects__trackers__errata__shipped_dt__date"] = json_affects_trackers_errata_shipped_dt_date - json_affects_trackers_errata_shipped_dt_date_gte: Union[Unset, None, str] = UNSET + json_affects_trackers_errata_shipped_dt_date_gte: Union[Unset, str] = UNSET if not isinstance(affects_trackers_errata_shipped_dt_date_gte, Unset): - json_affects_trackers_errata_shipped_dt_date_gte = ( - affects_trackers_errata_shipped_dt_date_gte.isoformat() - if affects_trackers_errata_shipped_dt_date_gte - else None - ) + json_affects_trackers_errata_shipped_dt_date_gte = affects_trackers_errata_shipped_dt_date_gte.isoformat() + + params["affects__trackers__errata__shipped_dt__date__gte"] = json_affects_trackers_errata_shipped_dt_date_gte - json_affects_trackers_errata_shipped_dt_date_lte: Union[Unset, None, str] = UNSET + json_affects_trackers_errata_shipped_dt_date_lte: Union[Unset, str] = UNSET if not isinstance(affects_trackers_errata_shipped_dt_date_lte, Unset): - json_affects_trackers_errata_shipped_dt_date_lte = ( - affects_trackers_errata_shipped_dt_date_lte.isoformat() - if affects_trackers_errata_shipped_dt_date_lte - else None - ) + json_affects_trackers_errata_shipped_dt_date_lte = affects_trackers_errata_shipped_dt_date_lte.isoformat() - json_affects_trackers_errata_shipped_dt_gt: Union[Unset, None, str] = UNSET + params["affects__trackers__errata__shipped_dt__date__lte"] = json_affects_trackers_errata_shipped_dt_date_lte + + json_affects_trackers_errata_shipped_dt_gt: Union[Unset, str] = UNSET if not isinstance(affects_trackers_errata_shipped_dt_gt, Unset): - json_affects_trackers_errata_shipped_dt_gt = ( - affects_trackers_errata_shipped_dt_gt.isoformat() - if affects_trackers_errata_shipped_dt_gt - else None - ) + json_affects_trackers_errata_shipped_dt_gt = affects_trackers_errata_shipped_dt_gt.isoformat() - json_affects_trackers_errata_shipped_dt_gte: Union[Unset, None, str] = UNSET + params["affects__trackers__errata__shipped_dt__gt"] = json_affects_trackers_errata_shipped_dt_gt + + json_affects_trackers_errata_shipped_dt_gte: Union[Unset, str] = UNSET if not isinstance(affects_trackers_errata_shipped_dt_gte, Unset): - json_affects_trackers_errata_shipped_dt_gte = ( - affects_trackers_errata_shipped_dt_gte.isoformat() - if affects_trackers_errata_shipped_dt_gte - else None - ) + json_affects_trackers_errata_shipped_dt_gte = affects_trackers_errata_shipped_dt_gte.isoformat() + + params["affects__trackers__errata__shipped_dt__gte"] = json_affects_trackers_errata_shipped_dt_gte - json_affects_trackers_errata_shipped_dt_lt: Union[Unset, None, str] = UNSET + json_affects_trackers_errata_shipped_dt_lt: Union[Unset, str] = UNSET if not isinstance(affects_trackers_errata_shipped_dt_lt, Unset): - json_affects_trackers_errata_shipped_dt_lt = ( - affects_trackers_errata_shipped_dt_lt.isoformat() - if affects_trackers_errata_shipped_dt_lt - else None - ) + 
json_affects_trackers_errata_shipped_dt_lt = affects_trackers_errata_shipped_dt_lt.isoformat() + + params["affects__trackers__errata__shipped_dt__lt"] = json_affects_trackers_errata_shipped_dt_lt - json_affects_trackers_errata_shipped_dt_lte: Union[Unset, None, str] = UNSET + json_affects_trackers_errata_shipped_dt_lte: Union[Unset, str] = UNSET if not isinstance(affects_trackers_errata_shipped_dt_lte, Unset): - json_affects_trackers_errata_shipped_dt_lte = ( - affects_trackers_errata_shipped_dt_lte.isoformat() - if affects_trackers_errata_shipped_dt_lte - else None - ) + json_affects_trackers_errata_shipped_dt_lte = affects_trackers_errata_shipped_dt_lte.isoformat() + + params["affects__trackers__errata__shipped_dt__lte"] = json_affects_trackers_errata_shipped_dt_lte + + params["affects__trackers__external_system_id"] = affects_trackers_external_system_id + + params["affects__trackers__ps_update_stream"] = affects_trackers_ps_update_stream + + params["affects__trackers__resolution"] = affects_trackers_resolution - json_affects_trackers_type: Union[Unset, None, str] = UNSET + params["affects__trackers__status"] = affects_trackers_status + + json_affects_trackers_type: Union[Unset, str] = UNSET if not isinstance(affects_trackers_type, Unset): + json_affects_trackers_type = OsidbApiV1FlawsListAffectsTrackersType(affects_trackers_type).value - json_affects_trackers_type = ( - OsidbApiV1FlawsListAffectsTrackersType(affects_trackers_type).value - if affects_trackers_type - else None - ) + params["affects__trackers__type"] = json_affects_trackers_type - json_affects_trackers_updated_dt: Union[Unset, None, str] = UNSET + json_affects_trackers_updated_dt: Union[Unset, str] = UNSET if not isinstance(affects_trackers_updated_dt, Unset): - json_affects_trackers_updated_dt = ( - affects_trackers_updated_dt.isoformat() - if affects_trackers_updated_dt - else None - ) + json_affects_trackers_updated_dt = affects_trackers_updated_dt.isoformat() - json_affects_trackers_updated_dt_date: Union[Unset, None, str] = UNSET + params["affects__trackers__updated_dt"] = json_affects_trackers_updated_dt + + json_affects_trackers_updated_dt_date: Union[Unset, str] = UNSET if not isinstance(affects_trackers_updated_dt_date, Unset): - json_affects_trackers_updated_dt_date = ( - affects_trackers_updated_dt_date.isoformat() - if affects_trackers_updated_dt_date - else None - ) + json_affects_trackers_updated_dt_date = affects_trackers_updated_dt_date.isoformat() - json_affects_trackers_updated_dt_date_gte: Union[Unset, None, str] = UNSET + params["affects__trackers__updated_dt__date"] = json_affects_trackers_updated_dt_date + + json_affects_trackers_updated_dt_date_gte: Union[Unset, str] = UNSET if not isinstance(affects_trackers_updated_dt_date_gte, Unset): - json_affects_trackers_updated_dt_date_gte = ( - affects_trackers_updated_dt_date_gte.isoformat() - if affects_trackers_updated_dt_date_gte - else None - ) + json_affects_trackers_updated_dt_date_gte = affects_trackers_updated_dt_date_gte.isoformat() + + params["affects__trackers__updated_dt__date__gte"] = json_affects_trackers_updated_dt_date_gte - json_affects_trackers_updated_dt_date_lte: Union[Unset, None, str] = UNSET + json_affects_trackers_updated_dt_date_lte: Union[Unset, str] = UNSET if not isinstance(affects_trackers_updated_dt_date_lte, Unset): - json_affects_trackers_updated_dt_date_lte = ( - affects_trackers_updated_dt_date_lte.isoformat() - if affects_trackers_updated_dt_date_lte - else None - ) + json_affects_trackers_updated_dt_date_lte = 
affects_trackers_updated_dt_date_lte.isoformat() + + params["affects__trackers__updated_dt__date__lte"] = json_affects_trackers_updated_dt_date_lte - json_affects_trackers_updated_dt_gt: Union[Unset, None, str] = UNSET + json_affects_trackers_updated_dt_gt: Union[Unset, str] = UNSET if not isinstance(affects_trackers_updated_dt_gt, Unset): - json_affects_trackers_updated_dt_gt = ( - affects_trackers_updated_dt_gt.isoformat() - if affects_trackers_updated_dt_gt - else None - ) + json_affects_trackers_updated_dt_gt = affects_trackers_updated_dt_gt.isoformat() + + params["affects__trackers__updated_dt__gt"] = json_affects_trackers_updated_dt_gt - json_affects_trackers_updated_dt_gte: Union[Unset, None, str] = UNSET + json_affects_trackers_updated_dt_gte: Union[Unset, str] = UNSET if not isinstance(affects_trackers_updated_dt_gte, Unset): - json_affects_trackers_updated_dt_gte = ( - affects_trackers_updated_dt_gte.isoformat() - if affects_trackers_updated_dt_gte - else None - ) + json_affects_trackers_updated_dt_gte = affects_trackers_updated_dt_gte.isoformat() - json_affects_trackers_updated_dt_lt: Union[Unset, None, str] = UNSET + params["affects__trackers__updated_dt__gte"] = json_affects_trackers_updated_dt_gte + + json_affects_trackers_updated_dt_lt: Union[Unset, str] = UNSET if not isinstance(affects_trackers_updated_dt_lt, Unset): - json_affects_trackers_updated_dt_lt = ( - affects_trackers_updated_dt_lt.isoformat() - if affects_trackers_updated_dt_lt - else None - ) + json_affects_trackers_updated_dt_lt = affects_trackers_updated_dt_lt.isoformat() - json_affects_trackers_updated_dt_lte: Union[Unset, None, str] = UNSET + params["affects__trackers__updated_dt__lt"] = json_affects_trackers_updated_dt_lt + + json_affects_trackers_updated_dt_lte: Union[Unset, str] = UNSET if not isinstance(affects_trackers_updated_dt_lte, Unset): - json_affects_trackers_updated_dt_lte = ( - affects_trackers_updated_dt_lte.isoformat() - if affects_trackers_updated_dt_lte - else None - ) + json_affects_trackers_updated_dt_lte = affects_trackers_updated_dt_lte.isoformat() + + params["affects__trackers__updated_dt__lte"] = json_affects_trackers_updated_dt_lte + + json_affects_trackers_uuid: Union[Unset, str] = UNSET + if not isinstance(affects_trackers_uuid, Unset): + json_affects_trackers_uuid = str(affects_trackers_uuid) - json_affects_updated_dt: Union[Unset, None, str] = UNSET + params["affects__trackers__uuid"] = json_affects_trackers_uuid + + json_affects_updated_dt: Union[Unset, str] = UNSET if not isinstance(affects_updated_dt, Unset): - json_affects_updated_dt = ( - affects_updated_dt.isoformat() if affects_updated_dt else None - ) + json_affects_updated_dt = affects_updated_dt.isoformat() + + params["affects__updated_dt"] = json_affects_updated_dt - json_affects_updated_dt_date: Union[Unset, None, str] = UNSET + json_affects_updated_dt_date: Union[Unset, str] = UNSET if not isinstance(affects_updated_dt_date, Unset): - json_affects_updated_dt_date = ( - affects_updated_dt_date.isoformat() if affects_updated_dt_date else None - ) + json_affects_updated_dt_date = affects_updated_dt_date.isoformat() + + params["affects__updated_dt__date"] = json_affects_updated_dt_date - json_affects_updated_dt_date_gte: Union[Unset, None, str] = UNSET + json_affects_updated_dt_date_gte: Union[Unset, str] = UNSET if not isinstance(affects_updated_dt_date_gte, Unset): - json_affects_updated_dt_date_gte = ( - affects_updated_dt_date_gte.isoformat() - if affects_updated_dt_date_gte - else None - ) + 
json_affects_updated_dt_date_gte = affects_updated_dt_date_gte.isoformat() + + params["affects__updated_dt__date__gte"] = json_affects_updated_dt_date_gte - json_affects_updated_dt_date_lte: Union[Unset, None, str] = UNSET + json_affects_updated_dt_date_lte: Union[Unset, str] = UNSET if not isinstance(affects_updated_dt_date_lte, Unset): - json_affects_updated_dt_date_lte = ( - affects_updated_dt_date_lte.isoformat() - if affects_updated_dt_date_lte - else None - ) + json_affects_updated_dt_date_lte = affects_updated_dt_date_lte.isoformat() - json_affects_updated_dt_gt: Union[Unset, None, str] = UNSET + params["affects__updated_dt__date__lte"] = json_affects_updated_dt_date_lte + + json_affects_updated_dt_gt: Union[Unset, str] = UNSET if not isinstance(affects_updated_dt_gt, Unset): - json_affects_updated_dt_gt = ( - affects_updated_dt_gt.isoformat() if affects_updated_dt_gt else None - ) + json_affects_updated_dt_gt = affects_updated_dt_gt.isoformat() - json_affects_updated_dt_gte: Union[Unset, None, str] = UNSET + params["affects__updated_dt__gt"] = json_affects_updated_dt_gt + + json_affects_updated_dt_gte: Union[Unset, str] = UNSET if not isinstance(affects_updated_dt_gte, Unset): - json_affects_updated_dt_gte = ( - affects_updated_dt_gte.isoformat() if affects_updated_dt_gte else None - ) + json_affects_updated_dt_gte = affects_updated_dt_gte.isoformat() + + params["affects__updated_dt__gte"] = json_affects_updated_dt_gte - json_affects_updated_dt_lt: Union[Unset, None, str] = UNSET + json_affects_updated_dt_lt: Union[Unset, str] = UNSET if not isinstance(affects_updated_dt_lt, Unset): - json_affects_updated_dt_lt = ( - affects_updated_dt_lt.isoformat() if affects_updated_dt_lt else None - ) + json_affects_updated_dt_lt = affects_updated_dt_lt.isoformat() + + params["affects__updated_dt__lt"] = json_affects_updated_dt_lt - json_affects_updated_dt_lte: Union[Unset, None, str] = UNSET + json_affects_updated_dt_lte: Union[Unset, str] = UNSET if not isinstance(affects_updated_dt_lte, Unset): - json_affects_updated_dt_lte = ( - affects_updated_dt_lte.isoformat() if affects_updated_dt_lte else None - ) + json_affects_updated_dt_lte = affects_updated_dt_lte.isoformat() + + params["affects__updated_dt__lte"] = json_affects_updated_dt_lte + + json_affects_uuid: Union[Unset, str] = UNSET + if not isinstance(affects_uuid, Unset): + json_affects_uuid = str(affects_uuid) + + params["affects__uuid"] = json_affects_uuid + + params["bz_id"] = bz_id - json_changed_after: Union[Unset, None, str] = UNSET + json_changed_after: Union[Unset, str] = UNSET if not isinstance(changed_after, Unset): - json_changed_after = changed_after.isoformat() if changed_after else None + json_changed_after = changed_after.isoformat() - json_changed_before: Union[Unset, None, str] = UNSET + params["changed_after"] = json_changed_after + + json_changed_before: Union[Unset, str] = UNSET if not isinstance(changed_before, Unset): - json_changed_before = changed_before.isoformat() if changed_before else None + json_changed_before = changed_before.isoformat() + + params["changed_before"] = json_changed_before + + params["comment_zero"] = comment_zero - json_components: Union[Unset, None, List[str]] = UNSET + json_components: Union[Unset, list[str]] = UNSET if not isinstance(components, Unset): - if components is None: - json_components = None - else: - json_components = components + json_components = components + + params["components"] = json_components - json_created_dt: Union[Unset, None, str] = UNSET + json_created_dt: 
Union[Unset, str] = UNSET if not isinstance(created_dt, Unset): - json_created_dt = created_dt.isoformat() if created_dt else None + json_created_dt = created_dt.isoformat() - json_created_dt_date: Union[Unset, None, str] = UNSET + params["created_dt"] = json_created_dt + + json_created_dt_date: Union[Unset, str] = UNSET if not isinstance(created_dt_date, Unset): - json_created_dt_date = created_dt_date.isoformat() if created_dt_date else None + json_created_dt_date = created_dt_date.isoformat() + + params["created_dt__date"] = json_created_dt_date - json_created_dt_date_gte: Union[Unset, None, str] = UNSET + json_created_dt_date_gte: Union[Unset, str] = UNSET if not isinstance(created_dt_date_gte, Unset): - json_created_dt_date_gte = ( - created_dt_date_gte.isoformat() if created_dt_date_gte else None - ) + json_created_dt_date_gte = created_dt_date_gte.isoformat() - json_created_dt_date_lte: Union[Unset, None, str] = UNSET + params["created_dt__date__gte"] = json_created_dt_date_gte + + json_created_dt_date_lte: Union[Unset, str] = UNSET if not isinstance(created_dt_date_lte, Unset): - json_created_dt_date_lte = ( - created_dt_date_lte.isoformat() if created_dt_date_lte else None - ) + json_created_dt_date_lte = created_dt_date_lte.isoformat() - json_created_dt_gt: Union[Unset, None, str] = UNSET + params["created_dt__date__lte"] = json_created_dt_date_lte + + json_created_dt_gt: Union[Unset, str] = UNSET if not isinstance(created_dt_gt, Unset): - json_created_dt_gt = created_dt_gt.isoformat() if created_dt_gt else None + json_created_dt_gt = created_dt_gt.isoformat() + + params["created_dt__gt"] = json_created_dt_gt - json_created_dt_gte: Union[Unset, None, str] = UNSET + json_created_dt_gte: Union[Unset, str] = UNSET if not isinstance(created_dt_gte, Unset): - json_created_dt_gte = created_dt_gte.isoformat() if created_dt_gte else None + json_created_dt_gte = created_dt_gte.isoformat() + + params["created_dt__gte"] = json_created_dt_gte - json_created_dt_lt: Union[Unset, None, str] = UNSET + json_created_dt_lt: Union[Unset, str] = UNSET if not isinstance(created_dt_lt, Unset): - json_created_dt_lt = created_dt_lt.isoformat() if created_dt_lt else None + json_created_dt_lt = created_dt_lt.isoformat() - json_created_dt_lte: Union[Unset, None, str] = UNSET + params["created_dt__lt"] = json_created_dt_lt + + json_created_dt_lte: Union[Unset, str] = UNSET if not isinstance(created_dt_lte, Unset): - json_created_dt_lte = created_dt_lte.isoformat() if created_dt_lte else None + json_created_dt_lte = created_dt_lte.isoformat() + + params["created_dt__lte"] = json_created_dt_lte - json_cve_id: Union[Unset, None, List[str]] = UNSET + params["cve_description"] = cve_description + + params["cve_description__isempty"] = cve_description_isempty + + json_cve_id: Union[Unset, list[str]] = UNSET if not isinstance(cve_id, Unset): - if cve_id is None: - json_cve_id = None - else: - json_cve_id = cve_id + json_cve_id = cve_id + + params["cve_id"] = json_cve_id - json_cvss_scores_created_dt: Union[Unset, None, str] = UNSET + params["cve_id__isempty"] = cve_id_isempty + + params["cvss2_nist__isempty"] = cvss2_nist_isempty + + params["cvss2_rh__isempty"] = cvss2_rh_isempty + + params["cvss3_nist__isempty"] = cvss3_nist_isempty + + params["cvss3_rh__isempty"] = cvss3_rh_isempty + + params["cvss4_nist__isempty"] = cvss4_nist_isempty + + params["cvss4_rh__isempty"] = cvss4_rh_isempty + + params["cvss_scores__comment"] = cvss_scores_comment + + json_cvss_scores_created_dt: Union[Unset, str] = UNSET if not 
isinstance(cvss_scores_created_dt, Unset): - json_cvss_scores_created_dt = ( - cvss_scores_created_dt.isoformat() if cvss_scores_created_dt else None - ) + json_cvss_scores_created_dt = cvss_scores_created_dt.isoformat() + + params["cvss_scores__created_dt"] = json_cvss_scores_created_dt - json_cvss_scores_created_dt_date: Union[Unset, None, str] = UNSET + json_cvss_scores_created_dt_date: Union[Unset, str] = UNSET if not isinstance(cvss_scores_created_dt_date, Unset): - json_cvss_scores_created_dt_date = ( - cvss_scores_created_dt_date.isoformat() - if cvss_scores_created_dt_date - else None - ) + json_cvss_scores_created_dt_date = cvss_scores_created_dt_date.isoformat() + + params["cvss_scores__created_dt__date"] = json_cvss_scores_created_dt_date - json_cvss_scores_created_dt_date_gte: Union[Unset, None, str] = UNSET + json_cvss_scores_created_dt_date_gte: Union[Unset, str] = UNSET if not isinstance(cvss_scores_created_dt_date_gte, Unset): - json_cvss_scores_created_dt_date_gte = ( - cvss_scores_created_dt_date_gte.isoformat() - if cvss_scores_created_dt_date_gte - else None - ) + json_cvss_scores_created_dt_date_gte = cvss_scores_created_dt_date_gte.isoformat() - json_cvss_scores_created_dt_date_lte: Union[Unset, None, str] = UNSET + params["cvss_scores__created_dt__date__gte"] = json_cvss_scores_created_dt_date_gte + + json_cvss_scores_created_dt_date_lte: Union[Unset, str] = UNSET if not isinstance(cvss_scores_created_dt_date_lte, Unset): - json_cvss_scores_created_dt_date_lte = ( - cvss_scores_created_dt_date_lte.isoformat() - if cvss_scores_created_dt_date_lte - else None - ) + json_cvss_scores_created_dt_date_lte = cvss_scores_created_dt_date_lte.isoformat() - json_cvss_scores_created_dt_gt: Union[Unset, None, str] = UNSET + params["cvss_scores__created_dt__date__lte"] = json_cvss_scores_created_dt_date_lte + + json_cvss_scores_created_dt_gt: Union[Unset, str] = UNSET if not isinstance(cvss_scores_created_dt_gt, Unset): - json_cvss_scores_created_dt_gt = ( - cvss_scores_created_dt_gt.isoformat() if cvss_scores_created_dt_gt else None - ) + json_cvss_scores_created_dt_gt = cvss_scores_created_dt_gt.isoformat() + + params["cvss_scores__created_dt__gt"] = json_cvss_scores_created_dt_gt - json_cvss_scores_created_dt_gte: Union[Unset, None, str] = UNSET + json_cvss_scores_created_dt_gte: Union[Unset, str] = UNSET if not isinstance(cvss_scores_created_dt_gte, Unset): - json_cvss_scores_created_dt_gte = ( - cvss_scores_created_dt_gte.isoformat() - if cvss_scores_created_dt_gte - else None - ) + json_cvss_scores_created_dt_gte = cvss_scores_created_dt_gte.isoformat() + + params["cvss_scores__created_dt__gte"] = json_cvss_scores_created_dt_gte - json_cvss_scores_created_dt_lt: Union[Unset, None, str] = UNSET + json_cvss_scores_created_dt_lt: Union[Unset, str] = UNSET if not isinstance(cvss_scores_created_dt_lt, Unset): - json_cvss_scores_created_dt_lt = ( - cvss_scores_created_dt_lt.isoformat() if cvss_scores_created_dt_lt else None - ) + json_cvss_scores_created_dt_lt = cvss_scores_created_dt_lt.isoformat() + + params["cvss_scores__created_dt__lt"] = json_cvss_scores_created_dt_lt - json_cvss_scores_created_dt_lte: Union[Unset, None, str] = UNSET + json_cvss_scores_created_dt_lte: Union[Unset, str] = UNSET if not isinstance(cvss_scores_created_dt_lte, Unset): - json_cvss_scores_created_dt_lte = ( - cvss_scores_created_dt_lte.isoformat() - if cvss_scores_created_dt_lte - else None - ) + json_cvss_scores_created_dt_lte = cvss_scores_created_dt_lte.isoformat() + + 
params["cvss_scores__created_dt__lte"] = json_cvss_scores_created_dt_lte + + params["cvss_scores__cvss_version"] = cvss_scores_cvss_version - json_cvss_scores_issuer: Union[Unset, None, str] = UNSET + json_cvss_scores_issuer: Union[Unset, str] = UNSET if not isinstance(cvss_scores_issuer, Unset): + json_cvss_scores_issuer = OsidbApiV1FlawsListCvssScoresIssuer(cvss_scores_issuer).value - json_cvss_scores_issuer = ( - OsidbApiV1FlawsListCvssScoresIssuer(cvss_scores_issuer).value - if cvss_scores_issuer - else None - ) + params["cvss_scores__issuer"] = json_cvss_scores_issuer + + params["cvss_scores__score"] = cvss_scores_score - json_cvss_scores_updated_dt: Union[Unset, None, str] = UNSET + json_cvss_scores_updated_dt: Union[Unset, str] = UNSET if not isinstance(cvss_scores_updated_dt, Unset): - json_cvss_scores_updated_dt = ( - cvss_scores_updated_dt.isoformat() if cvss_scores_updated_dt else None - ) + json_cvss_scores_updated_dt = cvss_scores_updated_dt.isoformat() - json_cvss_scores_updated_dt_date: Union[Unset, None, str] = UNSET + params["cvss_scores__updated_dt"] = json_cvss_scores_updated_dt + + json_cvss_scores_updated_dt_date: Union[Unset, str] = UNSET if not isinstance(cvss_scores_updated_dt_date, Unset): - json_cvss_scores_updated_dt_date = ( - cvss_scores_updated_dt_date.isoformat() - if cvss_scores_updated_dt_date - else None - ) + json_cvss_scores_updated_dt_date = cvss_scores_updated_dt_date.isoformat() - json_cvss_scores_updated_dt_date_gte: Union[Unset, None, str] = UNSET + params["cvss_scores__updated_dt__date"] = json_cvss_scores_updated_dt_date + + json_cvss_scores_updated_dt_date_gte: Union[Unset, str] = UNSET if not isinstance(cvss_scores_updated_dt_date_gte, Unset): - json_cvss_scores_updated_dt_date_gte = ( - cvss_scores_updated_dt_date_gte.isoformat() - if cvss_scores_updated_dt_date_gte - else None - ) + json_cvss_scores_updated_dt_date_gte = cvss_scores_updated_dt_date_gte.isoformat() + + params["cvss_scores__updated_dt__date__gte"] = json_cvss_scores_updated_dt_date_gte - json_cvss_scores_updated_dt_date_lte: Union[Unset, None, str] = UNSET + json_cvss_scores_updated_dt_date_lte: Union[Unset, str] = UNSET if not isinstance(cvss_scores_updated_dt_date_lte, Unset): - json_cvss_scores_updated_dt_date_lte = ( - cvss_scores_updated_dt_date_lte.isoformat() - if cvss_scores_updated_dt_date_lte - else None - ) + json_cvss_scores_updated_dt_date_lte = cvss_scores_updated_dt_date_lte.isoformat() + + params["cvss_scores__updated_dt__date__lte"] = json_cvss_scores_updated_dt_date_lte - json_cvss_scores_updated_dt_gt: Union[Unset, None, str] = UNSET + json_cvss_scores_updated_dt_gt: Union[Unset, str] = UNSET if not isinstance(cvss_scores_updated_dt_gt, Unset): - json_cvss_scores_updated_dt_gt = ( - cvss_scores_updated_dt_gt.isoformat() if cvss_scores_updated_dt_gt else None - ) + json_cvss_scores_updated_dt_gt = cvss_scores_updated_dt_gt.isoformat() + + params["cvss_scores__updated_dt__gt"] = json_cvss_scores_updated_dt_gt - json_cvss_scores_updated_dt_gte: Union[Unset, None, str] = UNSET + json_cvss_scores_updated_dt_gte: Union[Unset, str] = UNSET if not isinstance(cvss_scores_updated_dt_gte, Unset): - json_cvss_scores_updated_dt_gte = ( - cvss_scores_updated_dt_gte.isoformat() - if cvss_scores_updated_dt_gte - else None - ) + json_cvss_scores_updated_dt_gte = cvss_scores_updated_dt_gte.isoformat() - json_cvss_scores_updated_dt_lt: Union[Unset, None, str] = UNSET + params["cvss_scores__updated_dt__gte"] = json_cvss_scores_updated_dt_gte + + json_cvss_scores_updated_dt_lt: 
Union[Unset, str] = UNSET if not isinstance(cvss_scores_updated_dt_lt, Unset): - json_cvss_scores_updated_dt_lt = ( - cvss_scores_updated_dt_lt.isoformat() if cvss_scores_updated_dt_lt else None - ) + json_cvss_scores_updated_dt_lt = cvss_scores_updated_dt_lt.isoformat() - json_cvss_scores_updated_dt_lte: Union[Unset, None, str] = UNSET + params["cvss_scores__updated_dt__lt"] = json_cvss_scores_updated_dt_lt + + json_cvss_scores_updated_dt_lte: Union[Unset, str] = UNSET if not isinstance(cvss_scores_updated_dt_lte, Unset): - json_cvss_scores_updated_dt_lte = ( - cvss_scores_updated_dt_lte.isoformat() - if cvss_scores_updated_dt_lte - else None - ) + json_cvss_scores_updated_dt_lte = cvss_scores_updated_dt_lte.isoformat() + + params["cvss_scores__updated_dt__lte"] = json_cvss_scores_updated_dt_lte + + json_cvss_scores_uuid: Union[Unset, str] = UNSET + if not isinstance(cvss_scores_uuid, Unset): + json_cvss_scores_uuid = str(cvss_scores_uuid) + + params["cvss_scores__uuid"] = json_cvss_scores_uuid - json_exclude_fields: Union[Unset, None, List[str]] = UNSET + params["cvss_scores__vector"] = cvss_scores_vector + + params["cwe_id"] = cwe_id + + params["cwe_id__isempty"] = cwe_id_isempty + + params["embargoed"] = embargoed + + json_exclude_fields: Union[Unset, list[str]] = UNSET if not isinstance(exclude_fields, Unset): - if exclude_fields is None: - json_exclude_fields = None - else: - json_exclude_fields = exclude_fields + json_exclude_fields = exclude_fields - json_impact: Union[Unset, None, str] = UNSET + params["exclude_fields"] = json_exclude_fields + + json_impact: Union[Unset, str] = UNSET if not isinstance(impact, Unset): + json_impact = OsidbApiV1FlawsListImpact(impact).value - json_impact = OsidbApiV1FlawsListImpact(impact).value if impact else None + params["impact"] = json_impact - json_include_fields: Union[Unset, None, List[str]] = UNSET + json_include_fields: Union[Unset, list[str]] = UNSET if not isinstance(include_fields, Unset): - if include_fields is None: - json_include_fields = None - else: - json_include_fields = include_fields + json_include_fields = include_fields + + params["include_fields"] = json_include_fields - json_include_meta_attr: Union[Unset, None, List[str]] = UNSET + json_include_meta_attr: Union[Unset, list[str]] = UNSET if not isinstance(include_meta_attr, Unset): - if include_meta_attr is None: - json_include_meta_attr = None - else: - json_include_meta_attr = include_meta_attr + json_include_meta_attr = include_meta_attr + + params["include_meta_attr"] = json_include_meta_attr - json_major_incident_start_dt: Union[Unset, None, str] = UNSET + params["limit"] = limit + + json_major_incident_start_dt: Union[Unset, str] = UNSET if not isinstance(major_incident_start_dt, Unset): - json_major_incident_start_dt = ( - major_incident_start_dt.isoformat() if major_incident_start_dt else None - ) + json_major_incident_start_dt = major_incident_start_dt.isoformat() + + params["major_incident_start_dt"] = json_major_incident_start_dt - json_major_incident_start_dt_date: Union[Unset, None, str] = UNSET + json_major_incident_start_dt_date: Union[Unset, str] = UNSET if not isinstance(major_incident_start_dt_date, Unset): - json_major_incident_start_dt_date = ( - major_incident_start_dt_date.isoformat() - if major_incident_start_dt_date - else None - ) + json_major_incident_start_dt_date = major_incident_start_dt_date.isoformat() + + params["major_incident_start_dt__date"] = json_major_incident_start_dt_date - json_major_incident_start_dt_date_gte: Union[Unset, None, 
str] = UNSET + json_major_incident_start_dt_date_gte: Union[Unset, str] = UNSET if not isinstance(major_incident_start_dt_date_gte, Unset): - json_major_incident_start_dt_date_gte = ( - major_incident_start_dt_date_gte.isoformat() - if major_incident_start_dt_date_gte - else None - ) + json_major_incident_start_dt_date_gte = major_incident_start_dt_date_gte.isoformat() + + params["major_incident_start_dt__date__gte"] = json_major_incident_start_dt_date_gte - json_major_incident_start_dt_date_lte: Union[Unset, None, str] = UNSET + json_major_incident_start_dt_date_lte: Union[Unset, str] = UNSET if not isinstance(major_incident_start_dt_date_lte, Unset): - json_major_incident_start_dt_date_lte = ( - major_incident_start_dt_date_lte.isoformat() - if major_incident_start_dt_date_lte - else None - ) + json_major_incident_start_dt_date_lte = major_incident_start_dt_date_lte.isoformat() - json_major_incident_start_dt_gt: Union[Unset, None, str] = UNSET + params["major_incident_start_dt__date__lte"] = json_major_incident_start_dt_date_lte + + json_major_incident_start_dt_gt: Union[Unset, str] = UNSET if not isinstance(major_incident_start_dt_gt, Unset): - json_major_incident_start_dt_gt = ( - major_incident_start_dt_gt.isoformat() - if major_incident_start_dt_gt - else None - ) + json_major_incident_start_dt_gt = major_incident_start_dt_gt.isoformat() - json_major_incident_start_dt_gte: Union[Unset, None, str] = UNSET + params["major_incident_start_dt__gt"] = json_major_incident_start_dt_gt + + json_major_incident_start_dt_gte: Union[Unset, str] = UNSET if not isinstance(major_incident_start_dt_gte, Unset): - json_major_incident_start_dt_gte = ( - major_incident_start_dt_gte.isoformat() - if major_incident_start_dt_gte - else None - ) + json_major_incident_start_dt_gte = major_incident_start_dt_gte.isoformat() + + params["major_incident_start_dt__gte"] = json_major_incident_start_dt_gte - json_major_incident_start_dt_lt: Union[Unset, None, str] = UNSET + json_major_incident_start_dt_lt: Union[Unset, str] = UNSET if not isinstance(major_incident_start_dt_lt, Unset): - json_major_incident_start_dt_lt = ( - major_incident_start_dt_lt.isoformat() - if major_incident_start_dt_lt - else None - ) + json_major_incident_start_dt_lt = major_incident_start_dt_lt.isoformat() + + params["major_incident_start_dt__lt"] = json_major_incident_start_dt_lt - json_major_incident_start_dt_lte: Union[Unset, None, str] = UNSET + json_major_incident_start_dt_lte: Union[Unset, str] = UNSET if not isinstance(major_incident_start_dt_lte, Unset): - json_major_incident_start_dt_lte = ( - major_incident_start_dt_lte.isoformat() - if major_incident_start_dt_lte - else None - ) + json_major_incident_start_dt_lte = major_incident_start_dt_lte.isoformat() + + params["major_incident_start_dt__lte"] = json_major_incident_start_dt_lte - json_major_incident_state: Union[Unset, None, str] = UNSET + json_major_incident_state: Union[Unset, str] = UNSET if not isinstance(major_incident_state, Unset): + json_major_incident_state = OsidbApiV1FlawsListMajorIncidentState(major_incident_state).value - json_major_incident_state = ( - OsidbApiV1FlawsListMajorIncidentState(major_incident_state).value - if major_incident_state - else None - ) + params["major_incident_state"] = json_major_incident_state - json_nist_cvss_validation: Union[Unset, None, str] = UNSET + params["mitigation__isempty"] = mitigation_isempty + + json_nist_cvss_validation: Union[Unset, str] = UNSET if not isinstance(nist_cvss_validation, Unset): + json_nist_cvss_validation = 
OsidbApiV1FlawsListNistCvssValidation(nist_cvss_validation).value - json_nist_cvss_validation = ( - OsidbApiV1FlawsListNistCvssValidation(nist_cvss_validation).value - if nist_cvss_validation - else None - ) + params["nist_cvss_validation"] = json_nist_cvss_validation - json_order: Union[Unset, None, List[str]] = UNSET + params["offset"] = offset + + json_order: Union[Unset, list[str]] = UNSET if not isinstance(order, Unset): - if order is None: - json_order = None - else: - json_order = [] - for order_item_data in order: - order_item: str = UNSET - if not isinstance(order_item_data, Unset): + json_order = [] + for order_item_data in order: + order_item: str = UNSET + if not isinstance(order_item_data, Unset): + order_item = OsidbApiV1FlawsListOrderItem(order_item_data).value + + json_order.append(order_item) + + params["order"] = json_order - order_item = OsidbApiV1FlawsListOrderItem(order_item_data).value + params["owner"] = owner - json_order.append(order_item) + params["owner__isempty"] = owner_isempty - json_references_created_dt: Union[Unset, None, str] = UNSET + params["query"] = query + + json_references_created_dt: Union[Unset, str] = UNSET if not isinstance(references_created_dt, Unset): - json_references_created_dt = ( - references_created_dt.isoformat() if references_created_dt else None - ) + json_references_created_dt = references_created_dt.isoformat() + + params["references__created_dt"] = json_references_created_dt - json_references_created_dt_date: Union[Unset, None, str] = UNSET + json_references_created_dt_date: Union[Unset, str] = UNSET if not isinstance(references_created_dt_date, Unset): - json_references_created_dt_date = ( - references_created_dt_date.isoformat() - if references_created_dt_date - else None - ) + json_references_created_dt_date = references_created_dt_date.isoformat() + + params["references__created_dt__date"] = json_references_created_dt_date - json_references_created_dt_date_gte: Union[Unset, None, str] = UNSET + json_references_created_dt_date_gte: Union[Unset, str] = UNSET if not isinstance(references_created_dt_date_gte, Unset): - json_references_created_dt_date_gte = ( - references_created_dt_date_gte.isoformat() - if references_created_dt_date_gte - else None - ) + json_references_created_dt_date_gte = references_created_dt_date_gte.isoformat() + + params["references__created_dt__date__gte"] = json_references_created_dt_date_gte - json_references_created_dt_date_lte: Union[Unset, None, str] = UNSET + json_references_created_dt_date_lte: Union[Unset, str] = UNSET if not isinstance(references_created_dt_date_lte, Unset): - json_references_created_dt_date_lte = ( - references_created_dt_date_lte.isoformat() - if references_created_dt_date_lte - else None - ) + json_references_created_dt_date_lte = references_created_dt_date_lte.isoformat() - json_references_created_dt_gt: Union[Unset, None, str] = UNSET + params["references__created_dt__date__lte"] = json_references_created_dt_date_lte + + json_references_created_dt_gt: Union[Unset, str] = UNSET if not isinstance(references_created_dt_gt, Unset): - json_references_created_dt_gt = ( - references_created_dt_gt.isoformat() if references_created_dt_gt else None - ) + json_references_created_dt_gt = references_created_dt_gt.isoformat() - json_references_created_dt_gte: Union[Unset, None, str] = UNSET + params["references__created_dt__gt"] = json_references_created_dt_gt + + json_references_created_dt_gte: Union[Unset, str] = UNSET if not isinstance(references_created_dt_gte, Unset): - 
json_references_created_dt_gte = ( - references_created_dt_gte.isoformat() if references_created_dt_gte else None - ) + json_references_created_dt_gte = references_created_dt_gte.isoformat() + + params["references__created_dt__gte"] = json_references_created_dt_gte - json_references_created_dt_lt: Union[Unset, None, str] = UNSET + json_references_created_dt_lt: Union[Unset, str] = UNSET if not isinstance(references_created_dt_lt, Unset): - json_references_created_dt_lt = ( - references_created_dt_lt.isoformat() if references_created_dt_lt else None - ) + json_references_created_dt_lt = references_created_dt_lt.isoformat() + + params["references__created_dt__lt"] = json_references_created_dt_lt - json_references_created_dt_lte: Union[Unset, None, str] = UNSET + json_references_created_dt_lte: Union[Unset, str] = UNSET if not isinstance(references_created_dt_lte, Unset): - json_references_created_dt_lte = ( - references_created_dt_lte.isoformat() if references_created_dt_lte else None - ) + json_references_created_dt_lte = references_created_dt_lte.isoformat() + + params["references__created_dt__lte"] = json_references_created_dt_lte + + params["references__description"] = references_description - json_references_type: Union[Unset, None, str] = UNSET + json_references_type: Union[Unset, str] = UNSET if not isinstance(references_type, Unset): + json_references_type = OsidbApiV1FlawsListReferencesType(references_type).value - json_references_type = ( - OsidbApiV1FlawsListReferencesType(references_type).value - if references_type - else None - ) + params["references__type"] = json_references_type - json_references_updated_dt: Union[Unset, None, str] = UNSET + json_references_updated_dt: Union[Unset, str] = UNSET if not isinstance(references_updated_dt, Unset): - json_references_updated_dt = ( - references_updated_dt.isoformat() if references_updated_dt else None - ) + json_references_updated_dt = references_updated_dt.isoformat() - json_references_updated_dt_date: Union[Unset, None, str] = UNSET + params["references__updated_dt"] = json_references_updated_dt + + json_references_updated_dt_date: Union[Unset, str] = UNSET if not isinstance(references_updated_dt_date, Unset): - json_references_updated_dt_date = ( - references_updated_dt_date.isoformat() - if references_updated_dt_date - else None - ) + json_references_updated_dt_date = references_updated_dt_date.isoformat() + + params["references__updated_dt__date"] = json_references_updated_dt_date - json_references_updated_dt_date_gte: Union[Unset, None, str] = UNSET + json_references_updated_dt_date_gte: Union[Unset, str] = UNSET if not isinstance(references_updated_dt_date_gte, Unset): - json_references_updated_dt_date_gte = ( - references_updated_dt_date_gte.isoformat() - if references_updated_dt_date_gte - else None - ) + json_references_updated_dt_date_gte = references_updated_dt_date_gte.isoformat() + + params["references__updated_dt__date__gte"] = json_references_updated_dt_date_gte - json_references_updated_dt_date_lte: Union[Unset, None, str] = UNSET + json_references_updated_dt_date_lte: Union[Unset, str] = UNSET if not isinstance(references_updated_dt_date_lte, Unset): - json_references_updated_dt_date_lte = ( - references_updated_dt_date_lte.isoformat() - if references_updated_dt_date_lte - else None - ) + json_references_updated_dt_date_lte = references_updated_dt_date_lte.isoformat() + + params["references__updated_dt__date__lte"] = json_references_updated_dt_date_lte - json_references_updated_dt_gt: Union[Unset, None, str] = UNSET 
+ json_references_updated_dt_gt: Union[Unset, str] = UNSET if not isinstance(references_updated_dt_gt, Unset): - json_references_updated_dt_gt = ( - references_updated_dt_gt.isoformat() if references_updated_dt_gt else None - ) + json_references_updated_dt_gt = references_updated_dt_gt.isoformat() - json_references_updated_dt_gte: Union[Unset, None, str] = UNSET + params["references__updated_dt__gt"] = json_references_updated_dt_gt + + json_references_updated_dt_gte: Union[Unset, str] = UNSET if not isinstance(references_updated_dt_gte, Unset): - json_references_updated_dt_gte = ( - references_updated_dt_gte.isoformat() if references_updated_dt_gte else None - ) + json_references_updated_dt_gte = references_updated_dt_gte.isoformat() - json_references_updated_dt_lt: Union[Unset, None, str] = UNSET + params["references__updated_dt__gte"] = json_references_updated_dt_gte + + json_references_updated_dt_lt: Union[Unset, str] = UNSET if not isinstance(references_updated_dt_lt, Unset): - json_references_updated_dt_lt = ( - references_updated_dt_lt.isoformat() if references_updated_dt_lt else None - ) + json_references_updated_dt_lt = references_updated_dt_lt.isoformat() + + params["references__updated_dt__lt"] = json_references_updated_dt_lt - json_references_updated_dt_lte: Union[Unset, None, str] = UNSET + json_references_updated_dt_lte: Union[Unset, str] = UNSET if not isinstance(references_updated_dt_lte, Unset): - json_references_updated_dt_lte = ( - references_updated_dt_lte.isoformat() if references_updated_dt_lte else None - ) + json_references_updated_dt_lte = references_updated_dt_lte.isoformat() + + params["references__updated_dt__lte"] = json_references_updated_dt_lte - json_reported_dt: Union[Unset, None, str] = UNSET + params["references__url"] = references_url + + json_references_uuid: Union[Unset, str] = UNSET + if not isinstance(references_uuid, Unset): + json_references_uuid = str(references_uuid) + + params["references__uuid"] = json_references_uuid + + json_reported_dt: Union[Unset, str] = UNSET if not isinstance(reported_dt, Unset): - json_reported_dt = reported_dt.isoformat() if reported_dt else None + json_reported_dt = reported_dt.isoformat() + + params["reported_dt"] = json_reported_dt - json_reported_dt_date: Union[Unset, None, str] = UNSET + json_reported_dt_date: Union[Unset, str] = UNSET if not isinstance(reported_dt_date, Unset): - json_reported_dt_date = ( - reported_dt_date.isoformat() if reported_dt_date else None - ) + json_reported_dt_date = reported_dt_date.isoformat() + + params["reported_dt__date"] = json_reported_dt_date - json_reported_dt_date_gte: Union[Unset, None, str] = UNSET + json_reported_dt_date_gte: Union[Unset, str] = UNSET if not isinstance(reported_dt_date_gte, Unset): - json_reported_dt_date_gte = ( - reported_dt_date_gte.isoformat() if reported_dt_date_gte else None - ) + json_reported_dt_date_gte = reported_dt_date_gte.isoformat() - json_reported_dt_date_lte: Union[Unset, None, str] = UNSET + params["reported_dt__date__gte"] = json_reported_dt_date_gte + + json_reported_dt_date_lte: Union[Unset, str] = UNSET if not isinstance(reported_dt_date_lte, Unset): - json_reported_dt_date_lte = ( - reported_dt_date_lte.isoformat() if reported_dt_date_lte else None - ) + json_reported_dt_date_lte = reported_dt_date_lte.isoformat() - json_reported_dt_gt: Union[Unset, None, str] = UNSET + params["reported_dt__date__lte"] = json_reported_dt_date_lte + + json_reported_dt_gt: Union[Unset, str] = UNSET if not isinstance(reported_dt_gt, Unset): - 
json_reported_dt_gt = reported_dt_gt.isoformat() if reported_dt_gt else None + json_reported_dt_gt = reported_dt_gt.isoformat() + + params["reported_dt__gt"] = json_reported_dt_gt - json_reported_dt_gte: Union[Unset, None, str] = UNSET + json_reported_dt_gte: Union[Unset, str] = UNSET if not isinstance(reported_dt_gte, Unset): - json_reported_dt_gte = reported_dt_gte.isoformat() if reported_dt_gte else None + json_reported_dt_gte = reported_dt_gte.isoformat() + + params["reported_dt__gte"] = json_reported_dt_gte - json_reported_dt_lt: Union[Unset, None, str] = UNSET + json_reported_dt_lt: Union[Unset, str] = UNSET if not isinstance(reported_dt_lt, Unset): - json_reported_dt_lt = reported_dt_lt.isoformat() if reported_dt_lt else None + json_reported_dt_lt = reported_dt_lt.isoformat() - json_reported_dt_lte: Union[Unset, None, str] = UNSET + params["reported_dt__lt"] = json_reported_dt_lt + + json_reported_dt_lte: Union[Unset, str] = UNSET if not isinstance(reported_dt_lte, Unset): - json_reported_dt_lte = reported_dt_lte.isoformat() if reported_dt_lte else None + json_reported_dt_lte = reported_dt_lte.isoformat() + + params["reported_dt__lte"] = json_reported_dt_lte - json_requires_cve_description: Union[Unset, None, str] = UNSET + json_requires_cve_description: Union[Unset, str] = UNSET if not isinstance(requires_cve_description, Unset): + json_requires_cve_description = OsidbApiV1FlawsListRequiresCveDescription(requires_cve_description).value - json_requires_cve_description = ( - OsidbApiV1FlawsListRequiresCveDescription(requires_cve_description).value - if requires_cve_description - else None - ) + params["requires_cve_description"] = json_requires_cve_description + + params["search"] = search - json_source: Union[Unset, None, str] = UNSET + json_source: Union[Unset, str] = UNSET if not isinstance(source, Unset): + json_source = OsidbApiV1FlawsListSource(source).value + + params["source"] = json_source + + params["statement"] = statement + + params["statement__isempty"] = statement_isempty - json_source = OsidbApiV1FlawsListSource(source).value if source else None + params["team_id"] = team_id - json_tracker_ids: Union[Unset, None, List[str]] = UNSET + params["title"] = title + + json_tracker_ids: Union[Unset, list[str]] = UNSET if not isinstance(tracker_ids, Unset): - if tracker_ids is None: - json_tracker_ids = None - else: - json_tracker_ids = tracker_ids + json_tracker_ids = tracker_ids + + params["tracker_ids"] = json_tracker_ids - json_unembargo_dt: Union[Unset, None, str] = UNSET + json_unembargo_dt: Union[Unset, str] = UNSET if not isinstance(unembargo_dt, Unset): - json_unembargo_dt = unembargo_dt.isoformat() if unembargo_dt else None + json_unembargo_dt = unembargo_dt.isoformat() - json_updated_dt: Union[Unset, None, str] = UNSET + params["unembargo_dt"] = json_unembargo_dt + + json_updated_dt: Union[Unset, str] = UNSET if not isinstance(updated_dt, Unset): - json_updated_dt = updated_dt.isoformat() if updated_dt else None + json_updated_dt = updated_dt.isoformat() + + params["updated_dt"] = json_updated_dt - json_updated_dt_date: Union[Unset, None, str] = UNSET + json_updated_dt_date: Union[Unset, str] = UNSET if not isinstance(updated_dt_date, Unset): - json_updated_dt_date = updated_dt_date.isoformat() if updated_dt_date else None + json_updated_dt_date = updated_dt_date.isoformat() - json_updated_dt_date_gte: Union[Unset, None, str] = UNSET + params["updated_dt__date"] = json_updated_dt_date + + json_updated_dt_date_gte: Union[Unset, str] = UNSET if not 
isinstance(updated_dt_date_gte, Unset): - json_updated_dt_date_gte = ( - updated_dt_date_gte.isoformat() if updated_dt_date_gte else None - ) + json_updated_dt_date_gte = updated_dt_date_gte.isoformat() + + params["updated_dt__date__gte"] = json_updated_dt_date_gte - json_updated_dt_date_lte: Union[Unset, None, str] = UNSET + json_updated_dt_date_lte: Union[Unset, str] = UNSET if not isinstance(updated_dt_date_lte, Unset): - json_updated_dt_date_lte = ( - updated_dt_date_lte.isoformat() if updated_dt_date_lte else None - ) + json_updated_dt_date_lte = updated_dt_date_lte.isoformat() + + params["updated_dt__date__lte"] = json_updated_dt_date_lte - json_updated_dt_gt: Union[Unset, None, str] = UNSET + json_updated_dt_gt: Union[Unset, str] = UNSET if not isinstance(updated_dt_gt, Unset): - json_updated_dt_gt = updated_dt_gt.isoformat() if updated_dt_gt else None + json_updated_dt_gt = updated_dt_gt.isoformat() - json_updated_dt_gte: Union[Unset, None, str] = UNSET + params["updated_dt__gt"] = json_updated_dt_gt + + json_updated_dt_gte: Union[Unset, str] = UNSET if not isinstance(updated_dt_gte, Unset): - json_updated_dt_gte = updated_dt_gte.isoformat() if updated_dt_gte else None + json_updated_dt_gte = updated_dt_gte.isoformat() + + params["updated_dt__gte"] = json_updated_dt_gte - json_updated_dt_lt: Union[Unset, None, str] = UNSET + json_updated_dt_lt: Union[Unset, str] = UNSET if not isinstance(updated_dt_lt, Unset): - json_updated_dt_lt = updated_dt_lt.isoformat() if updated_dt_lt else None + json_updated_dt_lt = updated_dt_lt.isoformat() + + params["updated_dt__lt"] = json_updated_dt_lt - json_updated_dt_lte: Union[Unset, None, str] = UNSET + json_updated_dt_lte: Union[Unset, str] = UNSET if not isinstance(updated_dt_lte, Unset): - json_updated_dt_lte = updated_dt_lte.isoformat() if updated_dt_lte else None + json_updated_dt_lte = updated_dt_lte.isoformat() + + params["updated_dt__lte"] = json_updated_dt_lte + + json_uuid: Union[Unset, str] = UNSET + if not isinstance(uuid, Unset): + json_uuid = str(uuid) - json_workflow_state: Union[Unset, None, List[str]] = UNSET + params["uuid"] = json_uuid + + json_workflow_state: Union[Unset, list[str]] = UNSET if not isinstance(workflow_state, Unset): - if workflow_state is None: - json_workflow_state = None - else: - json_workflow_state = [] - for workflow_state_item_data in workflow_state: - workflow_state_item: str = UNSET - if not isinstance(workflow_state_item_data, Unset): - - workflow_state_item = OsidbApiV1FlawsListWorkflowStateItem( - workflow_state_item_data - ).value - - json_workflow_state.append(workflow_state_item) - - params: Dict[str, Any] = { - "acknowledgments__affiliation": acknowledgments_affiliation, - "acknowledgments__created_dt": json_acknowledgments_created_dt, - "acknowledgments__created_dt__date": json_acknowledgments_created_dt_date, - "acknowledgments__created_dt__date__gte": json_acknowledgments_created_dt_date_gte, - "acknowledgments__created_dt__date__lte": json_acknowledgments_created_dt_date_lte, - "acknowledgments__created_dt__gt": json_acknowledgments_created_dt_gt, - "acknowledgments__created_dt__gte": json_acknowledgments_created_dt_gte, - "acknowledgments__created_dt__lt": json_acknowledgments_created_dt_lt, - "acknowledgments__created_dt__lte": json_acknowledgments_created_dt_lte, - "acknowledgments__from_upstream": acknowledgments_from_upstream, - "acknowledgments__name": acknowledgments_name, - "acknowledgments__updated_dt": json_acknowledgments_updated_dt, - "acknowledgments__updated_dt__date": 
json_acknowledgments_updated_dt_date, - "acknowledgments__updated_dt__date__gte": json_acknowledgments_updated_dt_date_gte, - "acknowledgments__updated_dt__date__lte": json_acknowledgments_updated_dt_date_lte, - "acknowledgments__updated_dt__gt": json_acknowledgments_updated_dt_gt, - "acknowledgments__updated_dt__gte": json_acknowledgments_updated_dt_gte, - "acknowledgments__updated_dt__lt": json_acknowledgments_updated_dt_lt, - "acknowledgments__updated_dt__lte": json_acknowledgments_updated_dt_lte, - "acknowledgments__uuid": acknowledgments_uuid, - "affects__affectedness": json_affects_affectedness, - "affects__created_dt": json_affects_created_dt, - "affects__created_dt__date": json_affects_created_dt_date, - "affects__created_dt__date__gte": json_affects_created_dt_date_gte, - "affects__created_dt__date__lte": json_affects_created_dt_date_lte, - "affects__created_dt__gt": json_affects_created_dt_gt, - "affects__created_dt__gte": json_affects_created_dt_gte, - "affects__created_dt__lt": json_affects_created_dt_lt, - "affects__created_dt__lte": json_affects_created_dt_lte, - "affects__embargoed": affects_embargoed, - "affects__impact": json_affects_impact, - "affects__ps_component": affects_ps_component, - "affects__ps_module": affects_ps_module, - "affects__resolution": json_affects_resolution, - "affects__trackers__created_dt": json_affects_trackers_created_dt, - "affects__trackers__created_dt__date": json_affects_trackers_created_dt_date, - "affects__trackers__created_dt__date__gte": json_affects_trackers_created_dt_date_gte, - "affects__trackers__created_dt__date__lte": json_affects_trackers_created_dt_date_lte, - "affects__trackers__created_dt__gt": json_affects_trackers_created_dt_gt, - "affects__trackers__created_dt__gte": json_affects_trackers_created_dt_gte, - "affects__trackers__created_dt__lt": json_affects_trackers_created_dt_lt, - "affects__trackers__created_dt__lte": json_affects_trackers_created_dt_lte, - "affects__trackers__embargoed": affects_trackers_embargoed, - "affects__trackers__errata__advisory_name": affects_trackers_errata_advisory_name, - "affects__trackers__errata__et_id": affects_trackers_errata_et_id, - "affects__trackers__errata__shipped_dt": json_affects_trackers_errata_shipped_dt, - "affects__trackers__errata__shipped_dt__date": json_affects_trackers_errata_shipped_dt_date, - "affects__trackers__errata__shipped_dt__date__gte": json_affects_trackers_errata_shipped_dt_date_gte, - "affects__trackers__errata__shipped_dt__date__lte": json_affects_trackers_errata_shipped_dt_date_lte, - "affects__trackers__errata__shipped_dt__gt": json_affects_trackers_errata_shipped_dt_gt, - "affects__trackers__errata__shipped_dt__gte": json_affects_trackers_errata_shipped_dt_gte, - "affects__trackers__errata__shipped_dt__lt": json_affects_trackers_errata_shipped_dt_lt, - "affects__trackers__errata__shipped_dt__lte": json_affects_trackers_errata_shipped_dt_lte, - "affects__trackers__external_system_id": affects_trackers_external_system_id, - "affects__trackers__ps_update_stream": affects_trackers_ps_update_stream, - "affects__trackers__resolution": affects_trackers_resolution, - "affects__trackers__status": affects_trackers_status, - "affects__trackers__type": json_affects_trackers_type, - "affects__trackers__updated_dt": json_affects_trackers_updated_dt, - "affects__trackers__updated_dt__date": json_affects_trackers_updated_dt_date, - "affects__trackers__updated_dt__date__gte": json_affects_trackers_updated_dt_date_gte, - "affects__trackers__updated_dt__date__lte": 
json_affects_trackers_updated_dt_date_lte, - "affects__trackers__updated_dt__gt": json_affects_trackers_updated_dt_gt, - "affects__trackers__updated_dt__gte": json_affects_trackers_updated_dt_gte, - "affects__trackers__updated_dt__lt": json_affects_trackers_updated_dt_lt, - "affects__trackers__updated_dt__lte": json_affects_trackers_updated_dt_lte, - "affects__trackers__uuid": affects_trackers_uuid, - "affects__updated_dt": json_affects_updated_dt, - "affects__updated_dt__date": json_affects_updated_dt_date, - "affects__updated_dt__date__gte": json_affects_updated_dt_date_gte, - "affects__updated_dt__date__lte": json_affects_updated_dt_date_lte, - "affects__updated_dt__gt": json_affects_updated_dt_gt, - "affects__updated_dt__gte": json_affects_updated_dt_gte, - "affects__updated_dt__lt": json_affects_updated_dt_lt, - "affects__updated_dt__lte": json_affects_updated_dt_lte, - "affects__uuid": affects_uuid, - "bz_id": bz_id, - "changed_after": json_changed_after, - "changed_before": json_changed_before, - "comment_zero": comment_zero, - "components": json_components, - "created_dt": json_created_dt, - "created_dt__date": json_created_dt_date, - "created_dt__date__gte": json_created_dt_date_gte, - "created_dt__date__lte": json_created_dt_date_lte, - "created_dt__gt": json_created_dt_gt, - "created_dt__gte": json_created_dt_gte, - "created_dt__lt": json_created_dt_lt, - "created_dt__lte": json_created_dt_lte, - "cve_description": cve_description, - "cve_description__isempty": cve_description_isempty, - "cve_id": json_cve_id, - "cve_id__isempty": cve_id_isempty, - "cvss2_nist__isempty": cvss2_nist_isempty, - "cvss2_rh__isempty": cvss2_rh_isempty, - "cvss3_nist__isempty": cvss3_nist_isempty, - "cvss3_rh__isempty": cvss3_rh_isempty, - "cvss4_nist__isempty": cvss4_nist_isempty, - "cvss4_rh__isempty": cvss4_rh_isempty, - "cvss_scores__comment": cvss_scores_comment, - "cvss_scores__created_dt": json_cvss_scores_created_dt, - "cvss_scores__created_dt__date": json_cvss_scores_created_dt_date, - "cvss_scores__created_dt__date__gte": json_cvss_scores_created_dt_date_gte, - "cvss_scores__created_dt__date__lte": json_cvss_scores_created_dt_date_lte, - "cvss_scores__created_dt__gt": json_cvss_scores_created_dt_gt, - "cvss_scores__created_dt__gte": json_cvss_scores_created_dt_gte, - "cvss_scores__created_dt__lt": json_cvss_scores_created_dt_lt, - "cvss_scores__created_dt__lte": json_cvss_scores_created_dt_lte, - "cvss_scores__cvss_version": cvss_scores_cvss_version, - "cvss_scores__issuer": json_cvss_scores_issuer, - "cvss_scores__score": cvss_scores_score, - "cvss_scores__updated_dt": json_cvss_scores_updated_dt, - "cvss_scores__updated_dt__date": json_cvss_scores_updated_dt_date, - "cvss_scores__updated_dt__date__gte": json_cvss_scores_updated_dt_date_gte, - "cvss_scores__updated_dt__date__lte": json_cvss_scores_updated_dt_date_lte, - "cvss_scores__updated_dt__gt": json_cvss_scores_updated_dt_gt, - "cvss_scores__updated_dt__gte": json_cvss_scores_updated_dt_gte, - "cvss_scores__updated_dt__lt": json_cvss_scores_updated_dt_lt, - "cvss_scores__updated_dt__lte": json_cvss_scores_updated_dt_lte, - "cvss_scores__uuid": cvss_scores_uuid, - "cvss_scores__vector": cvss_scores_vector, - "cwe_id": cwe_id, - "cwe_id__isempty": cwe_id_isempty, - "embargoed": embargoed, - "exclude_fields": json_exclude_fields, - "impact": json_impact, - "include_fields": json_include_fields, - "include_meta_attr": json_include_meta_attr, - "limit": limit, - "major_incident_start_dt": json_major_incident_start_dt, - 
"major_incident_start_dt__date": json_major_incident_start_dt_date, - "major_incident_start_dt__date__gte": json_major_incident_start_dt_date_gte, - "major_incident_start_dt__date__lte": json_major_incident_start_dt_date_lte, - "major_incident_start_dt__gt": json_major_incident_start_dt_gt, - "major_incident_start_dt__gte": json_major_incident_start_dt_gte, - "major_incident_start_dt__lt": json_major_incident_start_dt_lt, - "major_incident_start_dt__lte": json_major_incident_start_dt_lte, - "major_incident_state": json_major_incident_state, - "mitigation__isempty": mitigation_isempty, - "nist_cvss_validation": json_nist_cvss_validation, - "offset": offset, - "order": json_order, - "owner": owner, - "owner__isempty": owner_isempty, - "query": query, - "references__created_dt": json_references_created_dt, - "references__created_dt__date": json_references_created_dt_date, - "references__created_dt__date__gte": json_references_created_dt_date_gte, - "references__created_dt__date__lte": json_references_created_dt_date_lte, - "references__created_dt__gt": json_references_created_dt_gt, - "references__created_dt__gte": json_references_created_dt_gte, - "references__created_dt__lt": json_references_created_dt_lt, - "references__created_dt__lte": json_references_created_dt_lte, - "references__description": references_description, - "references__type": json_references_type, - "references__updated_dt": json_references_updated_dt, - "references__updated_dt__date": json_references_updated_dt_date, - "references__updated_dt__date__gte": json_references_updated_dt_date_gte, - "references__updated_dt__date__lte": json_references_updated_dt_date_lte, - "references__updated_dt__gt": json_references_updated_dt_gt, - "references__updated_dt__gte": json_references_updated_dt_gte, - "references__updated_dt__lt": json_references_updated_dt_lt, - "references__updated_dt__lte": json_references_updated_dt_lte, - "references__url": references_url, - "references__uuid": references_uuid, - "reported_dt": json_reported_dt, - "reported_dt__date": json_reported_dt_date, - "reported_dt__date__gte": json_reported_dt_date_gte, - "reported_dt__date__lte": json_reported_dt_date_lte, - "reported_dt__gt": json_reported_dt_gt, - "reported_dt__gte": json_reported_dt_gte, - "reported_dt__lt": json_reported_dt_lt, - "reported_dt__lte": json_reported_dt_lte, - "requires_cve_description": json_requires_cve_description, - "search": search, - "source": json_source, - "statement": statement, - "statement__isempty": statement_isempty, - "team_id": team_id, - "title": title, - "tracker_ids": json_tracker_ids, - "unembargo_dt": json_unembargo_dt, - "updated_dt": json_updated_dt, - "updated_dt__date": json_updated_dt_date, - "updated_dt__date__gte": json_updated_dt_date_gte, - "updated_dt__date__lte": json_updated_dt_date_lte, - "updated_dt__gt": json_updated_dt_gt, - "updated_dt__gte": json_updated_dt_gte, - "updated_dt__lt": json_updated_dt_lt, - "updated_dt__lte": json_updated_dt_lte, - "uuid": uuid, - "workflow_state": json_workflow_state, - } + json_workflow_state = [] + for workflow_state_item_data in workflow_state: + workflow_state_item: str = UNSET + if not isinstance(workflow_state_item_data, Unset): + workflow_state_item = OsidbApiV1FlawsListWorkflowStateItem(workflow_state_item_data).value + + json_workflow_state.append(workflow_state_item) + + params["workflow_state"] = json_workflow_state + params = {k: v for k, v in params.items() if v is not UNSET and v is not None} - return { - "url": url, - "headers": headers, + _kwargs: 
dict[str, Any] = {
+        "url": f"{client.base_url}/osidb/api/v1/flaws",
         "params": params,
     }
+    return _kwargs
+

 def _parse_response(
-    *, response: requests.Response
+    *, client: Union[AuthenticatedClient, Client], response: requests.Response
 ) -> Optional[OsidbApiV1FlawsListResponse200]:
     if response.status_code == 200:
+        # }
         _response_200 = response.json()
         response_200: OsidbApiV1FlawsListResponse200
         if isinstance(_response_200, Unset):
@@ -1676,242 +1449,418 @@ def _parse_response(
             response_200 = OsidbApiV1FlawsListResponse200.from_dict(_response_200)
         return response_200
-
     return None

 def _build_response(
-    *, response: requests.Response
+    *, client: Union[AuthenticatedClient, Client], response: requests.Response
 ) -> Response[OsidbApiV1FlawsListResponse200]:
     return Response(
-        status_code=response.status_code,
+        status_code=HTTPStatus(response.status_code),
         content=response.content,
         headers=response.headers,
-        parsed=_parse_response(response=response),
+        parsed=_parse_response(client=client, response=response),
     )

 def sync_detailed(
     *,
     client: AuthenticatedClient,
-    acknowledgments_affiliation: Union[Unset, None, str] = UNSET,
-    acknowledgments_created_dt: Union[Unset, None, datetime.datetime] = UNSET,
-    acknowledgments_created_dt_date: Union[Unset, None, datetime.date] = UNSET,
-    acknowledgments_created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET,
-    acknowledgments_created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET,
-    acknowledgments_created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET,
-    acknowledgments_created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET,
-    acknowledgments_created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET,
-    acknowledgments_created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET,
-    acknowledgments_from_upstream: Union[Unset, None, bool] = UNSET,
-    acknowledgments_name: Union[Unset, None, str] = UNSET,
-    acknowledgments_updated_dt: Union[Unset, None, datetime.datetime] = UNSET,
-    acknowledgments_updated_dt_date: Union[Unset, None, datetime.date] = UNSET,
-    acknowledgments_updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET,
-    acknowledgments_updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET,
-    acknowledgments_updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET,
-    acknowledgments_updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET,
-    acknowledgments_updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET,
-    acknowledgments_updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET,
-    acknowledgments_uuid: Union[Unset, None, str] = UNSET,
-    affects_affectedness: Union[
-        Unset, None, OsidbApiV1FlawsListAffectsAffectedness
-    ] = UNSET,
-    affects_created_dt: Union[Unset, None, datetime.datetime] = UNSET,
-    affects_created_dt_date: Union[Unset, None, datetime.date] = UNSET,
-    affects_created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET,
-    affects_created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET,
-    affects_created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET,
-    affects_created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET,
-    affects_created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET,
-    affects_created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET,
-    affects_embargoed: Union[Unset, None, bool] = UNSET,
-    affects_impact: Union[Unset, None, OsidbApiV1FlawsListAffectsImpact] = UNSET,
-    affects_ps_component: Union[Unset, None, str] = UNSET,
-    affects_ps_module: Union[Unset, None, str] = UNSET,
-    affects_resolution:
Union[ - Unset, None, OsidbApiV1FlawsListAffectsResolution - ] = UNSET, - affects_trackers_created_dt: Union[Unset, None, datetime.datetime] = UNSET, - affects_trackers_created_dt_date: Union[Unset, None, datetime.date] = UNSET, - affects_trackers_created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - affects_trackers_created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - affects_trackers_created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - affects_trackers_created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - affects_trackers_created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - affects_trackers_created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - affects_trackers_embargoed: Union[Unset, None, bool] = UNSET, - affects_trackers_errata_advisory_name: Union[Unset, None, str] = UNSET, - affects_trackers_errata_et_id: Union[Unset, None, int] = UNSET, - affects_trackers_errata_shipped_dt: Union[Unset, None, datetime.datetime] = UNSET, - affects_trackers_errata_shipped_dt_date: Union[Unset, None, datetime.date] = UNSET, - affects_trackers_errata_shipped_dt_date_gte: Union[ - Unset, None, datetime.date - ] = UNSET, - affects_trackers_errata_shipped_dt_date_lte: Union[ - Unset, None, datetime.date - ] = UNSET, - affects_trackers_errata_shipped_dt_gt: Union[ - Unset, None, datetime.datetime - ] = UNSET, - affects_trackers_errata_shipped_dt_gte: Union[ - Unset, None, datetime.datetime - ] = UNSET, - affects_trackers_errata_shipped_dt_lt: Union[ - Unset, None, datetime.datetime - ] = UNSET, - affects_trackers_errata_shipped_dt_lte: Union[ - Unset, None, datetime.datetime - ] = UNSET, - affects_trackers_external_system_id: Union[Unset, None, str] = UNSET, - affects_trackers_ps_update_stream: Union[Unset, None, str] = UNSET, - affects_trackers_resolution: Union[Unset, None, str] = UNSET, - affects_trackers_status: Union[Unset, None, str] = UNSET, - affects_trackers_type: Union[ - Unset, None, OsidbApiV1FlawsListAffectsTrackersType - ] = UNSET, - affects_trackers_updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - affects_trackers_updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - affects_trackers_updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - affects_trackers_updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - affects_trackers_updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - affects_trackers_updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - affects_trackers_updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - affects_trackers_updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - affects_trackers_uuid: Union[Unset, None, str] = UNSET, - affects_updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - affects_updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - affects_updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - affects_updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - affects_updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - affects_updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - affects_updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - affects_updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - affects_uuid: Union[Unset, None, str] = UNSET, - bz_id: Union[Unset, None, float] = UNSET, - changed_after: Union[Unset, None, datetime.datetime] = UNSET, - changed_before: Union[Unset, None, datetime.datetime] = UNSET, - comment_zero: 
Union[Unset, None, str] = UNSET, - components: Union[Unset, None, List[str]] = UNSET, - created_dt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_date: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - cve_description: Union[Unset, None, str] = UNSET, - cve_description_isempty: Union[Unset, None, bool] = UNSET, - cve_id: Union[Unset, None, List[str]] = UNSET, - cve_id_isempty: Union[Unset, None, bool] = UNSET, - cvss2_nist_isempty: Union[Unset, None, bool] = UNSET, - cvss2_rh_isempty: Union[Unset, None, bool] = UNSET, - cvss3_nist_isempty: Union[Unset, None, bool] = UNSET, - cvss3_rh_isempty: Union[Unset, None, bool] = UNSET, - cvss4_nist_isempty: Union[Unset, None, bool] = UNSET, - cvss4_rh_isempty: Union[Unset, None, bool] = UNSET, - cvss_scores_comment: Union[Unset, None, str] = UNSET, - cvss_scores_created_dt: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_created_dt_date: Union[Unset, None, datetime.date] = UNSET, - cvss_scores_created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - cvss_scores_created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - cvss_scores_created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_cvss_version: Union[Unset, None, str] = UNSET, - cvss_scores_issuer: Union[Unset, None, OsidbApiV1FlawsListCvssScoresIssuer] = UNSET, - cvss_scores_score: Union[Unset, None, float] = UNSET, - cvss_scores_updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - cvss_scores_updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - cvss_scores_updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - cvss_scores_updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_uuid: Union[Unset, None, str] = UNSET, - cvss_scores_vector: Union[Unset, None, str] = UNSET, - cwe_id: Union[Unset, None, str] = UNSET, - cwe_id_isempty: Union[Unset, None, bool] = UNSET, - embargoed: Union[Unset, None, bool] = UNSET, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - impact: Union[Unset, None, OsidbApiV1FlawsListImpact] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, - include_meta_attr: Union[Unset, None, List[str]] = UNSET, - limit: Union[Unset, None, int] = UNSET, - major_incident_start_dt: Union[Unset, None, datetime.datetime] = UNSET, - major_incident_start_dt_date: Union[Unset, None, datetime.date] = UNSET, - major_incident_start_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - major_incident_start_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - major_incident_start_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - 
major_incident_start_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - major_incident_start_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - major_incident_start_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - major_incident_state: Union[ - Unset, None, OsidbApiV1FlawsListMajorIncidentState - ] = UNSET, - mitigation_isempty: Union[Unset, None, bool] = UNSET, - nist_cvss_validation: Union[ - Unset, None, OsidbApiV1FlawsListNistCvssValidation - ] = UNSET, - offset: Union[Unset, None, int] = UNSET, - order: Union[Unset, None, List[OsidbApiV1FlawsListOrderItem]] = UNSET, - owner: Union[Unset, None, str] = UNSET, - owner_isempty: Union[Unset, None, bool] = UNSET, - query: Union[Unset, None, str] = UNSET, - references_created_dt: Union[Unset, None, datetime.datetime] = UNSET, - references_created_dt_date: Union[Unset, None, datetime.date] = UNSET, - references_created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - references_created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - references_created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - references_created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - references_created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - references_created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - references_description: Union[Unset, None, str] = UNSET, - references_type: Union[Unset, None, OsidbApiV1FlawsListReferencesType] = UNSET, - references_updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - references_updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - references_updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - references_updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - references_updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - references_updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - references_updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - references_updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - references_url: Union[Unset, None, str] = UNSET, - references_uuid: Union[Unset, None, str] = UNSET, - reported_dt: Union[Unset, None, datetime.datetime] = UNSET, - reported_dt_date: Union[Unset, None, datetime.date] = UNSET, - reported_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - reported_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - reported_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - reported_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - reported_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - reported_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - requires_cve_description: Union[ - Unset, None, OsidbApiV1FlawsListRequiresCveDescription - ] = UNSET, - search: Union[Unset, None, str] = UNSET, - source: Union[Unset, None, OsidbApiV1FlawsListSource] = UNSET, - statement: Union[Unset, None, str] = UNSET, - statement_isempty: Union[Unset, None, bool] = UNSET, - team_id: Union[Unset, None, str] = UNSET, - title: Union[Unset, None, str] = UNSET, - tracker_ids: Union[Unset, None, List[str]] = UNSET, - unembargo_dt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_gte: 
Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - uuid: Union[Unset, None, str] = UNSET, - workflow_state: Union[ - Unset, None, List[OsidbApiV1FlawsListWorkflowStateItem] - ] = UNSET, + acknowledgments_affiliation: Union[Unset, str] = UNSET, + acknowledgments_created_dt: Union[Unset, datetime.datetime] = UNSET, + acknowledgments_created_dt_date: Union[Unset, datetime.date] = UNSET, + acknowledgments_created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + acknowledgments_created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + acknowledgments_created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + acknowledgments_created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + acknowledgments_created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + acknowledgments_created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + acknowledgments_from_upstream: Union[Unset, bool] = UNSET, + acknowledgments_name: Union[Unset, str] = UNSET, + acknowledgments_updated_dt: Union[Unset, datetime.datetime] = UNSET, + acknowledgments_updated_dt_date: Union[Unset, datetime.date] = UNSET, + acknowledgments_updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + acknowledgments_updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + acknowledgments_updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + acknowledgments_updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + acknowledgments_updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + acknowledgments_updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + acknowledgments_uuid: Union[Unset, UUID] = UNSET, + affects_affectedness: Union[Unset, OsidbApiV1FlawsListAffectsAffectedness] = UNSET, + affects_created_dt: Union[Unset, datetime.datetime] = UNSET, + affects_created_dt_date: Union[Unset, datetime.date] = UNSET, + affects_created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + affects_created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + affects_created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + affects_created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + affects_created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + affects_created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + affects_embargoed: Union[Unset, bool] = UNSET, + affects_impact: Union[Unset, OsidbApiV1FlawsListAffectsImpact] = UNSET, + affects_ps_component: Union[Unset, str] = UNSET, + affects_ps_module: Union[Unset, str] = UNSET, + affects_resolution: Union[Unset, OsidbApiV1FlawsListAffectsResolution] = UNSET, + affects_trackers_created_dt: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_created_dt_date: Union[Unset, datetime.date] = UNSET, + affects_trackers_created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + affects_trackers_created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + affects_trackers_created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_embargoed: Union[Unset, bool] = UNSET, + affects_trackers_errata_advisory_name: Union[Unset, str] = UNSET, + affects_trackers_errata_et_id: Union[Unset, int] = UNSET, + affects_trackers_errata_shipped_dt: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_errata_shipped_dt_date: 
Union[Unset, datetime.date] = UNSET, + affects_trackers_errata_shipped_dt_date_gte: Union[Unset, datetime.date] = UNSET, + affects_trackers_errata_shipped_dt_date_lte: Union[Unset, datetime.date] = UNSET, + affects_trackers_errata_shipped_dt_gt: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_errata_shipped_dt_gte: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_errata_shipped_dt_lt: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_errata_shipped_dt_lte: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_external_system_id: Union[Unset, str] = UNSET, + affects_trackers_ps_update_stream: Union[Unset, str] = UNSET, + affects_trackers_resolution: Union[Unset, str] = UNSET, + affects_trackers_status: Union[Unset, str] = UNSET, + affects_trackers_type: Union[Unset, OsidbApiV1FlawsListAffectsTrackersType] = UNSET, + affects_trackers_updated_dt: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_updated_dt_date: Union[Unset, datetime.date] = UNSET, + affects_trackers_updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + affects_trackers_updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + affects_trackers_updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_uuid: Union[Unset, UUID] = UNSET, + affects_updated_dt: Union[Unset, datetime.datetime] = UNSET, + affects_updated_dt_date: Union[Unset, datetime.date] = UNSET, + affects_updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + affects_updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + affects_updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + affects_updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + affects_updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + affects_updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + affects_uuid: Union[Unset, UUID] = UNSET, + bz_id: Union[Unset, float] = UNSET, + changed_after: Union[Unset, datetime.datetime] = UNSET, + changed_before: Union[Unset, datetime.datetime] = UNSET, + comment_zero: Union[Unset, str] = UNSET, + components: Union[Unset, list[str]] = UNSET, + created_dt: Union[Unset, datetime.datetime] = UNSET, + created_dt_date: Union[Unset, datetime.date] = UNSET, + created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + cve_description: Union[Unset, str] = UNSET, + cve_description_isempty: Union[Unset, bool] = UNSET, + cve_id: Union[Unset, list[str]] = UNSET, + cve_id_isempty: Union[Unset, bool] = UNSET, + cvss2_nist_isempty: Union[Unset, bool] = UNSET, + cvss2_rh_isempty: Union[Unset, bool] = UNSET, + cvss3_nist_isempty: Union[Unset, bool] = UNSET, + cvss3_rh_isempty: Union[Unset, bool] = UNSET, + cvss4_nist_isempty: Union[Unset, bool] = UNSET, + cvss4_rh_isempty: Union[Unset, bool] = UNSET, + cvss_scores_comment: Union[Unset, str] = UNSET, + cvss_scores_created_dt: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_created_dt_date: Union[Unset, datetime.date] = UNSET, + cvss_scores_created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + 
cvss_scores_created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + cvss_scores_created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_cvss_version: Union[Unset, str] = UNSET, + cvss_scores_issuer: Union[Unset, OsidbApiV1FlawsListCvssScoresIssuer] = UNSET, + cvss_scores_score: Union[Unset, float] = UNSET, + cvss_scores_updated_dt: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_updated_dt_date: Union[Unset, datetime.date] = UNSET, + cvss_scores_updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + cvss_scores_updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + cvss_scores_updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_uuid: Union[Unset, UUID] = UNSET, + cvss_scores_vector: Union[Unset, str] = UNSET, + cwe_id: Union[Unset, str] = UNSET, + cwe_id_isempty: Union[Unset, bool] = UNSET, + embargoed: Union[Unset, bool] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + impact: Union[Unset, OsidbApiV1FlawsListImpact] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, + include_meta_attr: Union[Unset, list[str]] = UNSET, + limit: Union[Unset, int] = UNSET, + major_incident_start_dt: Union[Unset, datetime.datetime] = UNSET, + major_incident_start_dt_date: Union[Unset, datetime.date] = UNSET, + major_incident_start_dt_date_gte: Union[Unset, datetime.date] = UNSET, + major_incident_start_dt_date_lte: Union[Unset, datetime.date] = UNSET, + major_incident_start_dt_gt: Union[Unset, datetime.datetime] = UNSET, + major_incident_start_dt_gte: Union[Unset, datetime.datetime] = UNSET, + major_incident_start_dt_lt: Union[Unset, datetime.datetime] = UNSET, + major_incident_start_dt_lte: Union[Unset, datetime.datetime] = UNSET, + major_incident_state: Union[Unset, OsidbApiV1FlawsListMajorIncidentState] = UNSET, + mitigation_isempty: Union[Unset, bool] = UNSET, + nist_cvss_validation: Union[Unset, OsidbApiV1FlawsListNistCvssValidation] = UNSET, + offset: Union[Unset, int] = UNSET, + order: Union[Unset, list[OsidbApiV1FlawsListOrderItem]] = UNSET, + owner: Union[Unset, str] = UNSET, + owner_isempty: Union[Unset, bool] = UNSET, + query: Union[Unset, str] = UNSET, + references_created_dt: Union[Unset, datetime.datetime] = UNSET, + references_created_dt_date: Union[Unset, datetime.date] = UNSET, + references_created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + references_created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + references_created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + references_created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + references_created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + references_created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + references_description: Union[Unset, str] = UNSET, + references_type: Union[Unset, OsidbApiV1FlawsListReferencesType] = UNSET, + references_updated_dt: Union[Unset, datetime.datetime] = UNSET, + references_updated_dt_date: Union[Unset, datetime.date] = UNSET, + references_updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + references_updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + 
references_updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + references_updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + references_updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + references_updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + references_url: Union[Unset, str] = UNSET, + references_uuid: Union[Unset, UUID] = UNSET, + reported_dt: Union[Unset, datetime.datetime] = UNSET, + reported_dt_date: Union[Unset, datetime.date] = UNSET, + reported_dt_date_gte: Union[Unset, datetime.date] = UNSET, + reported_dt_date_lte: Union[Unset, datetime.date] = UNSET, + reported_dt_gt: Union[Unset, datetime.datetime] = UNSET, + reported_dt_gte: Union[Unset, datetime.datetime] = UNSET, + reported_dt_lt: Union[Unset, datetime.datetime] = UNSET, + reported_dt_lte: Union[Unset, datetime.datetime] = UNSET, + requires_cve_description: Union[Unset, OsidbApiV1FlawsListRequiresCveDescription] = UNSET, + search: Union[Unset, str] = UNSET, + source: Union[Unset, OsidbApiV1FlawsListSource] = UNSET, + statement: Union[Unset, str] = UNSET, + statement_isempty: Union[Unset, bool] = UNSET, + team_id: Union[Unset, str] = UNSET, + title: Union[Unset, str] = UNSET, + tracker_ids: Union[Unset, list[str]] = UNSET, + unembargo_dt: Union[Unset, datetime.datetime] = UNSET, + updated_dt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_date: Union[Unset, datetime.date] = UNSET, + updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + uuid: Union[Unset, UUID] = UNSET, + workflow_state: Union[Unset, list[OsidbApiV1FlawsListWorkflowStateItem]] = UNSET, ) -> Response[OsidbApiV1FlawsListResponse200]: + """ + Args: + acknowledgments_affiliation (Union[Unset, str]): + acknowledgments_created_dt (Union[Unset, datetime.datetime]): + acknowledgments_created_dt_date (Union[Unset, datetime.date]): + acknowledgments_created_dt_date_gte (Union[Unset, datetime.date]): + acknowledgments_created_dt_date_lte (Union[Unset, datetime.date]): + acknowledgments_created_dt_gt (Union[Unset, datetime.datetime]): + acknowledgments_created_dt_gte (Union[Unset, datetime.datetime]): + acknowledgments_created_dt_lt (Union[Unset, datetime.datetime]): + acknowledgments_created_dt_lte (Union[Unset, datetime.datetime]): + acknowledgments_from_upstream (Union[Unset, bool]): + acknowledgments_name (Union[Unset, str]): + acknowledgments_updated_dt (Union[Unset, datetime.datetime]): + acknowledgments_updated_dt_date (Union[Unset, datetime.date]): + acknowledgments_updated_dt_date_gte (Union[Unset, datetime.date]): + acknowledgments_updated_dt_date_lte (Union[Unset, datetime.date]): + acknowledgments_updated_dt_gt (Union[Unset, datetime.datetime]): + acknowledgments_updated_dt_gte (Union[Unset, datetime.datetime]): + acknowledgments_updated_dt_lt (Union[Unset, datetime.datetime]): + acknowledgments_updated_dt_lte (Union[Unset, datetime.datetime]): + acknowledgments_uuid (Union[Unset, UUID]): + affects_affectedness (Union[Unset, OsidbApiV1FlawsListAffectsAffectedness]): + affects_created_dt (Union[Unset, datetime.datetime]): + affects_created_dt_date (Union[Unset, datetime.date]): + affects_created_dt_date_gte (Union[Unset, datetime.date]): + affects_created_dt_date_lte (Union[Unset, datetime.date]): + affects_created_dt_gt 
(Union[Unset, datetime.datetime]): + affects_created_dt_gte (Union[Unset, datetime.datetime]): + affects_created_dt_lt (Union[Unset, datetime.datetime]): + affects_created_dt_lte (Union[Unset, datetime.datetime]): + affects_embargoed (Union[Unset, bool]): + affects_impact (Union[Unset, OsidbApiV1FlawsListAffectsImpact]): + affects_ps_component (Union[Unset, str]): + affects_ps_module (Union[Unset, str]): + affects_resolution (Union[Unset, OsidbApiV1FlawsListAffectsResolution]): + affects_trackers_created_dt (Union[Unset, datetime.datetime]): + affects_trackers_created_dt_date (Union[Unset, datetime.date]): + affects_trackers_created_dt_date_gte (Union[Unset, datetime.date]): + affects_trackers_created_dt_date_lte (Union[Unset, datetime.date]): + affects_trackers_created_dt_gt (Union[Unset, datetime.datetime]): + affects_trackers_created_dt_gte (Union[Unset, datetime.datetime]): + affects_trackers_created_dt_lt (Union[Unset, datetime.datetime]): + affects_trackers_created_dt_lte (Union[Unset, datetime.datetime]): + affects_trackers_embargoed (Union[Unset, bool]): + affects_trackers_errata_advisory_name (Union[Unset, str]): + affects_trackers_errata_et_id (Union[Unset, int]): + affects_trackers_errata_shipped_dt (Union[Unset, datetime.datetime]): + affects_trackers_errata_shipped_dt_date (Union[Unset, datetime.date]): + affects_trackers_errata_shipped_dt_date_gte (Union[Unset, datetime.date]): + affects_trackers_errata_shipped_dt_date_lte (Union[Unset, datetime.date]): + affects_trackers_errata_shipped_dt_gt (Union[Unset, datetime.datetime]): + affects_trackers_errata_shipped_dt_gte (Union[Unset, datetime.datetime]): + affects_trackers_errata_shipped_dt_lt (Union[Unset, datetime.datetime]): + affects_trackers_errata_shipped_dt_lte (Union[Unset, datetime.datetime]): + affects_trackers_external_system_id (Union[Unset, str]): + affects_trackers_ps_update_stream (Union[Unset, str]): + affects_trackers_resolution (Union[Unset, str]): + affects_trackers_status (Union[Unset, str]): + affects_trackers_type (Union[Unset, OsidbApiV1FlawsListAffectsTrackersType]): + affects_trackers_updated_dt (Union[Unset, datetime.datetime]): + affects_trackers_updated_dt_date (Union[Unset, datetime.date]): + affects_trackers_updated_dt_date_gte (Union[Unset, datetime.date]): + affects_trackers_updated_dt_date_lte (Union[Unset, datetime.date]): + affects_trackers_updated_dt_gt (Union[Unset, datetime.datetime]): + affects_trackers_updated_dt_gte (Union[Unset, datetime.datetime]): + affects_trackers_updated_dt_lt (Union[Unset, datetime.datetime]): + affects_trackers_updated_dt_lte (Union[Unset, datetime.datetime]): + affects_trackers_uuid (Union[Unset, UUID]): + affects_updated_dt (Union[Unset, datetime.datetime]): + affects_updated_dt_date (Union[Unset, datetime.date]): + affects_updated_dt_date_gte (Union[Unset, datetime.date]): + affects_updated_dt_date_lte (Union[Unset, datetime.date]): + affects_updated_dt_gt (Union[Unset, datetime.datetime]): + affects_updated_dt_gte (Union[Unset, datetime.datetime]): + affects_updated_dt_lt (Union[Unset, datetime.datetime]): + affects_updated_dt_lte (Union[Unset, datetime.datetime]): + affects_uuid (Union[Unset, UUID]): + bz_id (Union[Unset, float]): + changed_after (Union[Unset, datetime.datetime]): + changed_before (Union[Unset, datetime.datetime]): + comment_zero (Union[Unset, str]): + components (Union[Unset, list[str]]): + created_dt (Union[Unset, datetime.datetime]): + created_dt_date (Union[Unset, datetime.date]): + created_dt_date_gte (Union[Unset, datetime.date]): + 
created_dt_date_lte (Union[Unset, datetime.date]): + created_dt_gt (Union[Unset, datetime.datetime]): + created_dt_gte (Union[Unset, datetime.datetime]): + created_dt_lt (Union[Unset, datetime.datetime]): + created_dt_lte (Union[Unset, datetime.datetime]): + cve_description (Union[Unset, str]): + cve_description_isempty (Union[Unset, bool]): + cve_id (Union[Unset, list[str]]): + cve_id_isempty (Union[Unset, bool]): + cvss2_nist_isempty (Union[Unset, bool]): + cvss2_rh_isempty (Union[Unset, bool]): + cvss3_nist_isempty (Union[Unset, bool]): + cvss3_rh_isempty (Union[Unset, bool]): + cvss4_nist_isempty (Union[Unset, bool]): + cvss4_rh_isempty (Union[Unset, bool]): + cvss_scores_comment (Union[Unset, str]): + cvss_scores_created_dt (Union[Unset, datetime.datetime]): + cvss_scores_created_dt_date (Union[Unset, datetime.date]): + cvss_scores_created_dt_date_gte (Union[Unset, datetime.date]): + cvss_scores_created_dt_date_lte (Union[Unset, datetime.date]): + cvss_scores_created_dt_gt (Union[Unset, datetime.datetime]): + cvss_scores_created_dt_gte (Union[Unset, datetime.datetime]): + cvss_scores_created_dt_lt (Union[Unset, datetime.datetime]): + cvss_scores_created_dt_lte (Union[Unset, datetime.datetime]): + cvss_scores_cvss_version (Union[Unset, str]): + cvss_scores_issuer (Union[Unset, OsidbApiV1FlawsListCvssScoresIssuer]): + cvss_scores_score (Union[Unset, float]): + cvss_scores_updated_dt (Union[Unset, datetime.datetime]): + cvss_scores_updated_dt_date (Union[Unset, datetime.date]): + cvss_scores_updated_dt_date_gte (Union[Unset, datetime.date]): + cvss_scores_updated_dt_date_lte (Union[Unset, datetime.date]): + cvss_scores_updated_dt_gt (Union[Unset, datetime.datetime]): + cvss_scores_updated_dt_gte (Union[Unset, datetime.datetime]): + cvss_scores_updated_dt_lt (Union[Unset, datetime.datetime]): + cvss_scores_updated_dt_lte (Union[Unset, datetime.datetime]): + cvss_scores_uuid (Union[Unset, UUID]): + cvss_scores_vector (Union[Unset, str]): + cwe_id (Union[Unset, str]): + cwe_id_isempty (Union[Unset, bool]): + embargoed (Union[Unset, bool]): + exclude_fields (Union[Unset, list[str]]): + impact (Union[Unset, OsidbApiV1FlawsListImpact]): + include_fields (Union[Unset, list[str]]): + include_meta_attr (Union[Unset, list[str]]): + limit (Union[Unset, int]): + major_incident_start_dt (Union[Unset, datetime.datetime]): + major_incident_start_dt_date (Union[Unset, datetime.date]): + major_incident_start_dt_date_gte (Union[Unset, datetime.date]): + major_incident_start_dt_date_lte (Union[Unset, datetime.date]): + major_incident_start_dt_gt (Union[Unset, datetime.datetime]): + major_incident_start_dt_gte (Union[Unset, datetime.datetime]): + major_incident_start_dt_lt (Union[Unset, datetime.datetime]): + major_incident_start_dt_lte (Union[Unset, datetime.datetime]): + major_incident_state (Union[Unset, OsidbApiV1FlawsListMajorIncidentState]): + mitigation_isempty (Union[Unset, bool]): + nist_cvss_validation (Union[Unset, OsidbApiV1FlawsListNistCvssValidation]): + offset (Union[Unset, int]): + order (Union[Unset, list[OsidbApiV1FlawsListOrderItem]]): + owner (Union[Unset, str]): + owner_isempty (Union[Unset, bool]): + query (Union[Unset, str]): + references_created_dt (Union[Unset, datetime.datetime]): + references_created_dt_date (Union[Unset, datetime.date]): + references_created_dt_date_gte (Union[Unset, datetime.date]): + references_created_dt_date_lte (Union[Unset, datetime.date]): + references_created_dt_gt (Union[Unset, datetime.datetime]): + references_created_dt_gte (Union[Unset, 
datetime.datetime]): + references_created_dt_lt (Union[Unset, datetime.datetime]): + references_created_dt_lte (Union[Unset, datetime.datetime]): + references_description (Union[Unset, str]): + references_type (Union[Unset, OsidbApiV1FlawsListReferencesType]): + references_updated_dt (Union[Unset, datetime.datetime]): + references_updated_dt_date (Union[Unset, datetime.date]): + references_updated_dt_date_gte (Union[Unset, datetime.date]): + references_updated_dt_date_lte (Union[Unset, datetime.date]): + references_updated_dt_gt (Union[Unset, datetime.datetime]): + references_updated_dt_gte (Union[Unset, datetime.datetime]): + references_updated_dt_lt (Union[Unset, datetime.datetime]): + references_updated_dt_lte (Union[Unset, datetime.datetime]): + references_url (Union[Unset, str]): + references_uuid (Union[Unset, UUID]): + reported_dt (Union[Unset, datetime.datetime]): + reported_dt_date (Union[Unset, datetime.date]): + reported_dt_date_gte (Union[Unset, datetime.date]): + reported_dt_date_lte (Union[Unset, datetime.date]): + reported_dt_gt (Union[Unset, datetime.datetime]): + reported_dt_gte (Union[Unset, datetime.datetime]): + reported_dt_lt (Union[Unset, datetime.datetime]): + reported_dt_lte (Union[Unset, datetime.datetime]): + requires_cve_description (Union[Unset, OsidbApiV1FlawsListRequiresCveDescription]): + search (Union[Unset, str]): + source (Union[Unset, OsidbApiV1FlawsListSource]): + statement (Union[Unset, str]): + statement_isempty (Union[Unset, bool]): + team_id (Union[Unset, str]): + title (Union[Unset, str]): + tracker_ids (Union[Unset, list[str]]): + unembargo_dt (Union[Unset, datetime.datetime]): + updated_dt (Union[Unset, datetime.datetime]): + updated_dt_date (Union[Unset, datetime.date]): + updated_dt_date_gte (Union[Unset, datetime.date]): + updated_dt_date_lte (Union[Unset, datetime.date]): + updated_dt_gt (Union[Unset, datetime.datetime]): + updated_dt_gte (Union[Unset, datetime.datetime]): + updated_dt_lt (Union[Unset, datetime.datetime]): + updated_dt_lte (Union[Unset, datetime.datetime]): + uuid (Union[Unset, UUID]): + workflow_state (Union[Unset, list[OsidbApiV1FlawsListWorkflowStateItem]]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[OsidbApiV1FlawsListResponse200] + """ + kwargs = _get_kwargs( client=client, acknowledgments_affiliation=acknowledgments_affiliation, @@ -2116,232 +2065,407 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( *, client: AuthenticatedClient, - acknowledgments_affiliation: Union[Unset, None, str] = UNSET, - acknowledgments_created_dt: Union[Unset, None, datetime.datetime] = UNSET, - acknowledgments_created_dt_date: Union[Unset, None, datetime.date] = UNSET, - acknowledgments_created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - acknowledgments_created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - acknowledgments_created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - acknowledgments_created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - acknowledgments_created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - acknowledgments_created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - acknowledgments_from_upstream: Union[Unset, None, bool] = UNSET, - acknowledgments_name: Union[Unset, None, str] = UNSET, - acknowledgments_updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - acknowledgments_updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - acknowledgments_updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - acknowledgments_updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - acknowledgments_updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - acknowledgments_updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - acknowledgments_updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - acknowledgments_updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - acknowledgments_uuid: Union[Unset, None, str] = UNSET, - affects_affectedness: Union[ - Unset, None, OsidbApiV1FlawsListAffectsAffectedness - ] = UNSET, - affects_created_dt: Union[Unset, None, datetime.datetime] = UNSET, - affects_created_dt_date: Union[Unset, None, datetime.date] = UNSET, - affects_created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - affects_created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - affects_created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - affects_created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - affects_created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - affects_created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - affects_embargoed: Union[Unset, None, bool] = UNSET, - affects_impact: Union[Unset, None, OsidbApiV1FlawsListAffectsImpact] = UNSET, - affects_ps_component: Union[Unset, None, str] = UNSET, - affects_ps_module: Union[Unset, None, str] = UNSET, - affects_resolution: Union[ - Unset, None, OsidbApiV1FlawsListAffectsResolution - ] = UNSET, - affects_trackers_created_dt: Union[Unset, None, datetime.datetime] = UNSET, - affects_trackers_created_dt_date: Union[Unset, None, datetime.date] = UNSET, - affects_trackers_created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - affects_trackers_created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - affects_trackers_created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - affects_trackers_created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - affects_trackers_created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - affects_trackers_created_dt_lte: Union[Unset, None, 
datetime.datetime] = UNSET, - affects_trackers_embargoed: Union[Unset, None, bool] = UNSET, - affects_trackers_errata_advisory_name: Union[Unset, None, str] = UNSET, - affects_trackers_errata_et_id: Union[Unset, None, int] = UNSET, - affects_trackers_errata_shipped_dt: Union[Unset, None, datetime.datetime] = UNSET, - affects_trackers_errata_shipped_dt_date: Union[Unset, None, datetime.date] = UNSET, - affects_trackers_errata_shipped_dt_date_gte: Union[ - Unset, None, datetime.date - ] = UNSET, - affects_trackers_errata_shipped_dt_date_lte: Union[ - Unset, None, datetime.date - ] = UNSET, - affects_trackers_errata_shipped_dt_gt: Union[ - Unset, None, datetime.datetime - ] = UNSET, - affects_trackers_errata_shipped_dt_gte: Union[ - Unset, None, datetime.datetime - ] = UNSET, - affects_trackers_errata_shipped_dt_lt: Union[ - Unset, None, datetime.datetime - ] = UNSET, - affects_trackers_errata_shipped_dt_lte: Union[ - Unset, None, datetime.datetime - ] = UNSET, - affects_trackers_external_system_id: Union[Unset, None, str] = UNSET, - affects_trackers_ps_update_stream: Union[Unset, None, str] = UNSET, - affects_trackers_resolution: Union[Unset, None, str] = UNSET, - affects_trackers_status: Union[Unset, None, str] = UNSET, - affects_trackers_type: Union[ - Unset, None, OsidbApiV1FlawsListAffectsTrackersType - ] = UNSET, - affects_trackers_updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - affects_trackers_updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - affects_trackers_updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - affects_trackers_updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - affects_trackers_updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - affects_trackers_updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - affects_trackers_updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - affects_trackers_updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - affects_trackers_uuid: Union[Unset, None, str] = UNSET, - affects_updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - affects_updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - affects_updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - affects_updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - affects_updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - affects_updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - affects_updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - affects_updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - affects_uuid: Union[Unset, None, str] = UNSET, - bz_id: Union[Unset, None, float] = UNSET, - changed_after: Union[Unset, None, datetime.datetime] = UNSET, - changed_before: Union[Unset, None, datetime.datetime] = UNSET, - comment_zero: Union[Unset, None, str] = UNSET, - components: Union[Unset, None, List[str]] = UNSET, - created_dt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_date: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - cve_description: Union[Unset, None, str] = UNSET, - cve_description_isempty: Union[Unset, 
None, bool] = UNSET, - cve_id: Union[Unset, None, List[str]] = UNSET, - cve_id_isempty: Union[Unset, None, bool] = UNSET, - cvss2_nist_isempty: Union[Unset, None, bool] = UNSET, - cvss2_rh_isempty: Union[Unset, None, bool] = UNSET, - cvss3_nist_isempty: Union[Unset, None, bool] = UNSET, - cvss3_rh_isempty: Union[Unset, None, bool] = UNSET, - cvss4_nist_isempty: Union[Unset, None, bool] = UNSET, - cvss4_rh_isempty: Union[Unset, None, bool] = UNSET, - cvss_scores_comment: Union[Unset, None, str] = UNSET, - cvss_scores_created_dt: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_created_dt_date: Union[Unset, None, datetime.date] = UNSET, - cvss_scores_created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - cvss_scores_created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - cvss_scores_created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_cvss_version: Union[Unset, None, str] = UNSET, - cvss_scores_issuer: Union[Unset, None, OsidbApiV1FlawsListCvssScoresIssuer] = UNSET, - cvss_scores_score: Union[Unset, None, float] = UNSET, - cvss_scores_updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - cvss_scores_updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - cvss_scores_updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - cvss_scores_updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_uuid: Union[Unset, None, str] = UNSET, - cvss_scores_vector: Union[Unset, None, str] = UNSET, - cwe_id: Union[Unset, None, str] = UNSET, - cwe_id_isempty: Union[Unset, None, bool] = UNSET, - embargoed: Union[Unset, None, bool] = UNSET, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - impact: Union[Unset, None, OsidbApiV1FlawsListImpact] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, - include_meta_attr: Union[Unset, None, List[str]] = UNSET, - limit: Union[Unset, None, int] = UNSET, - major_incident_start_dt: Union[Unset, None, datetime.datetime] = UNSET, - major_incident_start_dt_date: Union[Unset, None, datetime.date] = UNSET, - major_incident_start_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - major_incident_start_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - major_incident_start_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - major_incident_start_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - major_incident_start_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - major_incident_start_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - major_incident_state: Union[ - Unset, None, OsidbApiV1FlawsListMajorIncidentState - ] = UNSET, - mitigation_isempty: Union[Unset, None, bool] = UNSET, - nist_cvss_validation: Union[ - Unset, None, OsidbApiV1FlawsListNistCvssValidation - ] = UNSET, - offset: Union[Unset, None, int] = UNSET, - order: Union[Unset, None, List[OsidbApiV1FlawsListOrderItem]] = UNSET, - owner: Union[Unset, None, str] = UNSET, - owner_isempty: Union[Unset, None, bool] = UNSET, - query: 
Union[Unset, None, str] = UNSET, - references_created_dt: Union[Unset, None, datetime.datetime] = UNSET, - references_created_dt_date: Union[Unset, None, datetime.date] = UNSET, - references_created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - references_created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - references_created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - references_created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - references_created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - references_created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - references_description: Union[Unset, None, str] = UNSET, - references_type: Union[Unset, None, OsidbApiV1FlawsListReferencesType] = UNSET, - references_updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - references_updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - references_updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - references_updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - references_updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - references_updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - references_updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - references_updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - references_url: Union[Unset, None, str] = UNSET, - references_uuid: Union[Unset, None, str] = UNSET, - reported_dt: Union[Unset, None, datetime.datetime] = UNSET, - reported_dt_date: Union[Unset, None, datetime.date] = UNSET, - reported_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - reported_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - reported_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - reported_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - reported_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - reported_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - requires_cve_description: Union[ - Unset, None, OsidbApiV1FlawsListRequiresCveDescription - ] = UNSET, - search: Union[Unset, None, str] = UNSET, - source: Union[Unset, None, OsidbApiV1FlawsListSource] = UNSET, - statement: Union[Unset, None, str] = UNSET, - statement_isempty: Union[Unset, None, bool] = UNSET, - team_id: Union[Unset, None, str] = UNSET, - title: Union[Unset, None, str] = UNSET, - tracker_ids: Union[Unset, None, List[str]] = UNSET, - unembargo_dt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - uuid: Union[Unset, None, str] = UNSET, - workflow_state: Union[ - Unset, None, List[OsidbApiV1FlawsListWorkflowStateItem] - ] = UNSET, + acknowledgments_affiliation: Union[Unset, str] = UNSET, + acknowledgments_created_dt: Union[Unset, datetime.datetime] = UNSET, + acknowledgments_created_dt_date: Union[Unset, datetime.date] = UNSET, + acknowledgments_created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + acknowledgments_created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + acknowledgments_created_dt_gt: 
Union[Unset, datetime.datetime] = UNSET, + acknowledgments_created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + acknowledgments_created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + acknowledgments_created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + acknowledgments_from_upstream: Union[Unset, bool] = UNSET, + acknowledgments_name: Union[Unset, str] = UNSET, + acknowledgments_updated_dt: Union[Unset, datetime.datetime] = UNSET, + acknowledgments_updated_dt_date: Union[Unset, datetime.date] = UNSET, + acknowledgments_updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + acknowledgments_updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + acknowledgments_updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + acknowledgments_updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + acknowledgments_updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + acknowledgments_updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + acknowledgments_uuid: Union[Unset, UUID] = UNSET, + affects_affectedness: Union[Unset, OsidbApiV1FlawsListAffectsAffectedness] = UNSET, + affects_created_dt: Union[Unset, datetime.datetime] = UNSET, + affects_created_dt_date: Union[Unset, datetime.date] = UNSET, + affects_created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + affects_created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + affects_created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + affects_created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + affects_created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + affects_created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + affects_embargoed: Union[Unset, bool] = UNSET, + affects_impact: Union[Unset, OsidbApiV1FlawsListAffectsImpact] = UNSET, + affects_ps_component: Union[Unset, str] = UNSET, + affects_ps_module: Union[Unset, str] = UNSET, + affects_resolution: Union[Unset, OsidbApiV1FlawsListAffectsResolution] = UNSET, + affects_trackers_created_dt: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_created_dt_date: Union[Unset, datetime.date] = UNSET, + affects_trackers_created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + affects_trackers_created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + affects_trackers_created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_embargoed: Union[Unset, bool] = UNSET, + affects_trackers_errata_advisory_name: Union[Unset, str] = UNSET, + affects_trackers_errata_et_id: Union[Unset, int] = UNSET, + affects_trackers_errata_shipped_dt: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_errata_shipped_dt_date: Union[Unset, datetime.date] = UNSET, + affects_trackers_errata_shipped_dt_date_gte: Union[Unset, datetime.date] = UNSET, + affects_trackers_errata_shipped_dt_date_lte: Union[Unset, datetime.date] = UNSET, + affects_trackers_errata_shipped_dt_gt: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_errata_shipped_dt_gte: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_errata_shipped_dt_lt: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_errata_shipped_dt_lte: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_external_system_id: Union[Unset, str] = UNSET, + affects_trackers_ps_update_stream: Union[Unset, str] = UNSET, + affects_trackers_resolution: 
Union[Unset, str] = UNSET, + affects_trackers_status: Union[Unset, str] = UNSET, + affects_trackers_type: Union[Unset, OsidbApiV1FlawsListAffectsTrackersType] = UNSET, + affects_trackers_updated_dt: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_updated_dt_date: Union[Unset, datetime.date] = UNSET, + affects_trackers_updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + affects_trackers_updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + affects_trackers_updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_uuid: Union[Unset, UUID] = UNSET, + affects_updated_dt: Union[Unset, datetime.datetime] = UNSET, + affects_updated_dt_date: Union[Unset, datetime.date] = UNSET, + affects_updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + affects_updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + affects_updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + affects_updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + affects_updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + affects_updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + affects_uuid: Union[Unset, UUID] = UNSET, + bz_id: Union[Unset, float] = UNSET, + changed_after: Union[Unset, datetime.datetime] = UNSET, + changed_before: Union[Unset, datetime.datetime] = UNSET, + comment_zero: Union[Unset, str] = UNSET, + components: Union[Unset, list[str]] = UNSET, + created_dt: Union[Unset, datetime.datetime] = UNSET, + created_dt_date: Union[Unset, datetime.date] = UNSET, + created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + cve_description: Union[Unset, str] = UNSET, + cve_description_isempty: Union[Unset, bool] = UNSET, + cve_id: Union[Unset, list[str]] = UNSET, + cve_id_isempty: Union[Unset, bool] = UNSET, + cvss2_nist_isempty: Union[Unset, bool] = UNSET, + cvss2_rh_isempty: Union[Unset, bool] = UNSET, + cvss3_nist_isempty: Union[Unset, bool] = UNSET, + cvss3_rh_isempty: Union[Unset, bool] = UNSET, + cvss4_nist_isempty: Union[Unset, bool] = UNSET, + cvss4_rh_isempty: Union[Unset, bool] = UNSET, + cvss_scores_comment: Union[Unset, str] = UNSET, + cvss_scores_created_dt: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_created_dt_date: Union[Unset, datetime.date] = UNSET, + cvss_scores_created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + cvss_scores_created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + cvss_scores_created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_cvss_version: Union[Unset, str] = UNSET, + cvss_scores_issuer: Union[Unset, OsidbApiV1FlawsListCvssScoresIssuer] = UNSET, + cvss_scores_score: Union[Unset, float] = UNSET, + cvss_scores_updated_dt: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_updated_dt_date: Union[Unset, datetime.date] = UNSET, + 
cvss_scores_updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + cvss_scores_updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + cvss_scores_updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_uuid: Union[Unset, UUID] = UNSET, + cvss_scores_vector: Union[Unset, str] = UNSET, + cwe_id: Union[Unset, str] = UNSET, + cwe_id_isempty: Union[Unset, bool] = UNSET, + embargoed: Union[Unset, bool] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + impact: Union[Unset, OsidbApiV1FlawsListImpact] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, + include_meta_attr: Union[Unset, list[str]] = UNSET, + limit: Union[Unset, int] = UNSET, + major_incident_start_dt: Union[Unset, datetime.datetime] = UNSET, + major_incident_start_dt_date: Union[Unset, datetime.date] = UNSET, + major_incident_start_dt_date_gte: Union[Unset, datetime.date] = UNSET, + major_incident_start_dt_date_lte: Union[Unset, datetime.date] = UNSET, + major_incident_start_dt_gt: Union[Unset, datetime.datetime] = UNSET, + major_incident_start_dt_gte: Union[Unset, datetime.datetime] = UNSET, + major_incident_start_dt_lt: Union[Unset, datetime.datetime] = UNSET, + major_incident_start_dt_lte: Union[Unset, datetime.datetime] = UNSET, + major_incident_state: Union[Unset, OsidbApiV1FlawsListMajorIncidentState] = UNSET, + mitigation_isempty: Union[Unset, bool] = UNSET, + nist_cvss_validation: Union[Unset, OsidbApiV1FlawsListNistCvssValidation] = UNSET, + offset: Union[Unset, int] = UNSET, + order: Union[Unset, list[OsidbApiV1FlawsListOrderItem]] = UNSET, + owner: Union[Unset, str] = UNSET, + owner_isempty: Union[Unset, bool] = UNSET, + query: Union[Unset, str] = UNSET, + references_created_dt: Union[Unset, datetime.datetime] = UNSET, + references_created_dt_date: Union[Unset, datetime.date] = UNSET, + references_created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + references_created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + references_created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + references_created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + references_created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + references_created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + references_description: Union[Unset, str] = UNSET, + references_type: Union[Unset, OsidbApiV1FlawsListReferencesType] = UNSET, + references_updated_dt: Union[Unset, datetime.datetime] = UNSET, + references_updated_dt_date: Union[Unset, datetime.date] = UNSET, + references_updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + references_updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + references_updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + references_updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + references_updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + references_updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + references_url: Union[Unset, str] = UNSET, + references_uuid: Union[Unset, UUID] = UNSET, + reported_dt: Union[Unset, datetime.datetime] = UNSET, + reported_dt_date: Union[Unset, datetime.date] = UNSET, + reported_dt_date_gte: Union[Unset, datetime.date] = UNSET, + reported_dt_date_lte: Union[Unset, datetime.date] = UNSET, + reported_dt_gt: Union[Unset, datetime.datetime] = UNSET, + reported_dt_gte: 
Union[Unset, datetime.datetime] = UNSET, + reported_dt_lt: Union[Unset, datetime.datetime] = UNSET, + reported_dt_lte: Union[Unset, datetime.datetime] = UNSET, + requires_cve_description: Union[Unset, OsidbApiV1FlawsListRequiresCveDescription] = UNSET, + search: Union[Unset, str] = UNSET, + source: Union[Unset, OsidbApiV1FlawsListSource] = UNSET, + statement: Union[Unset, str] = UNSET, + statement_isempty: Union[Unset, bool] = UNSET, + team_id: Union[Unset, str] = UNSET, + title: Union[Unset, str] = UNSET, + tracker_ids: Union[Unset, list[str]] = UNSET, + unembargo_dt: Union[Unset, datetime.datetime] = UNSET, + updated_dt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_date: Union[Unset, datetime.date] = UNSET, + updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + uuid: Union[Unset, UUID] = UNSET, + workflow_state: Union[Unset, list[OsidbApiV1FlawsListWorkflowStateItem]] = UNSET, ) -> Optional[OsidbApiV1FlawsListResponse200]: - """ """ + """ + Args: + acknowledgments_affiliation (Union[Unset, str]): + acknowledgments_created_dt (Union[Unset, datetime.datetime]): + acknowledgments_created_dt_date (Union[Unset, datetime.date]): + acknowledgments_created_dt_date_gte (Union[Unset, datetime.date]): + acknowledgments_created_dt_date_lte (Union[Unset, datetime.date]): + acknowledgments_created_dt_gt (Union[Unset, datetime.datetime]): + acknowledgments_created_dt_gte (Union[Unset, datetime.datetime]): + acknowledgments_created_dt_lt (Union[Unset, datetime.datetime]): + acknowledgments_created_dt_lte (Union[Unset, datetime.datetime]): + acknowledgments_from_upstream (Union[Unset, bool]): + acknowledgments_name (Union[Unset, str]): + acknowledgments_updated_dt (Union[Unset, datetime.datetime]): + acknowledgments_updated_dt_date (Union[Unset, datetime.date]): + acknowledgments_updated_dt_date_gte (Union[Unset, datetime.date]): + acknowledgments_updated_dt_date_lte (Union[Unset, datetime.date]): + acknowledgments_updated_dt_gt (Union[Unset, datetime.datetime]): + acknowledgments_updated_dt_gte (Union[Unset, datetime.datetime]): + acknowledgments_updated_dt_lt (Union[Unset, datetime.datetime]): + acknowledgments_updated_dt_lte (Union[Unset, datetime.datetime]): + acknowledgments_uuid (Union[Unset, UUID]): + affects_affectedness (Union[Unset, OsidbApiV1FlawsListAffectsAffectedness]): + affects_created_dt (Union[Unset, datetime.datetime]): + affects_created_dt_date (Union[Unset, datetime.date]): + affects_created_dt_date_gte (Union[Unset, datetime.date]): + affects_created_dt_date_lte (Union[Unset, datetime.date]): + affects_created_dt_gt (Union[Unset, datetime.datetime]): + affects_created_dt_gte (Union[Unset, datetime.datetime]): + affects_created_dt_lt (Union[Unset, datetime.datetime]): + affects_created_dt_lte (Union[Unset, datetime.datetime]): + affects_embargoed (Union[Unset, bool]): + affects_impact (Union[Unset, OsidbApiV1FlawsListAffectsImpact]): + affects_ps_component (Union[Unset, str]): + affects_ps_module (Union[Unset, str]): + affects_resolution (Union[Unset, OsidbApiV1FlawsListAffectsResolution]): + affects_trackers_created_dt (Union[Unset, datetime.datetime]): + affects_trackers_created_dt_date (Union[Unset, datetime.date]): + affects_trackers_created_dt_date_gte (Union[Unset, 
datetime.date]): + affects_trackers_created_dt_date_lte (Union[Unset, datetime.date]): + affects_trackers_created_dt_gt (Union[Unset, datetime.datetime]): + affects_trackers_created_dt_gte (Union[Unset, datetime.datetime]): + affects_trackers_created_dt_lt (Union[Unset, datetime.datetime]): + affects_trackers_created_dt_lte (Union[Unset, datetime.datetime]): + affects_trackers_embargoed (Union[Unset, bool]): + affects_trackers_errata_advisory_name (Union[Unset, str]): + affects_trackers_errata_et_id (Union[Unset, int]): + affects_trackers_errata_shipped_dt (Union[Unset, datetime.datetime]): + affects_trackers_errata_shipped_dt_date (Union[Unset, datetime.date]): + affects_trackers_errata_shipped_dt_date_gte (Union[Unset, datetime.date]): + affects_trackers_errata_shipped_dt_date_lte (Union[Unset, datetime.date]): + affects_trackers_errata_shipped_dt_gt (Union[Unset, datetime.datetime]): + affects_trackers_errata_shipped_dt_gte (Union[Unset, datetime.datetime]): + affects_trackers_errata_shipped_dt_lt (Union[Unset, datetime.datetime]): + affects_trackers_errata_shipped_dt_lte (Union[Unset, datetime.datetime]): + affects_trackers_external_system_id (Union[Unset, str]): + affects_trackers_ps_update_stream (Union[Unset, str]): + affects_trackers_resolution (Union[Unset, str]): + affects_trackers_status (Union[Unset, str]): + affects_trackers_type (Union[Unset, OsidbApiV1FlawsListAffectsTrackersType]): + affects_trackers_updated_dt (Union[Unset, datetime.datetime]): + affects_trackers_updated_dt_date (Union[Unset, datetime.date]): + affects_trackers_updated_dt_date_gte (Union[Unset, datetime.date]): + affects_trackers_updated_dt_date_lte (Union[Unset, datetime.date]): + affects_trackers_updated_dt_gt (Union[Unset, datetime.datetime]): + affects_trackers_updated_dt_gte (Union[Unset, datetime.datetime]): + affects_trackers_updated_dt_lt (Union[Unset, datetime.datetime]): + affects_trackers_updated_dt_lte (Union[Unset, datetime.datetime]): + affects_trackers_uuid (Union[Unset, UUID]): + affects_updated_dt (Union[Unset, datetime.datetime]): + affects_updated_dt_date (Union[Unset, datetime.date]): + affects_updated_dt_date_gte (Union[Unset, datetime.date]): + affects_updated_dt_date_lte (Union[Unset, datetime.date]): + affects_updated_dt_gt (Union[Unset, datetime.datetime]): + affects_updated_dt_gte (Union[Unset, datetime.datetime]): + affects_updated_dt_lt (Union[Unset, datetime.datetime]): + affects_updated_dt_lte (Union[Unset, datetime.datetime]): + affects_uuid (Union[Unset, UUID]): + bz_id (Union[Unset, float]): + changed_after (Union[Unset, datetime.datetime]): + changed_before (Union[Unset, datetime.datetime]): + comment_zero (Union[Unset, str]): + components (Union[Unset, list[str]]): + created_dt (Union[Unset, datetime.datetime]): + created_dt_date (Union[Unset, datetime.date]): + created_dt_date_gte (Union[Unset, datetime.date]): + created_dt_date_lte (Union[Unset, datetime.date]): + created_dt_gt (Union[Unset, datetime.datetime]): + created_dt_gte (Union[Unset, datetime.datetime]): + created_dt_lt (Union[Unset, datetime.datetime]): + created_dt_lte (Union[Unset, datetime.datetime]): + cve_description (Union[Unset, str]): + cve_description_isempty (Union[Unset, bool]): + cve_id (Union[Unset, list[str]]): + cve_id_isempty (Union[Unset, bool]): + cvss2_nist_isempty (Union[Unset, bool]): + cvss2_rh_isempty (Union[Unset, bool]): + cvss3_nist_isempty (Union[Unset, bool]): + cvss3_rh_isempty (Union[Unset, bool]): + cvss4_nist_isempty (Union[Unset, bool]): + cvss4_rh_isempty (Union[Unset, bool]): 
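To make the type changes in this regenerated signature concrete (built-in list[str] instead of typing.List[str], uuid.UUID rather than str for the *_uuid filters, and UNSET as the only "not provided" sentinel now that None has been dropped), a hypothetical call to the sync convenience wrapper might look like the following; the CVE IDs, UUID and URL are invented for illustration, and the types import assumes the generated types module still exposes UNSET.

from uuid import UUID

from osidb_bindings.bindings.python_client import AuthenticatedClient
from osidb_bindings.bindings.python_client.api.osidb import osidb_api_v1_flaws_list
from osidb_bindings.bindings.python_client.types import UNSET

client = AuthenticatedClient(base_url="https://osidb.example.com", token="REPLACE_ME")

flaws = osidb_api_v1_flaws_list.sync(
    client=client,
    cve_id=["CVE-2024-0001", "CVE-2024-0002"],  # built-in list[str], no typing.List
    affects_uuid=UUID("11111111-2222-3333-4444-555555555555"),  # UUID object, not str
    owner=UNSET,  # UNSET is the default; shown only to stress that None is no longer accepted
    limit=25,
)
# sync() returns the parsed OsidbApiV1FlawsListResponse200 (or None), not a Response wrapper.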
+ cvss_scores_comment (Union[Unset, str]): + cvss_scores_created_dt (Union[Unset, datetime.datetime]): + cvss_scores_created_dt_date (Union[Unset, datetime.date]): + cvss_scores_created_dt_date_gte (Union[Unset, datetime.date]): + cvss_scores_created_dt_date_lte (Union[Unset, datetime.date]): + cvss_scores_created_dt_gt (Union[Unset, datetime.datetime]): + cvss_scores_created_dt_gte (Union[Unset, datetime.datetime]): + cvss_scores_created_dt_lt (Union[Unset, datetime.datetime]): + cvss_scores_created_dt_lte (Union[Unset, datetime.datetime]): + cvss_scores_cvss_version (Union[Unset, str]): + cvss_scores_issuer (Union[Unset, OsidbApiV1FlawsListCvssScoresIssuer]): + cvss_scores_score (Union[Unset, float]): + cvss_scores_updated_dt (Union[Unset, datetime.datetime]): + cvss_scores_updated_dt_date (Union[Unset, datetime.date]): + cvss_scores_updated_dt_date_gte (Union[Unset, datetime.date]): + cvss_scores_updated_dt_date_lte (Union[Unset, datetime.date]): + cvss_scores_updated_dt_gt (Union[Unset, datetime.datetime]): + cvss_scores_updated_dt_gte (Union[Unset, datetime.datetime]): + cvss_scores_updated_dt_lt (Union[Unset, datetime.datetime]): + cvss_scores_updated_dt_lte (Union[Unset, datetime.datetime]): + cvss_scores_uuid (Union[Unset, UUID]): + cvss_scores_vector (Union[Unset, str]): + cwe_id (Union[Unset, str]): + cwe_id_isempty (Union[Unset, bool]): + embargoed (Union[Unset, bool]): + exclude_fields (Union[Unset, list[str]]): + impact (Union[Unset, OsidbApiV1FlawsListImpact]): + include_fields (Union[Unset, list[str]]): + include_meta_attr (Union[Unset, list[str]]): + limit (Union[Unset, int]): + major_incident_start_dt (Union[Unset, datetime.datetime]): + major_incident_start_dt_date (Union[Unset, datetime.date]): + major_incident_start_dt_date_gte (Union[Unset, datetime.date]): + major_incident_start_dt_date_lte (Union[Unset, datetime.date]): + major_incident_start_dt_gt (Union[Unset, datetime.datetime]): + major_incident_start_dt_gte (Union[Unset, datetime.datetime]): + major_incident_start_dt_lt (Union[Unset, datetime.datetime]): + major_incident_start_dt_lte (Union[Unset, datetime.datetime]): + major_incident_state (Union[Unset, OsidbApiV1FlawsListMajorIncidentState]): + mitigation_isempty (Union[Unset, bool]): + nist_cvss_validation (Union[Unset, OsidbApiV1FlawsListNistCvssValidation]): + offset (Union[Unset, int]): + order (Union[Unset, list[OsidbApiV1FlawsListOrderItem]]): + owner (Union[Unset, str]): + owner_isempty (Union[Unset, bool]): + query (Union[Unset, str]): + references_created_dt (Union[Unset, datetime.datetime]): + references_created_dt_date (Union[Unset, datetime.date]): + references_created_dt_date_gte (Union[Unset, datetime.date]): + references_created_dt_date_lte (Union[Unset, datetime.date]): + references_created_dt_gt (Union[Unset, datetime.datetime]): + references_created_dt_gte (Union[Unset, datetime.datetime]): + references_created_dt_lt (Union[Unset, datetime.datetime]): + references_created_dt_lte (Union[Unset, datetime.datetime]): + references_description (Union[Unset, str]): + references_type (Union[Unset, OsidbApiV1FlawsListReferencesType]): + references_updated_dt (Union[Unset, datetime.datetime]): + references_updated_dt_date (Union[Unset, datetime.date]): + references_updated_dt_date_gte (Union[Unset, datetime.date]): + references_updated_dt_date_lte (Union[Unset, datetime.date]): + references_updated_dt_gt (Union[Unset, datetime.datetime]): + references_updated_dt_gte (Union[Unset, datetime.datetime]): + references_updated_dt_lt (Union[Unset, 
datetime.datetime]): + references_updated_dt_lte (Union[Unset, datetime.datetime]): + references_url (Union[Unset, str]): + references_uuid (Union[Unset, UUID]): + reported_dt (Union[Unset, datetime.datetime]): + reported_dt_date (Union[Unset, datetime.date]): + reported_dt_date_gte (Union[Unset, datetime.date]): + reported_dt_date_lte (Union[Unset, datetime.date]): + reported_dt_gt (Union[Unset, datetime.datetime]): + reported_dt_gte (Union[Unset, datetime.datetime]): + reported_dt_lt (Union[Unset, datetime.datetime]): + reported_dt_lte (Union[Unset, datetime.datetime]): + requires_cve_description (Union[Unset, OsidbApiV1FlawsListRequiresCveDescription]): + search (Union[Unset, str]): + source (Union[Unset, OsidbApiV1FlawsListSource]): + statement (Union[Unset, str]): + statement_isempty (Union[Unset, bool]): + team_id (Union[Unset, str]): + title (Union[Unset, str]): + tracker_ids (Union[Unset, list[str]]): + unembargo_dt (Union[Unset, datetime.datetime]): + updated_dt (Union[Unset, datetime.datetime]): + updated_dt_date (Union[Unset, datetime.date]): + updated_dt_date_gte (Union[Unset, datetime.date]): + updated_dt_date_lte (Union[Unset, datetime.date]): + updated_dt_gt (Union[Unset, datetime.datetime]): + updated_dt_gte (Union[Unset, datetime.datetime]): + updated_dt_lt (Union[Unset, datetime.datetime]): + updated_dt_lte (Union[Unset, datetime.datetime]): + uuid (Union[Unset, UUID]): + workflow_state (Union[Unset, list[OsidbApiV1FlawsListWorkflowStateItem]]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + OsidbApiV1FlawsListResponse200 + """ return sync_detailed( client=client, @@ -2540,228 +2664,405 @@ def sync( ).parsed -async def async_detailed( +async def asyncio_detailed( *, client: AuthenticatedClient, - acknowledgments_affiliation: Union[Unset, None, str] = UNSET, - acknowledgments_created_dt: Union[Unset, None, datetime.datetime] = UNSET, - acknowledgments_created_dt_date: Union[Unset, None, datetime.date] = UNSET, - acknowledgments_created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - acknowledgments_created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - acknowledgments_created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - acknowledgments_created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - acknowledgments_created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - acknowledgments_created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - acknowledgments_from_upstream: Union[Unset, None, bool] = UNSET, - acknowledgments_name: Union[Unset, None, str] = UNSET, - acknowledgments_updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - acknowledgments_updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - acknowledgments_updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - acknowledgments_updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - acknowledgments_updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - acknowledgments_updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - acknowledgments_updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - acknowledgments_updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - acknowledgments_uuid: Union[Unset, None, str] = UNSET, - affects_affectedness: Union[ - Unset, None, OsidbApiV1FlawsListAffectsAffectedness - ] = UNSET, - 
affects_created_dt: Union[Unset, None, datetime.datetime] = UNSET, - affects_created_dt_date: Union[Unset, None, datetime.date] = UNSET, - affects_created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - affects_created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - affects_created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - affects_created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - affects_created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - affects_created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - affects_embargoed: Union[Unset, None, bool] = UNSET, - affects_impact: Union[Unset, None, OsidbApiV1FlawsListAffectsImpact] = UNSET, - affects_ps_component: Union[Unset, None, str] = UNSET, - affects_ps_module: Union[Unset, None, str] = UNSET, - affects_resolution: Union[ - Unset, None, OsidbApiV1FlawsListAffectsResolution - ] = UNSET, - affects_trackers_created_dt: Union[Unset, None, datetime.datetime] = UNSET, - affects_trackers_created_dt_date: Union[Unset, None, datetime.date] = UNSET, - affects_trackers_created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - affects_trackers_created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - affects_trackers_created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - affects_trackers_created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - affects_trackers_created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - affects_trackers_created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - affects_trackers_embargoed: Union[Unset, None, bool] = UNSET, - affects_trackers_errata_advisory_name: Union[Unset, None, str] = UNSET, - affects_trackers_errata_et_id: Union[Unset, None, int] = UNSET, - affects_trackers_errata_shipped_dt: Union[Unset, None, datetime.datetime] = UNSET, - affects_trackers_errata_shipped_dt_date: Union[Unset, None, datetime.date] = UNSET, - affects_trackers_errata_shipped_dt_date_gte: Union[ - Unset, None, datetime.date - ] = UNSET, - affects_trackers_errata_shipped_dt_date_lte: Union[ - Unset, None, datetime.date - ] = UNSET, - affects_trackers_errata_shipped_dt_gt: Union[ - Unset, None, datetime.datetime - ] = UNSET, - affects_trackers_errata_shipped_dt_gte: Union[ - Unset, None, datetime.datetime - ] = UNSET, - affects_trackers_errata_shipped_dt_lt: Union[ - Unset, None, datetime.datetime - ] = UNSET, - affects_trackers_errata_shipped_dt_lte: Union[ - Unset, None, datetime.datetime - ] = UNSET, - affects_trackers_external_system_id: Union[Unset, None, str] = UNSET, - affects_trackers_ps_update_stream: Union[Unset, None, str] = UNSET, - affects_trackers_resolution: Union[Unset, None, str] = UNSET, - affects_trackers_status: Union[Unset, None, str] = UNSET, - affects_trackers_type: Union[ - Unset, None, OsidbApiV1FlawsListAffectsTrackersType - ] = UNSET, - affects_trackers_updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - affects_trackers_updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - affects_trackers_updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - affects_trackers_updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - affects_trackers_updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - affects_trackers_updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - affects_trackers_updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - affects_trackers_updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - affects_trackers_uuid: 
Union[Unset, None, str] = UNSET, - affects_updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - affects_updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - affects_updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - affects_updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - affects_updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - affects_updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - affects_updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - affects_updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - affects_uuid: Union[Unset, None, str] = UNSET, - bz_id: Union[Unset, None, float] = UNSET, - changed_after: Union[Unset, None, datetime.datetime] = UNSET, - changed_before: Union[Unset, None, datetime.datetime] = UNSET, - comment_zero: Union[Unset, None, str] = UNSET, - components: Union[Unset, None, List[str]] = UNSET, - created_dt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_date: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - cve_description: Union[Unset, None, str] = UNSET, - cve_description_isempty: Union[Unset, None, bool] = UNSET, - cve_id: Union[Unset, None, List[str]] = UNSET, - cve_id_isempty: Union[Unset, None, bool] = UNSET, - cvss2_nist_isempty: Union[Unset, None, bool] = UNSET, - cvss2_rh_isempty: Union[Unset, None, bool] = UNSET, - cvss3_nist_isempty: Union[Unset, None, bool] = UNSET, - cvss3_rh_isempty: Union[Unset, None, bool] = UNSET, - cvss4_nist_isempty: Union[Unset, None, bool] = UNSET, - cvss4_rh_isempty: Union[Unset, None, bool] = UNSET, - cvss_scores_comment: Union[Unset, None, str] = UNSET, - cvss_scores_created_dt: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_created_dt_date: Union[Unset, None, datetime.date] = UNSET, - cvss_scores_created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - cvss_scores_created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - cvss_scores_created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_cvss_version: Union[Unset, None, str] = UNSET, - cvss_scores_issuer: Union[Unset, None, OsidbApiV1FlawsListCvssScoresIssuer] = UNSET, - cvss_scores_score: Union[Unset, None, float] = UNSET, - cvss_scores_updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - cvss_scores_updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - cvss_scores_updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - cvss_scores_updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_uuid: Union[Unset, None, str] = UNSET, - cvss_scores_vector: 
Union[Unset, None, str] = UNSET, - cwe_id: Union[Unset, None, str] = UNSET, - cwe_id_isempty: Union[Unset, None, bool] = UNSET, - embargoed: Union[Unset, None, bool] = UNSET, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - impact: Union[Unset, None, OsidbApiV1FlawsListImpact] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, - include_meta_attr: Union[Unset, None, List[str]] = UNSET, - limit: Union[Unset, None, int] = UNSET, - major_incident_start_dt: Union[Unset, None, datetime.datetime] = UNSET, - major_incident_start_dt_date: Union[Unset, None, datetime.date] = UNSET, - major_incident_start_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - major_incident_start_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - major_incident_start_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - major_incident_start_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - major_incident_start_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - major_incident_start_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - major_incident_state: Union[ - Unset, None, OsidbApiV1FlawsListMajorIncidentState - ] = UNSET, - mitigation_isempty: Union[Unset, None, bool] = UNSET, - nist_cvss_validation: Union[ - Unset, None, OsidbApiV1FlawsListNistCvssValidation - ] = UNSET, - offset: Union[Unset, None, int] = UNSET, - order: Union[Unset, None, List[OsidbApiV1FlawsListOrderItem]] = UNSET, - owner: Union[Unset, None, str] = UNSET, - owner_isempty: Union[Unset, None, bool] = UNSET, - query: Union[Unset, None, str] = UNSET, - references_created_dt: Union[Unset, None, datetime.datetime] = UNSET, - references_created_dt_date: Union[Unset, None, datetime.date] = UNSET, - references_created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - references_created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - references_created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - references_created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - references_created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - references_created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - references_description: Union[Unset, None, str] = UNSET, - references_type: Union[Unset, None, OsidbApiV1FlawsListReferencesType] = UNSET, - references_updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - references_updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - references_updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - references_updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - references_updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - references_updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - references_updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - references_updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - references_url: Union[Unset, None, str] = UNSET, - references_uuid: Union[Unset, None, str] = UNSET, - reported_dt: Union[Unset, None, datetime.datetime] = UNSET, - reported_dt_date: Union[Unset, None, datetime.date] = UNSET, - reported_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - reported_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - reported_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - reported_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - reported_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - reported_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - 
requires_cve_description: Union[ - Unset, None, OsidbApiV1FlawsListRequiresCveDescription - ] = UNSET, - search: Union[Unset, None, str] = UNSET, - source: Union[Unset, None, OsidbApiV1FlawsListSource] = UNSET, - statement: Union[Unset, None, str] = UNSET, - statement_isempty: Union[Unset, None, bool] = UNSET, - team_id: Union[Unset, None, str] = UNSET, - title: Union[Unset, None, str] = UNSET, - tracker_ids: Union[Unset, None, List[str]] = UNSET, - unembargo_dt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - uuid: Union[Unset, None, str] = UNSET, - workflow_state: Union[ - Unset, None, List[OsidbApiV1FlawsListWorkflowStateItem] - ] = UNSET, + acknowledgments_affiliation: Union[Unset, str] = UNSET, + acknowledgments_created_dt: Union[Unset, datetime.datetime] = UNSET, + acknowledgments_created_dt_date: Union[Unset, datetime.date] = UNSET, + acknowledgments_created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + acknowledgments_created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + acknowledgments_created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + acknowledgments_created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + acknowledgments_created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + acknowledgments_created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + acknowledgments_from_upstream: Union[Unset, bool] = UNSET, + acknowledgments_name: Union[Unset, str] = UNSET, + acknowledgments_updated_dt: Union[Unset, datetime.datetime] = UNSET, + acknowledgments_updated_dt_date: Union[Unset, datetime.date] = UNSET, + acknowledgments_updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + acknowledgments_updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + acknowledgments_updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + acknowledgments_updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + acknowledgments_updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + acknowledgments_updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + acknowledgments_uuid: Union[Unset, UUID] = UNSET, + affects_affectedness: Union[Unset, OsidbApiV1FlawsListAffectsAffectedness] = UNSET, + affects_created_dt: Union[Unset, datetime.datetime] = UNSET, + affects_created_dt_date: Union[Unset, datetime.date] = UNSET, + affects_created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + affects_created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + affects_created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + affects_created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + affects_created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + affects_created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + affects_embargoed: Union[Unset, bool] = UNSET, + affects_impact: Union[Unset, OsidbApiV1FlawsListAffectsImpact] = UNSET, + affects_ps_component: Union[Unset, str] = UNSET, + affects_ps_module: Union[Unset, str] = UNSET, + affects_resolution: Union[Unset, OsidbApiV1FlawsListAffectsResolution] = UNSET, + affects_trackers_created_dt: Union[Unset, datetime.datetime] 
= UNSET, + affects_trackers_created_dt_date: Union[Unset, datetime.date] = UNSET, + affects_trackers_created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + affects_trackers_created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + affects_trackers_created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_embargoed: Union[Unset, bool] = UNSET, + affects_trackers_errata_advisory_name: Union[Unset, str] = UNSET, + affects_trackers_errata_et_id: Union[Unset, int] = UNSET, + affects_trackers_errata_shipped_dt: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_errata_shipped_dt_date: Union[Unset, datetime.date] = UNSET, + affects_trackers_errata_shipped_dt_date_gte: Union[Unset, datetime.date] = UNSET, + affects_trackers_errata_shipped_dt_date_lte: Union[Unset, datetime.date] = UNSET, + affects_trackers_errata_shipped_dt_gt: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_errata_shipped_dt_gte: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_errata_shipped_dt_lt: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_errata_shipped_dt_lte: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_external_system_id: Union[Unset, str] = UNSET, + affects_trackers_ps_update_stream: Union[Unset, str] = UNSET, + affects_trackers_resolution: Union[Unset, str] = UNSET, + affects_trackers_status: Union[Unset, str] = UNSET, + affects_trackers_type: Union[Unset, OsidbApiV1FlawsListAffectsTrackersType] = UNSET, + affects_trackers_updated_dt: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_updated_dt_date: Union[Unset, datetime.date] = UNSET, + affects_trackers_updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + affects_trackers_updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + affects_trackers_updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_uuid: Union[Unset, UUID] = UNSET, + affects_updated_dt: Union[Unset, datetime.datetime] = UNSET, + affects_updated_dt_date: Union[Unset, datetime.date] = UNSET, + affects_updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + affects_updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + affects_updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + affects_updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + affects_updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + affects_updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + affects_uuid: Union[Unset, UUID] = UNSET, + bz_id: Union[Unset, float] = UNSET, + changed_after: Union[Unset, datetime.datetime] = UNSET, + changed_before: Union[Unset, datetime.datetime] = UNSET, + comment_zero: Union[Unset, str] = UNSET, + components: Union[Unset, list[str]] = UNSET, + created_dt: Union[Unset, datetime.datetime] = UNSET, + created_dt_date: Union[Unset, datetime.date] = UNSET, + created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + created_dt_lt: 
Union[Unset, datetime.datetime] = UNSET, + created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + cve_description: Union[Unset, str] = UNSET, + cve_description_isempty: Union[Unset, bool] = UNSET, + cve_id: Union[Unset, list[str]] = UNSET, + cve_id_isempty: Union[Unset, bool] = UNSET, + cvss2_nist_isempty: Union[Unset, bool] = UNSET, + cvss2_rh_isempty: Union[Unset, bool] = UNSET, + cvss3_nist_isempty: Union[Unset, bool] = UNSET, + cvss3_rh_isempty: Union[Unset, bool] = UNSET, + cvss4_nist_isempty: Union[Unset, bool] = UNSET, + cvss4_rh_isempty: Union[Unset, bool] = UNSET, + cvss_scores_comment: Union[Unset, str] = UNSET, + cvss_scores_created_dt: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_created_dt_date: Union[Unset, datetime.date] = UNSET, + cvss_scores_created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + cvss_scores_created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + cvss_scores_created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_cvss_version: Union[Unset, str] = UNSET, + cvss_scores_issuer: Union[Unset, OsidbApiV1FlawsListCvssScoresIssuer] = UNSET, + cvss_scores_score: Union[Unset, float] = UNSET, + cvss_scores_updated_dt: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_updated_dt_date: Union[Unset, datetime.date] = UNSET, + cvss_scores_updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + cvss_scores_updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + cvss_scores_updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_uuid: Union[Unset, UUID] = UNSET, + cvss_scores_vector: Union[Unset, str] = UNSET, + cwe_id: Union[Unset, str] = UNSET, + cwe_id_isempty: Union[Unset, bool] = UNSET, + embargoed: Union[Unset, bool] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + impact: Union[Unset, OsidbApiV1FlawsListImpact] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, + include_meta_attr: Union[Unset, list[str]] = UNSET, + limit: Union[Unset, int] = UNSET, + major_incident_start_dt: Union[Unset, datetime.datetime] = UNSET, + major_incident_start_dt_date: Union[Unset, datetime.date] = UNSET, + major_incident_start_dt_date_gte: Union[Unset, datetime.date] = UNSET, + major_incident_start_dt_date_lte: Union[Unset, datetime.date] = UNSET, + major_incident_start_dt_gt: Union[Unset, datetime.datetime] = UNSET, + major_incident_start_dt_gte: Union[Unset, datetime.datetime] = UNSET, + major_incident_start_dt_lt: Union[Unset, datetime.datetime] = UNSET, + major_incident_start_dt_lte: Union[Unset, datetime.datetime] = UNSET, + major_incident_state: Union[Unset, OsidbApiV1FlawsListMajorIncidentState] = UNSET, + mitigation_isempty: Union[Unset, bool] = UNSET, + nist_cvss_validation: Union[Unset, OsidbApiV1FlawsListNistCvssValidation] = UNSET, + offset: Union[Unset, int] = UNSET, + order: Union[Unset, list[OsidbApiV1FlawsListOrderItem]] = UNSET, + owner: Union[Unset, str] = UNSET, + owner_isempty: Union[Unset, bool] = UNSET, + query: Union[Unset, str] = UNSET, + references_created_dt: Union[Unset, datetime.datetime] = UNSET, + references_created_dt_date: Union[Unset, datetime.date] = 
UNSET, + references_created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + references_created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + references_created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + references_created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + references_created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + references_created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + references_description: Union[Unset, str] = UNSET, + references_type: Union[Unset, OsidbApiV1FlawsListReferencesType] = UNSET, + references_updated_dt: Union[Unset, datetime.datetime] = UNSET, + references_updated_dt_date: Union[Unset, datetime.date] = UNSET, + references_updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + references_updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + references_updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + references_updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + references_updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + references_updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + references_url: Union[Unset, str] = UNSET, + references_uuid: Union[Unset, UUID] = UNSET, + reported_dt: Union[Unset, datetime.datetime] = UNSET, + reported_dt_date: Union[Unset, datetime.date] = UNSET, + reported_dt_date_gte: Union[Unset, datetime.date] = UNSET, + reported_dt_date_lte: Union[Unset, datetime.date] = UNSET, + reported_dt_gt: Union[Unset, datetime.datetime] = UNSET, + reported_dt_gte: Union[Unset, datetime.datetime] = UNSET, + reported_dt_lt: Union[Unset, datetime.datetime] = UNSET, + reported_dt_lte: Union[Unset, datetime.datetime] = UNSET, + requires_cve_description: Union[Unset, OsidbApiV1FlawsListRequiresCveDescription] = UNSET, + search: Union[Unset, str] = UNSET, + source: Union[Unset, OsidbApiV1FlawsListSource] = UNSET, + statement: Union[Unset, str] = UNSET, + statement_isempty: Union[Unset, bool] = UNSET, + team_id: Union[Unset, str] = UNSET, + title: Union[Unset, str] = UNSET, + tracker_ids: Union[Unset, list[str]] = UNSET, + unembargo_dt: Union[Unset, datetime.datetime] = UNSET, + updated_dt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_date: Union[Unset, datetime.date] = UNSET, + updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + uuid: Union[Unset, UUID] = UNSET, + workflow_state: Union[Unset, list[OsidbApiV1FlawsListWorkflowStateItem]] = UNSET, ) -> Response[OsidbApiV1FlawsListResponse200]: + """ + Args: + acknowledgments_affiliation (Union[Unset, str]): + acknowledgments_created_dt (Union[Unset, datetime.datetime]): + acknowledgments_created_dt_date (Union[Unset, datetime.date]): + acknowledgments_created_dt_date_gte (Union[Unset, datetime.date]): + acknowledgments_created_dt_date_lte (Union[Unset, datetime.date]): + acknowledgments_created_dt_gt (Union[Unset, datetime.datetime]): + acknowledgments_created_dt_gte (Union[Unset, datetime.datetime]): + acknowledgments_created_dt_lt (Union[Unset, datetime.datetime]): + acknowledgments_created_dt_lte (Union[Unset, datetime.datetime]): + acknowledgments_from_upstream (Union[Unset, bool]): + acknowledgments_name (Union[Unset, str]): + acknowledgments_updated_dt (Union[Unset, datetime.datetime]): + 
acknowledgments_updated_dt_date (Union[Unset, datetime.date]): + acknowledgments_updated_dt_date_gte (Union[Unset, datetime.date]): + acknowledgments_updated_dt_date_lte (Union[Unset, datetime.date]): + acknowledgments_updated_dt_gt (Union[Unset, datetime.datetime]): + acknowledgments_updated_dt_gte (Union[Unset, datetime.datetime]): + acknowledgments_updated_dt_lt (Union[Unset, datetime.datetime]): + acknowledgments_updated_dt_lte (Union[Unset, datetime.datetime]): + acknowledgments_uuid (Union[Unset, UUID]): + affects_affectedness (Union[Unset, OsidbApiV1FlawsListAffectsAffectedness]): + affects_created_dt (Union[Unset, datetime.datetime]): + affects_created_dt_date (Union[Unset, datetime.date]): + affects_created_dt_date_gte (Union[Unset, datetime.date]): + affects_created_dt_date_lte (Union[Unset, datetime.date]): + affects_created_dt_gt (Union[Unset, datetime.datetime]): + affects_created_dt_gte (Union[Unset, datetime.datetime]): + affects_created_dt_lt (Union[Unset, datetime.datetime]): + affects_created_dt_lte (Union[Unset, datetime.datetime]): + affects_embargoed (Union[Unset, bool]): + affects_impact (Union[Unset, OsidbApiV1FlawsListAffectsImpact]): + affects_ps_component (Union[Unset, str]): + affects_ps_module (Union[Unset, str]): + affects_resolution (Union[Unset, OsidbApiV1FlawsListAffectsResolution]): + affects_trackers_created_dt (Union[Unset, datetime.datetime]): + affects_trackers_created_dt_date (Union[Unset, datetime.date]): + affects_trackers_created_dt_date_gte (Union[Unset, datetime.date]): + affects_trackers_created_dt_date_lte (Union[Unset, datetime.date]): + affects_trackers_created_dt_gt (Union[Unset, datetime.datetime]): + affects_trackers_created_dt_gte (Union[Unset, datetime.datetime]): + affects_trackers_created_dt_lt (Union[Unset, datetime.datetime]): + affects_trackers_created_dt_lte (Union[Unset, datetime.datetime]): + affects_trackers_embargoed (Union[Unset, bool]): + affects_trackers_errata_advisory_name (Union[Unset, str]): + affects_trackers_errata_et_id (Union[Unset, int]): + affects_trackers_errata_shipped_dt (Union[Unset, datetime.datetime]): + affects_trackers_errata_shipped_dt_date (Union[Unset, datetime.date]): + affects_trackers_errata_shipped_dt_date_gte (Union[Unset, datetime.date]): + affects_trackers_errata_shipped_dt_date_lte (Union[Unset, datetime.date]): + affects_trackers_errata_shipped_dt_gt (Union[Unset, datetime.datetime]): + affects_trackers_errata_shipped_dt_gte (Union[Unset, datetime.datetime]): + affects_trackers_errata_shipped_dt_lt (Union[Unset, datetime.datetime]): + affects_trackers_errata_shipped_dt_lte (Union[Unset, datetime.datetime]): + affects_trackers_external_system_id (Union[Unset, str]): + affects_trackers_ps_update_stream (Union[Unset, str]): + affects_trackers_resolution (Union[Unset, str]): + affects_trackers_status (Union[Unset, str]): + affects_trackers_type (Union[Unset, OsidbApiV1FlawsListAffectsTrackersType]): + affects_trackers_updated_dt (Union[Unset, datetime.datetime]): + affects_trackers_updated_dt_date (Union[Unset, datetime.date]): + affects_trackers_updated_dt_date_gte (Union[Unset, datetime.date]): + affects_trackers_updated_dt_date_lte (Union[Unset, datetime.date]): + affects_trackers_updated_dt_gt (Union[Unset, datetime.datetime]): + affects_trackers_updated_dt_gte (Union[Unset, datetime.datetime]): + affects_trackers_updated_dt_lt (Union[Unset, datetime.datetime]): + affects_trackers_updated_dt_lte (Union[Unset, datetime.datetime]): + affects_trackers_uuid (Union[Unset, UUID]): + affects_updated_dt 
(Union[Unset, datetime.datetime]): + affects_updated_dt_date (Union[Unset, datetime.date]): + affects_updated_dt_date_gte (Union[Unset, datetime.date]): + affects_updated_dt_date_lte (Union[Unset, datetime.date]): + affects_updated_dt_gt (Union[Unset, datetime.datetime]): + affects_updated_dt_gte (Union[Unset, datetime.datetime]): + affects_updated_dt_lt (Union[Unset, datetime.datetime]): + affects_updated_dt_lte (Union[Unset, datetime.datetime]): + affects_uuid (Union[Unset, UUID]): + bz_id (Union[Unset, float]): + changed_after (Union[Unset, datetime.datetime]): + changed_before (Union[Unset, datetime.datetime]): + comment_zero (Union[Unset, str]): + components (Union[Unset, list[str]]): + created_dt (Union[Unset, datetime.datetime]): + created_dt_date (Union[Unset, datetime.date]): + created_dt_date_gte (Union[Unset, datetime.date]): + created_dt_date_lte (Union[Unset, datetime.date]): + created_dt_gt (Union[Unset, datetime.datetime]): + created_dt_gte (Union[Unset, datetime.datetime]): + created_dt_lt (Union[Unset, datetime.datetime]): + created_dt_lte (Union[Unset, datetime.datetime]): + cve_description (Union[Unset, str]): + cve_description_isempty (Union[Unset, bool]): + cve_id (Union[Unset, list[str]]): + cve_id_isempty (Union[Unset, bool]): + cvss2_nist_isempty (Union[Unset, bool]): + cvss2_rh_isempty (Union[Unset, bool]): + cvss3_nist_isempty (Union[Unset, bool]): + cvss3_rh_isempty (Union[Unset, bool]): + cvss4_nist_isempty (Union[Unset, bool]): + cvss4_rh_isempty (Union[Unset, bool]): + cvss_scores_comment (Union[Unset, str]): + cvss_scores_created_dt (Union[Unset, datetime.datetime]): + cvss_scores_created_dt_date (Union[Unset, datetime.date]): + cvss_scores_created_dt_date_gte (Union[Unset, datetime.date]): + cvss_scores_created_dt_date_lte (Union[Unset, datetime.date]): + cvss_scores_created_dt_gt (Union[Unset, datetime.datetime]): + cvss_scores_created_dt_gte (Union[Unset, datetime.datetime]): + cvss_scores_created_dt_lt (Union[Unset, datetime.datetime]): + cvss_scores_created_dt_lte (Union[Unset, datetime.datetime]): + cvss_scores_cvss_version (Union[Unset, str]): + cvss_scores_issuer (Union[Unset, OsidbApiV1FlawsListCvssScoresIssuer]): + cvss_scores_score (Union[Unset, float]): + cvss_scores_updated_dt (Union[Unset, datetime.datetime]): + cvss_scores_updated_dt_date (Union[Unset, datetime.date]): + cvss_scores_updated_dt_date_gte (Union[Unset, datetime.date]): + cvss_scores_updated_dt_date_lte (Union[Unset, datetime.date]): + cvss_scores_updated_dt_gt (Union[Unset, datetime.datetime]): + cvss_scores_updated_dt_gte (Union[Unset, datetime.datetime]): + cvss_scores_updated_dt_lt (Union[Unset, datetime.datetime]): + cvss_scores_updated_dt_lte (Union[Unset, datetime.datetime]): + cvss_scores_uuid (Union[Unset, UUID]): + cvss_scores_vector (Union[Unset, str]): + cwe_id (Union[Unset, str]): + cwe_id_isempty (Union[Unset, bool]): + embargoed (Union[Unset, bool]): + exclude_fields (Union[Unset, list[str]]): + impact (Union[Unset, OsidbApiV1FlawsListImpact]): + include_fields (Union[Unset, list[str]]): + include_meta_attr (Union[Unset, list[str]]): + limit (Union[Unset, int]): + major_incident_start_dt (Union[Unset, datetime.datetime]): + major_incident_start_dt_date (Union[Unset, datetime.date]): + major_incident_start_dt_date_gte (Union[Unset, datetime.date]): + major_incident_start_dt_date_lte (Union[Unset, datetime.date]): + major_incident_start_dt_gt (Union[Unset, datetime.datetime]): + major_incident_start_dt_gte (Union[Unset, datetime.datetime]): + major_incident_start_dt_lt 
(Union[Unset, datetime.datetime]): + major_incident_start_dt_lte (Union[Unset, datetime.datetime]): + major_incident_state (Union[Unset, OsidbApiV1FlawsListMajorIncidentState]): + mitigation_isempty (Union[Unset, bool]): + nist_cvss_validation (Union[Unset, OsidbApiV1FlawsListNistCvssValidation]): + offset (Union[Unset, int]): + order (Union[Unset, list[OsidbApiV1FlawsListOrderItem]]): + owner (Union[Unset, str]): + owner_isempty (Union[Unset, bool]): + query (Union[Unset, str]): + references_created_dt (Union[Unset, datetime.datetime]): + references_created_dt_date (Union[Unset, datetime.date]): + references_created_dt_date_gte (Union[Unset, datetime.date]): + references_created_dt_date_lte (Union[Unset, datetime.date]): + references_created_dt_gt (Union[Unset, datetime.datetime]): + references_created_dt_gte (Union[Unset, datetime.datetime]): + references_created_dt_lt (Union[Unset, datetime.datetime]): + references_created_dt_lte (Union[Unset, datetime.datetime]): + references_description (Union[Unset, str]): + references_type (Union[Unset, OsidbApiV1FlawsListReferencesType]): + references_updated_dt (Union[Unset, datetime.datetime]): + references_updated_dt_date (Union[Unset, datetime.date]): + references_updated_dt_date_gte (Union[Unset, datetime.date]): + references_updated_dt_date_lte (Union[Unset, datetime.date]): + references_updated_dt_gt (Union[Unset, datetime.datetime]): + references_updated_dt_gte (Union[Unset, datetime.datetime]): + references_updated_dt_lt (Union[Unset, datetime.datetime]): + references_updated_dt_lte (Union[Unset, datetime.datetime]): + references_url (Union[Unset, str]): + references_uuid (Union[Unset, UUID]): + reported_dt (Union[Unset, datetime.datetime]): + reported_dt_date (Union[Unset, datetime.date]): + reported_dt_date_gte (Union[Unset, datetime.date]): + reported_dt_date_lte (Union[Unset, datetime.date]): + reported_dt_gt (Union[Unset, datetime.datetime]): + reported_dt_gte (Union[Unset, datetime.datetime]): + reported_dt_lt (Union[Unset, datetime.datetime]): + reported_dt_lte (Union[Unset, datetime.datetime]): + requires_cve_description (Union[Unset, OsidbApiV1FlawsListRequiresCveDescription]): + search (Union[Unset, str]): + source (Union[Unset, OsidbApiV1FlawsListSource]): + statement (Union[Unset, str]): + statement_isempty (Union[Unset, bool]): + team_id (Union[Unset, str]): + title (Union[Unset, str]): + tracker_ids (Union[Unset, list[str]]): + unembargo_dt (Union[Unset, datetime.datetime]): + updated_dt (Union[Unset, datetime.datetime]): + updated_dt_date (Union[Unset, datetime.date]): + updated_dt_date_gte (Union[Unset, datetime.date]): + updated_dt_date_lte (Union[Unset, datetime.date]): + updated_dt_gt (Union[Unset, datetime.datetime]): + updated_dt_gte (Union[Unset, datetime.datetime]): + updated_dt_lt (Union[Unset, datetime.datetime]): + updated_dt_lte (Union[Unset, datetime.datetime]): + uuid (Union[Unset, UUID]): + workflow_state (Union[Unset, list[OsidbApiV1FlawsListWorkflowStateItem]]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[OsidbApiV1FlawsListResponse200] + """ + kwargs = _get_kwargs( client=client, acknowledgments_affiliation=acknowledgments_affiliation, @@ -2966,235 +3267,410 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( +async def asyncio( *, client: AuthenticatedClient, - acknowledgments_affiliation: Union[Unset, None, str] = UNSET, - acknowledgments_created_dt: Union[Unset, None, datetime.datetime] = UNSET, - acknowledgments_created_dt_date: Union[Unset, None, datetime.date] = UNSET, - acknowledgments_created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - acknowledgments_created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - acknowledgments_created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - acknowledgments_created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - acknowledgments_created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - acknowledgments_created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - acknowledgments_from_upstream: Union[Unset, None, bool] = UNSET, - acknowledgments_name: Union[Unset, None, str] = UNSET, - acknowledgments_updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - acknowledgments_updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - acknowledgments_updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - acknowledgments_updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - acknowledgments_updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - acknowledgments_updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - acknowledgments_updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - acknowledgments_updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - acknowledgments_uuid: Union[Unset, None, str] = UNSET, - affects_affectedness: Union[ - Unset, None, OsidbApiV1FlawsListAffectsAffectedness - ] = UNSET, - affects_created_dt: Union[Unset, None, datetime.datetime] = UNSET, - affects_created_dt_date: Union[Unset, None, datetime.date] = UNSET, - affects_created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - affects_created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - affects_created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - affects_created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - affects_created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - affects_created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - affects_embargoed: Union[Unset, None, bool] = UNSET, - affects_impact: Union[Unset, None, OsidbApiV1FlawsListAffectsImpact] = UNSET, - affects_ps_component: Union[Unset, None, str] = UNSET, - affects_ps_module: Union[Unset, None, str] = UNSET, - affects_resolution: Union[ - Unset, None, OsidbApiV1FlawsListAffectsResolution - ] = UNSET, - affects_trackers_created_dt: Union[Unset, None, datetime.datetime] = UNSET, - affects_trackers_created_dt_date: Union[Unset, None, datetime.date] = UNSET, - affects_trackers_created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - affects_trackers_created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - affects_trackers_created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - affects_trackers_created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - affects_trackers_created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - 
affects_trackers_created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - affects_trackers_embargoed: Union[Unset, None, bool] = UNSET, - affects_trackers_errata_advisory_name: Union[Unset, None, str] = UNSET, - affects_trackers_errata_et_id: Union[Unset, None, int] = UNSET, - affects_trackers_errata_shipped_dt: Union[Unset, None, datetime.datetime] = UNSET, - affects_trackers_errata_shipped_dt_date: Union[Unset, None, datetime.date] = UNSET, - affects_trackers_errata_shipped_dt_date_gte: Union[ - Unset, None, datetime.date - ] = UNSET, - affects_trackers_errata_shipped_dt_date_lte: Union[ - Unset, None, datetime.date - ] = UNSET, - affects_trackers_errata_shipped_dt_gt: Union[ - Unset, None, datetime.datetime - ] = UNSET, - affects_trackers_errata_shipped_dt_gte: Union[ - Unset, None, datetime.datetime - ] = UNSET, - affects_trackers_errata_shipped_dt_lt: Union[ - Unset, None, datetime.datetime - ] = UNSET, - affects_trackers_errata_shipped_dt_lte: Union[ - Unset, None, datetime.datetime - ] = UNSET, - affects_trackers_external_system_id: Union[Unset, None, str] = UNSET, - affects_trackers_ps_update_stream: Union[Unset, None, str] = UNSET, - affects_trackers_resolution: Union[Unset, None, str] = UNSET, - affects_trackers_status: Union[Unset, None, str] = UNSET, - affects_trackers_type: Union[ - Unset, None, OsidbApiV1FlawsListAffectsTrackersType - ] = UNSET, - affects_trackers_updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - affects_trackers_updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - affects_trackers_updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - affects_trackers_updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - affects_trackers_updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - affects_trackers_updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - affects_trackers_updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - affects_trackers_updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - affects_trackers_uuid: Union[Unset, None, str] = UNSET, - affects_updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - affects_updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - affects_updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - affects_updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - affects_updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - affects_updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - affects_updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - affects_updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - affects_uuid: Union[Unset, None, str] = UNSET, - bz_id: Union[Unset, None, float] = UNSET, - changed_after: Union[Unset, None, datetime.datetime] = UNSET, - changed_before: Union[Unset, None, datetime.datetime] = UNSET, - comment_zero: Union[Unset, None, str] = UNSET, - components: Union[Unset, None, List[str]] = UNSET, - created_dt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_date: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - cve_description: Union[Unset, None, str] 
= UNSET, - cve_description_isempty: Union[Unset, None, bool] = UNSET, - cve_id: Union[Unset, None, List[str]] = UNSET, - cve_id_isempty: Union[Unset, None, bool] = UNSET, - cvss2_nist_isempty: Union[Unset, None, bool] = UNSET, - cvss2_rh_isempty: Union[Unset, None, bool] = UNSET, - cvss3_nist_isempty: Union[Unset, None, bool] = UNSET, - cvss3_rh_isempty: Union[Unset, None, bool] = UNSET, - cvss4_nist_isempty: Union[Unset, None, bool] = UNSET, - cvss4_rh_isempty: Union[Unset, None, bool] = UNSET, - cvss_scores_comment: Union[Unset, None, str] = UNSET, - cvss_scores_created_dt: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_created_dt_date: Union[Unset, None, datetime.date] = UNSET, - cvss_scores_created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - cvss_scores_created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - cvss_scores_created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_cvss_version: Union[Unset, None, str] = UNSET, - cvss_scores_issuer: Union[Unset, None, OsidbApiV1FlawsListCvssScoresIssuer] = UNSET, - cvss_scores_score: Union[Unset, None, float] = UNSET, - cvss_scores_updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - cvss_scores_updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - cvss_scores_updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - cvss_scores_updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - cvss_scores_uuid: Union[Unset, None, str] = UNSET, - cvss_scores_vector: Union[Unset, None, str] = UNSET, - cwe_id: Union[Unset, None, str] = UNSET, - cwe_id_isempty: Union[Unset, None, bool] = UNSET, - embargoed: Union[Unset, None, bool] = UNSET, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - impact: Union[Unset, None, OsidbApiV1FlawsListImpact] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, - include_meta_attr: Union[Unset, None, List[str]] = UNSET, - limit: Union[Unset, None, int] = UNSET, - major_incident_start_dt: Union[Unset, None, datetime.datetime] = UNSET, - major_incident_start_dt_date: Union[Unset, None, datetime.date] = UNSET, - major_incident_start_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - major_incident_start_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - major_incident_start_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - major_incident_start_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - major_incident_start_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - major_incident_start_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - major_incident_state: Union[ - Unset, None, OsidbApiV1FlawsListMajorIncidentState - ] = UNSET, - mitigation_isempty: Union[Unset, None, bool] = UNSET, - nist_cvss_validation: Union[ - Unset, None, OsidbApiV1FlawsListNistCvssValidation - ] = UNSET, - offset: Union[Unset, None, int] = UNSET, - order: Union[Unset, None, List[OsidbApiV1FlawsListOrderItem]] = UNSET, - owner: Union[Unset, None, str] = UNSET, - owner_isempty: 
Union[Unset, None, bool] = UNSET, - query: Union[Unset, None, str] = UNSET, - references_created_dt: Union[Unset, None, datetime.datetime] = UNSET, - references_created_dt_date: Union[Unset, None, datetime.date] = UNSET, - references_created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - references_created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - references_created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - references_created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - references_created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - references_created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - references_description: Union[Unset, None, str] = UNSET, - references_type: Union[Unset, None, OsidbApiV1FlawsListReferencesType] = UNSET, - references_updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - references_updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - references_updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - references_updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - references_updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - references_updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - references_updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - references_updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - references_url: Union[Unset, None, str] = UNSET, - references_uuid: Union[Unset, None, str] = UNSET, - reported_dt: Union[Unset, None, datetime.datetime] = UNSET, - reported_dt_date: Union[Unset, None, datetime.date] = UNSET, - reported_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - reported_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - reported_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - reported_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - reported_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - reported_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - requires_cve_description: Union[ - Unset, None, OsidbApiV1FlawsListRequiresCveDescription - ] = UNSET, - search: Union[Unset, None, str] = UNSET, - source: Union[Unset, None, OsidbApiV1FlawsListSource] = UNSET, - statement: Union[Unset, None, str] = UNSET, - statement_isempty: Union[Unset, None, bool] = UNSET, - team_id: Union[Unset, None, str] = UNSET, - title: Union[Unset, None, str] = UNSET, - tracker_ids: Union[Unset, None, List[str]] = UNSET, - unembargo_dt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - uuid: Union[Unset, None, str] = UNSET, - workflow_state: Union[ - Unset, None, List[OsidbApiV1FlawsListWorkflowStateItem] - ] = UNSET, + acknowledgments_affiliation: Union[Unset, str] = UNSET, + acknowledgments_created_dt: Union[Unset, datetime.datetime] = UNSET, + acknowledgments_created_dt_date: Union[Unset, datetime.date] = UNSET, + acknowledgments_created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + acknowledgments_created_dt_date_lte: Union[Unset, datetime.date] 
= UNSET, + acknowledgments_created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + acknowledgments_created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + acknowledgments_created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + acknowledgments_created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + acknowledgments_from_upstream: Union[Unset, bool] = UNSET, + acknowledgments_name: Union[Unset, str] = UNSET, + acknowledgments_updated_dt: Union[Unset, datetime.datetime] = UNSET, + acknowledgments_updated_dt_date: Union[Unset, datetime.date] = UNSET, + acknowledgments_updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + acknowledgments_updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + acknowledgments_updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + acknowledgments_updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + acknowledgments_updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + acknowledgments_updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + acknowledgments_uuid: Union[Unset, UUID] = UNSET, + affects_affectedness: Union[Unset, OsidbApiV1FlawsListAffectsAffectedness] = UNSET, + affects_created_dt: Union[Unset, datetime.datetime] = UNSET, + affects_created_dt_date: Union[Unset, datetime.date] = UNSET, + affects_created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + affects_created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + affects_created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + affects_created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + affects_created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + affects_created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + affects_embargoed: Union[Unset, bool] = UNSET, + affects_impact: Union[Unset, OsidbApiV1FlawsListAffectsImpact] = UNSET, + affects_ps_component: Union[Unset, str] = UNSET, + affects_ps_module: Union[Unset, str] = UNSET, + affects_resolution: Union[Unset, OsidbApiV1FlawsListAffectsResolution] = UNSET, + affects_trackers_created_dt: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_created_dt_date: Union[Unset, datetime.date] = UNSET, + affects_trackers_created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + affects_trackers_created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + affects_trackers_created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_embargoed: Union[Unset, bool] = UNSET, + affects_trackers_errata_advisory_name: Union[Unset, str] = UNSET, + affects_trackers_errata_et_id: Union[Unset, int] = UNSET, + affects_trackers_errata_shipped_dt: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_errata_shipped_dt_date: Union[Unset, datetime.date] = UNSET, + affects_trackers_errata_shipped_dt_date_gte: Union[Unset, datetime.date] = UNSET, + affects_trackers_errata_shipped_dt_date_lte: Union[Unset, datetime.date] = UNSET, + affects_trackers_errata_shipped_dt_gt: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_errata_shipped_dt_gte: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_errata_shipped_dt_lt: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_errata_shipped_dt_lte: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_external_system_id: Union[Unset, str] = UNSET, + affects_trackers_ps_update_stream: Union[Unset, str] = 
UNSET, + affects_trackers_resolution: Union[Unset, str] = UNSET, + affects_trackers_status: Union[Unset, str] = UNSET, + affects_trackers_type: Union[Unset, OsidbApiV1FlawsListAffectsTrackersType] = UNSET, + affects_trackers_updated_dt: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_updated_dt_date: Union[Unset, datetime.date] = UNSET, + affects_trackers_updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + affects_trackers_updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + affects_trackers_updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + affects_trackers_uuid: Union[Unset, UUID] = UNSET, + affects_updated_dt: Union[Unset, datetime.datetime] = UNSET, + affects_updated_dt_date: Union[Unset, datetime.date] = UNSET, + affects_updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + affects_updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + affects_updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + affects_updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + affects_updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + affects_updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + affects_uuid: Union[Unset, UUID] = UNSET, + bz_id: Union[Unset, float] = UNSET, + changed_after: Union[Unset, datetime.datetime] = UNSET, + changed_before: Union[Unset, datetime.datetime] = UNSET, + comment_zero: Union[Unset, str] = UNSET, + components: Union[Unset, list[str]] = UNSET, + created_dt: Union[Unset, datetime.datetime] = UNSET, + created_dt_date: Union[Unset, datetime.date] = UNSET, + created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + cve_description: Union[Unset, str] = UNSET, + cve_description_isempty: Union[Unset, bool] = UNSET, + cve_id: Union[Unset, list[str]] = UNSET, + cve_id_isempty: Union[Unset, bool] = UNSET, + cvss2_nist_isempty: Union[Unset, bool] = UNSET, + cvss2_rh_isempty: Union[Unset, bool] = UNSET, + cvss3_nist_isempty: Union[Unset, bool] = UNSET, + cvss3_rh_isempty: Union[Unset, bool] = UNSET, + cvss4_nist_isempty: Union[Unset, bool] = UNSET, + cvss4_rh_isempty: Union[Unset, bool] = UNSET, + cvss_scores_comment: Union[Unset, str] = UNSET, + cvss_scores_created_dt: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_created_dt_date: Union[Unset, datetime.date] = UNSET, + cvss_scores_created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + cvss_scores_created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + cvss_scores_created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_cvss_version: Union[Unset, str] = UNSET, + cvss_scores_issuer: Union[Unset, OsidbApiV1FlawsListCvssScoresIssuer] = UNSET, + cvss_scores_score: Union[Unset, float] = UNSET, + cvss_scores_updated_dt: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_updated_dt_date: Union[Unset, datetime.date] = 
UNSET, + cvss_scores_updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + cvss_scores_updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + cvss_scores_updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + cvss_scores_uuid: Union[Unset, UUID] = UNSET, + cvss_scores_vector: Union[Unset, str] = UNSET, + cwe_id: Union[Unset, str] = UNSET, + cwe_id_isempty: Union[Unset, bool] = UNSET, + embargoed: Union[Unset, bool] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + impact: Union[Unset, OsidbApiV1FlawsListImpact] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, + include_meta_attr: Union[Unset, list[str]] = UNSET, + limit: Union[Unset, int] = UNSET, + major_incident_start_dt: Union[Unset, datetime.datetime] = UNSET, + major_incident_start_dt_date: Union[Unset, datetime.date] = UNSET, + major_incident_start_dt_date_gte: Union[Unset, datetime.date] = UNSET, + major_incident_start_dt_date_lte: Union[Unset, datetime.date] = UNSET, + major_incident_start_dt_gt: Union[Unset, datetime.datetime] = UNSET, + major_incident_start_dt_gte: Union[Unset, datetime.datetime] = UNSET, + major_incident_start_dt_lt: Union[Unset, datetime.datetime] = UNSET, + major_incident_start_dt_lte: Union[Unset, datetime.datetime] = UNSET, + major_incident_state: Union[Unset, OsidbApiV1FlawsListMajorIncidentState] = UNSET, + mitigation_isempty: Union[Unset, bool] = UNSET, + nist_cvss_validation: Union[Unset, OsidbApiV1FlawsListNistCvssValidation] = UNSET, + offset: Union[Unset, int] = UNSET, + order: Union[Unset, list[OsidbApiV1FlawsListOrderItem]] = UNSET, + owner: Union[Unset, str] = UNSET, + owner_isempty: Union[Unset, bool] = UNSET, + query: Union[Unset, str] = UNSET, + references_created_dt: Union[Unset, datetime.datetime] = UNSET, + references_created_dt_date: Union[Unset, datetime.date] = UNSET, + references_created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + references_created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + references_created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + references_created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + references_created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + references_created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + references_description: Union[Unset, str] = UNSET, + references_type: Union[Unset, OsidbApiV1FlawsListReferencesType] = UNSET, + references_updated_dt: Union[Unset, datetime.datetime] = UNSET, + references_updated_dt_date: Union[Unset, datetime.date] = UNSET, + references_updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + references_updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + references_updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + references_updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + references_updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + references_updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + references_url: Union[Unset, str] = UNSET, + references_uuid: Union[Unset, UUID] = UNSET, + reported_dt: Union[Unset, datetime.datetime] = UNSET, + reported_dt_date: Union[Unset, datetime.date] = UNSET, + reported_dt_date_gte: Union[Unset, datetime.date] = UNSET, + reported_dt_date_lte: Union[Unset, datetime.date] = UNSET, + reported_dt_gt: Union[Unset, datetime.datetime] = UNSET, + reported_dt_gte: 
Union[Unset, datetime.datetime] = UNSET, + reported_dt_lt: Union[Unset, datetime.datetime] = UNSET, + reported_dt_lte: Union[Unset, datetime.datetime] = UNSET, + requires_cve_description: Union[Unset, OsidbApiV1FlawsListRequiresCveDescription] = UNSET, + search: Union[Unset, str] = UNSET, + source: Union[Unset, OsidbApiV1FlawsListSource] = UNSET, + statement: Union[Unset, str] = UNSET, + statement_isempty: Union[Unset, bool] = UNSET, + team_id: Union[Unset, str] = UNSET, + title: Union[Unset, str] = UNSET, + tracker_ids: Union[Unset, list[str]] = UNSET, + unembargo_dt: Union[Unset, datetime.datetime] = UNSET, + updated_dt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_date: Union[Unset, datetime.date] = UNSET, + updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + uuid: Union[Unset, UUID] = UNSET, + workflow_state: Union[Unset, list[OsidbApiV1FlawsListWorkflowStateItem]] = UNSET, ) -> Optional[OsidbApiV1FlawsListResponse200]: - """ """ + """ + Args: + acknowledgments_affiliation (Union[Unset, str]): + acknowledgments_created_dt (Union[Unset, datetime.datetime]): + acknowledgments_created_dt_date (Union[Unset, datetime.date]): + acknowledgments_created_dt_date_gte (Union[Unset, datetime.date]): + acknowledgments_created_dt_date_lte (Union[Unset, datetime.date]): + acknowledgments_created_dt_gt (Union[Unset, datetime.datetime]): + acknowledgments_created_dt_gte (Union[Unset, datetime.datetime]): + acknowledgments_created_dt_lt (Union[Unset, datetime.datetime]): + acknowledgments_created_dt_lte (Union[Unset, datetime.datetime]): + acknowledgments_from_upstream (Union[Unset, bool]): + acknowledgments_name (Union[Unset, str]): + acknowledgments_updated_dt (Union[Unset, datetime.datetime]): + acknowledgments_updated_dt_date (Union[Unset, datetime.date]): + acknowledgments_updated_dt_date_gte (Union[Unset, datetime.date]): + acknowledgments_updated_dt_date_lte (Union[Unset, datetime.date]): + acknowledgments_updated_dt_gt (Union[Unset, datetime.datetime]): + acknowledgments_updated_dt_gte (Union[Unset, datetime.datetime]): + acknowledgments_updated_dt_lt (Union[Unset, datetime.datetime]): + acknowledgments_updated_dt_lte (Union[Unset, datetime.datetime]): + acknowledgments_uuid (Union[Unset, UUID]): + affects_affectedness (Union[Unset, OsidbApiV1FlawsListAffectsAffectedness]): + affects_created_dt (Union[Unset, datetime.datetime]): + affects_created_dt_date (Union[Unset, datetime.date]): + affects_created_dt_date_gte (Union[Unset, datetime.date]): + affects_created_dt_date_lte (Union[Unset, datetime.date]): + affects_created_dt_gt (Union[Unset, datetime.datetime]): + affects_created_dt_gte (Union[Unset, datetime.datetime]): + affects_created_dt_lt (Union[Unset, datetime.datetime]): + affects_created_dt_lte (Union[Unset, datetime.datetime]): + affects_embargoed (Union[Unset, bool]): + affects_impact (Union[Unset, OsidbApiV1FlawsListAffectsImpact]): + affects_ps_component (Union[Unset, str]): + affects_ps_module (Union[Unset, str]): + affects_resolution (Union[Unset, OsidbApiV1FlawsListAffectsResolution]): + affects_trackers_created_dt (Union[Unset, datetime.datetime]): + affects_trackers_created_dt_date (Union[Unset, datetime.date]): + affects_trackers_created_dt_date_gte (Union[Unset, 
datetime.date]): + affects_trackers_created_dt_date_lte (Union[Unset, datetime.date]): + affects_trackers_created_dt_gt (Union[Unset, datetime.datetime]): + affects_trackers_created_dt_gte (Union[Unset, datetime.datetime]): + affects_trackers_created_dt_lt (Union[Unset, datetime.datetime]): + affects_trackers_created_dt_lte (Union[Unset, datetime.datetime]): + affects_trackers_embargoed (Union[Unset, bool]): + affects_trackers_errata_advisory_name (Union[Unset, str]): + affects_trackers_errata_et_id (Union[Unset, int]): + affects_trackers_errata_shipped_dt (Union[Unset, datetime.datetime]): + affects_trackers_errata_shipped_dt_date (Union[Unset, datetime.date]): + affects_trackers_errata_shipped_dt_date_gte (Union[Unset, datetime.date]): + affects_trackers_errata_shipped_dt_date_lte (Union[Unset, datetime.date]): + affects_trackers_errata_shipped_dt_gt (Union[Unset, datetime.datetime]): + affects_trackers_errata_shipped_dt_gte (Union[Unset, datetime.datetime]): + affects_trackers_errata_shipped_dt_lt (Union[Unset, datetime.datetime]): + affects_trackers_errata_shipped_dt_lte (Union[Unset, datetime.datetime]): + affects_trackers_external_system_id (Union[Unset, str]): + affects_trackers_ps_update_stream (Union[Unset, str]): + affects_trackers_resolution (Union[Unset, str]): + affects_trackers_status (Union[Unset, str]): + affects_trackers_type (Union[Unset, OsidbApiV1FlawsListAffectsTrackersType]): + affects_trackers_updated_dt (Union[Unset, datetime.datetime]): + affects_trackers_updated_dt_date (Union[Unset, datetime.date]): + affects_trackers_updated_dt_date_gte (Union[Unset, datetime.date]): + affects_trackers_updated_dt_date_lte (Union[Unset, datetime.date]): + affects_trackers_updated_dt_gt (Union[Unset, datetime.datetime]): + affects_trackers_updated_dt_gte (Union[Unset, datetime.datetime]): + affects_trackers_updated_dt_lt (Union[Unset, datetime.datetime]): + affects_trackers_updated_dt_lte (Union[Unset, datetime.datetime]): + affects_trackers_uuid (Union[Unset, UUID]): + affects_updated_dt (Union[Unset, datetime.datetime]): + affects_updated_dt_date (Union[Unset, datetime.date]): + affects_updated_dt_date_gte (Union[Unset, datetime.date]): + affects_updated_dt_date_lte (Union[Unset, datetime.date]): + affects_updated_dt_gt (Union[Unset, datetime.datetime]): + affects_updated_dt_gte (Union[Unset, datetime.datetime]): + affects_updated_dt_lt (Union[Unset, datetime.datetime]): + affects_updated_dt_lte (Union[Unset, datetime.datetime]): + affects_uuid (Union[Unset, UUID]): + bz_id (Union[Unset, float]): + changed_after (Union[Unset, datetime.datetime]): + changed_before (Union[Unset, datetime.datetime]): + comment_zero (Union[Unset, str]): + components (Union[Unset, list[str]]): + created_dt (Union[Unset, datetime.datetime]): + created_dt_date (Union[Unset, datetime.date]): + created_dt_date_gte (Union[Unset, datetime.date]): + created_dt_date_lte (Union[Unset, datetime.date]): + created_dt_gt (Union[Unset, datetime.datetime]): + created_dt_gte (Union[Unset, datetime.datetime]): + created_dt_lt (Union[Unset, datetime.datetime]): + created_dt_lte (Union[Unset, datetime.datetime]): + cve_description (Union[Unset, str]): + cve_description_isempty (Union[Unset, bool]): + cve_id (Union[Unset, list[str]]): + cve_id_isempty (Union[Unset, bool]): + cvss2_nist_isempty (Union[Unset, bool]): + cvss2_rh_isempty (Union[Unset, bool]): + cvss3_nist_isempty (Union[Unset, bool]): + cvss3_rh_isempty (Union[Unset, bool]): + cvss4_nist_isempty (Union[Unset, bool]): + cvss4_rh_isempty (Union[Unset, bool]): 
+ cvss_scores_comment (Union[Unset, str]): + cvss_scores_created_dt (Union[Unset, datetime.datetime]): + cvss_scores_created_dt_date (Union[Unset, datetime.date]): + cvss_scores_created_dt_date_gte (Union[Unset, datetime.date]): + cvss_scores_created_dt_date_lte (Union[Unset, datetime.date]): + cvss_scores_created_dt_gt (Union[Unset, datetime.datetime]): + cvss_scores_created_dt_gte (Union[Unset, datetime.datetime]): + cvss_scores_created_dt_lt (Union[Unset, datetime.datetime]): + cvss_scores_created_dt_lte (Union[Unset, datetime.datetime]): + cvss_scores_cvss_version (Union[Unset, str]): + cvss_scores_issuer (Union[Unset, OsidbApiV1FlawsListCvssScoresIssuer]): + cvss_scores_score (Union[Unset, float]): + cvss_scores_updated_dt (Union[Unset, datetime.datetime]): + cvss_scores_updated_dt_date (Union[Unset, datetime.date]): + cvss_scores_updated_dt_date_gte (Union[Unset, datetime.date]): + cvss_scores_updated_dt_date_lte (Union[Unset, datetime.date]): + cvss_scores_updated_dt_gt (Union[Unset, datetime.datetime]): + cvss_scores_updated_dt_gte (Union[Unset, datetime.datetime]): + cvss_scores_updated_dt_lt (Union[Unset, datetime.datetime]): + cvss_scores_updated_dt_lte (Union[Unset, datetime.datetime]): + cvss_scores_uuid (Union[Unset, UUID]): + cvss_scores_vector (Union[Unset, str]): + cwe_id (Union[Unset, str]): + cwe_id_isempty (Union[Unset, bool]): + embargoed (Union[Unset, bool]): + exclude_fields (Union[Unset, list[str]]): + impact (Union[Unset, OsidbApiV1FlawsListImpact]): + include_fields (Union[Unset, list[str]]): + include_meta_attr (Union[Unset, list[str]]): + limit (Union[Unset, int]): + major_incident_start_dt (Union[Unset, datetime.datetime]): + major_incident_start_dt_date (Union[Unset, datetime.date]): + major_incident_start_dt_date_gte (Union[Unset, datetime.date]): + major_incident_start_dt_date_lte (Union[Unset, datetime.date]): + major_incident_start_dt_gt (Union[Unset, datetime.datetime]): + major_incident_start_dt_gte (Union[Unset, datetime.datetime]): + major_incident_start_dt_lt (Union[Unset, datetime.datetime]): + major_incident_start_dt_lte (Union[Unset, datetime.datetime]): + major_incident_state (Union[Unset, OsidbApiV1FlawsListMajorIncidentState]): + mitigation_isempty (Union[Unset, bool]): + nist_cvss_validation (Union[Unset, OsidbApiV1FlawsListNistCvssValidation]): + offset (Union[Unset, int]): + order (Union[Unset, list[OsidbApiV1FlawsListOrderItem]]): + owner (Union[Unset, str]): + owner_isempty (Union[Unset, bool]): + query (Union[Unset, str]): + references_created_dt (Union[Unset, datetime.datetime]): + references_created_dt_date (Union[Unset, datetime.date]): + references_created_dt_date_gte (Union[Unset, datetime.date]): + references_created_dt_date_lte (Union[Unset, datetime.date]): + references_created_dt_gt (Union[Unset, datetime.datetime]): + references_created_dt_gte (Union[Unset, datetime.datetime]): + references_created_dt_lt (Union[Unset, datetime.datetime]): + references_created_dt_lte (Union[Unset, datetime.datetime]): + references_description (Union[Unset, str]): + references_type (Union[Unset, OsidbApiV1FlawsListReferencesType]): + references_updated_dt (Union[Unset, datetime.datetime]): + references_updated_dt_date (Union[Unset, datetime.date]): + references_updated_dt_date_gte (Union[Unset, datetime.date]): + references_updated_dt_date_lte (Union[Unset, datetime.date]): + references_updated_dt_gt (Union[Unset, datetime.datetime]): + references_updated_dt_gte (Union[Unset, datetime.datetime]): + references_updated_dt_lt (Union[Unset, 
datetime.datetime]): + references_updated_dt_lte (Union[Unset, datetime.datetime]): + references_url (Union[Unset, str]): + references_uuid (Union[Unset, UUID]): + reported_dt (Union[Unset, datetime.datetime]): + reported_dt_date (Union[Unset, datetime.date]): + reported_dt_date_gte (Union[Unset, datetime.date]): + reported_dt_date_lte (Union[Unset, datetime.date]): + reported_dt_gt (Union[Unset, datetime.datetime]): + reported_dt_gte (Union[Unset, datetime.datetime]): + reported_dt_lt (Union[Unset, datetime.datetime]): + reported_dt_lte (Union[Unset, datetime.datetime]): + requires_cve_description (Union[Unset, OsidbApiV1FlawsListRequiresCveDescription]): + search (Union[Unset, str]): + source (Union[Unset, OsidbApiV1FlawsListSource]): + statement (Union[Unset, str]): + statement_isempty (Union[Unset, bool]): + team_id (Union[Unset, str]): + title (Union[Unset, str]): + tracker_ids (Union[Unset, list[str]]): + unembargo_dt (Union[Unset, datetime.datetime]): + updated_dt (Union[Unset, datetime.datetime]): + updated_dt_date (Union[Unset, datetime.date]): + updated_dt_date_gte (Union[Unset, datetime.date]): + updated_dt_date_lte (Union[Unset, datetime.date]): + updated_dt_gt (Union[Unset, datetime.datetime]): + updated_dt_gte (Union[Unset, datetime.datetime]): + updated_dt_lt (Union[Unset, datetime.datetime]): + updated_dt_lte (Union[Unset, datetime.datetime]): + uuid (Union[Unset, UUID]): + workflow_state (Union[Unset, list[OsidbApiV1FlawsListWorkflowStateItem]]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + OsidbApiV1FlawsListResponse200 + """ return ( - await async_detailed( + await asyncio_detailed( client=client, acknowledgments_affiliation=acknowledgments_affiliation, acknowledgments_created_dt=acknowledgments_created_dt, diff --git a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_package_versions_create.py b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_package_versions_create.py index 9fe9003..1c4f85b 100644 --- a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_package_versions_create.py +++ b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_package_versions_create.py @@ -1,8 +1,10 @@ -from typing import Any, Dict, Optional +from http import HTTPStatus +from typing import Any, Optional, Union +from uuid import UUID import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.flaw_package_version_post import FlawPackageVersionPost from ...models.osidb_api_v1_flaws_package_versions_create_response_201 import ( OsidbApiV1FlawsPackageVersionsCreateResponse201, @@ -10,81 +12,96 @@ from ...types import UNSET, Response, Unset QUERY_PARAMS = {} + REQUEST_BODY_TYPE = FlawPackageVersionPost def _get_kwargs( - flaw_id: str, + flaw_id: UUID, *, client: AuthenticatedClient, - form_data: FlawPackageVersionPost, - multipart_data: FlawPackageVersionPost, - json_body: FlawPackageVersionPost, -) -> Dict[str, Any]: - url = "{}/osidb/api/v1/flaws/{flaw_id}/package_versions".format( - client.base_url, - flaw_id=flaw_id, - ) - - headers: Dict[str, Any] = client.get_headers() + body: Union[ + FlawPackageVersionPost, + FlawPackageVersionPost, + FlawPackageVersionPost, + ], +) -> dict[str, Any]: + headers: dict[str, Any] = client.get_headers() + + _kwargs: dict[str, Any] = { + "url": 
f"{client.base_url}//osidb/api/v1/flaws/{flaw_id}/package_versions".format( + flaw_id=flaw_id, + ), + } - json_json_body: Dict[str, Any] = UNSET - if not isinstance(json_body, Unset): - json_body.to_dict() + if isinstance(body, FlawPackageVersionPost): + _json_body: dict[str, Any] = UNSET + if not isinstance(body, Unset): + _json_body = body.to_dict() - multipart_multipart_data: Dict[str, Any] = UNSET - if not isinstance(multipart_data, Unset): - multipart_data.to_multipart() + _kwargs["json"] = _json_body + headers["Content-Type"] = "application/json" - return { - "url": url, - "headers": headers, - "json": form_data.to_dict(), - } + _kwargs["headers"] = headers + return _kwargs def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[OsidbApiV1FlawsPackageVersionsCreateResponse201]: if response.status_code == 201: + # } _response_201 = response.json() response_201: OsidbApiV1FlawsPackageVersionsCreateResponse201 if isinstance(_response_201, Unset): response_201 = UNSET else: - response_201 = OsidbApiV1FlawsPackageVersionsCreateResponse201.from_dict( - _response_201 - ) + response_201 = OsidbApiV1FlawsPackageVersionsCreateResponse201.from_dict(_response_201) return response_201 - return None def _build_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[OsidbApiV1FlawsPackageVersionsCreateResponse201]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) def sync_detailed( - flaw_id: str, + flaw_id: UUID, *, client: AuthenticatedClient, - form_data: FlawPackageVersionPost, - multipart_data: FlawPackageVersionPost, - json_body: FlawPackageVersionPost, + body: Union[ + FlawPackageVersionPost, + FlawPackageVersionPost, + FlawPackageVersionPost, + ], ) -> Response[OsidbApiV1FlawsPackageVersionsCreateResponse201]: + """ + Args: + flaw_id (UUID): + bugzilla_api_key (str): + body (FlawPackageVersionPost): Package model serializer + body (FlawPackageVersionPost): Package model serializer + body (FlawPackageVersionPost): Package model serializer + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[OsidbApiV1FlawsPackageVersionsCreateResponse201] + """ + kwargs = _get_kwargs( flaw_id=flaw_id, client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) response = requests.post( @@ -95,42 +112,72 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( - flaw_id: str, + flaw_id: UUID, *, client: AuthenticatedClient, - form_data: FlawPackageVersionPost, - multipart_data: FlawPackageVersionPost, - json_body: FlawPackageVersionPost, + body: Union[ + FlawPackageVersionPost, + FlawPackageVersionPost, + FlawPackageVersionPost, + ], ) -> Optional[OsidbApiV1FlawsPackageVersionsCreateResponse201]: - """ """ + """ + Args: + flaw_id (UUID): + bugzilla_api_key (str): + body (FlawPackageVersionPost): Package model serializer + body (FlawPackageVersionPost): Package model serializer + body (FlawPackageVersionPost): Package model serializer + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + OsidbApiV1FlawsPackageVersionsCreateResponse201 + """ return sync_detailed( flaw_id=flaw_id, client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ).parsed -async def async_detailed( - flaw_id: str, +async def asyncio_detailed( + flaw_id: UUID, *, client: AuthenticatedClient, - form_data: FlawPackageVersionPost, - multipart_data: FlawPackageVersionPost, - json_body: FlawPackageVersionPost, + body: Union[ + FlawPackageVersionPost, + FlawPackageVersionPost, + FlawPackageVersionPost, + ], ) -> Response[OsidbApiV1FlawsPackageVersionsCreateResponse201]: + """ + Args: + flaw_id (UUID): + bugzilla_api_key (str): + body (FlawPackageVersionPost): Package model serializer + body (FlawPackageVersionPost): Package model serializer + body (FlawPackageVersionPost): Package model serializer + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1FlawsPackageVersionsCreateResponse201] + """ + kwargs = _get_kwargs( flaw_id=flaw_id, client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) async with client.get_async_session().post( @@ -141,25 +188,39 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( - flaw_id: str, +async def asyncio( + flaw_id: UUID, *, client: AuthenticatedClient, - form_data: FlawPackageVersionPost, - multipart_data: FlawPackageVersionPost, - json_body: FlawPackageVersionPost, + body: Union[ + FlawPackageVersionPost, + FlawPackageVersionPost, + FlawPackageVersionPost, + ], ) -> Optional[OsidbApiV1FlawsPackageVersionsCreateResponse201]: - """ """ + """ + Args: + flaw_id (UUID): + bugzilla_api_key (str): + body (FlawPackageVersionPost): Package model serializer + body (FlawPackageVersionPost): Package model serializer + body (FlawPackageVersionPost): Package model serializer + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. 
+ httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + OsidbApiV1FlawsPackageVersionsCreateResponse201 + """ return ( - await async_detailed( + await asyncio_detailed( flaw_id=flaw_id, client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) ).parsed diff --git a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_package_versions_destroy.py b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_package_versions_destroy.py index 445f25d..96ffebd 100644 --- a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_package_versions_destroy.py +++ b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_package_versions_destroy.py @@ -1,8 +1,10 @@ -from typing import Any, Dict, Optional +from http import HTTPStatus +from typing import Any, Optional, Union +from uuid import UUID import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.osidb_api_v1_flaws_package_versions_destroy_response_200 import ( OsidbApiV1FlawsPackageVersionsDestroyResponse200, ) @@ -12,59 +14,71 @@ def _get_kwargs( - flaw_id: str, + flaw_id: UUID, id: str, *, client: AuthenticatedClient, -) -> Dict[str, Any]: - url = "{}/osidb/api/v1/flaws/{flaw_id}/package_versions/{id}".format( - client.base_url, - flaw_id=flaw_id, - id=id, - ) +) -> dict[str, Any]: + headers: dict[str, Any] = client.get_headers() - headers: Dict[str, Any] = client.get_headers() - - return { - "url": url, - "headers": headers, + _kwargs: dict[str, Any] = { + "url": f"{client.base_url}//osidb/api/v1/flaws/{flaw_id}/package_versions/{id}".format( + flaw_id=flaw_id, + id=id, + ), } + _kwargs["headers"] = headers + return _kwargs + def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[OsidbApiV1FlawsPackageVersionsDestroyResponse200]: if response.status_code == 200: + # } _response_200 = response.json() response_200: OsidbApiV1FlawsPackageVersionsDestroyResponse200 if isinstance(_response_200, Unset): response_200 = UNSET else: - response_200 = OsidbApiV1FlawsPackageVersionsDestroyResponse200.from_dict( - _response_200 - ) + response_200 = OsidbApiV1FlawsPackageVersionsDestroyResponse200.from_dict(_response_200) return response_200 - return None def _build_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[OsidbApiV1FlawsPackageVersionsDestroyResponse200]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) def sync_detailed( - flaw_id: str, + flaw_id: UUID, id: str, *, client: AuthenticatedClient, ) -> Response[OsidbApiV1FlawsPackageVersionsDestroyResponse200]: + """Destroy the instance and proxy the delete to Bugzilla + + Args: + flaw_id (UUID): + id (str): + bugzilla_api_key (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[OsidbApiV1FlawsPackageVersionsDestroyResponse200] + """ + kwargs = _get_kwargs( flaw_id=flaw_id, id=id, @@ -79,16 +93,29 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( - flaw_id: str, + flaw_id: UUID, id: str, *, client: AuthenticatedClient, ) -> Optional[OsidbApiV1FlawsPackageVersionsDestroyResponse200]: - """Destroy the instance and proxy the delete to Bugzilla""" + """Destroy the instance and proxy the delete to Bugzilla + + Args: + flaw_id (UUID): + id (str): + bugzilla_api_key (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + OsidbApiV1FlawsPackageVersionsDestroyResponse200 + """ return sync_detailed( flaw_id=flaw_id, @@ -97,12 +124,27 @@ def sync( ).parsed -async def async_detailed( - flaw_id: str, +async def asyncio_detailed( + flaw_id: UUID, id: str, *, client: AuthenticatedClient, ) -> Response[OsidbApiV1FlawsPackageVersionsDestroyResponse200]: + """Destroy the instance and proxy the delete to Bugzilla + + Args: + flaw_id (UUID): + id (str): + bugzilla_api_key (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1FlawsPackageVersionsDestroyResponse200] + """ + kwargs = _get_kwargs( flaw_id=flaw_id, id=id, @@ -117,19 +159,32 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( - flaw_id: str, +async def asyncio( + flaw_id: UUID, id: str, *, client: AuthenticatedClient, ) -> Optional[OsidbApiV1FlawsPackageVersionsDestroyResponse200]: - """Destroy the instance and proxy the delete to Bugzilla""" + """Destroy the instance and proxy the delete to Bugzilla + + Args: + flaw_id (UUID): + id (str): + bugzilla_api_key (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + OsidbApiV1FlawsPackageVersionsDestroyResponse200 + """ return ( - await async_detailed( + await asyncio_detailed( flaw_id=flaw_id, id=id, client=client, diff --git a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_package_versions_list.py b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_package_versions_list.py index 6b40dc1..74000d6 100644 --- a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_package_versions_list.py +++ b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_package_versions_list.py @@ -1,9 +1,11 @@ import datetime -from typing import Any, Dict, List, Optional, Union +from http import HTTPStatus +from typing import Any, Optional, Union +from uuid import UUID import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.osidb_api_v1_flaws_package_versions_list_response_200 import ( OsidbApiV1FlawsPackageVersionsListResponse200, ) @@ -18,8 +20,8 @@ "created_dt__gte": datetime.datetime, "created_dt__lt": datetime.datetime, "created_dt__lte": datetime.datetime, - "exclude_fields": List[str], - "include_fields": List[str], + "exclude_fields": list[str], + "include_fields": list[str], "limit": int, "offset": int, "package": str, @@ -31,222 +33,264 @@ "updated_dt__gte": datetime.datetime, "updated_dt__lt": datetime.datetime, "updated_dt__lte": datetime.datetime, - "uuid": str, + "uuid": UUID, "versions__version": str, } def _get_kwargs( - flaw_id: str, + flaw_id: UUID, *, client: AuthenticatedClient, - created_dt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_date: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, - limit: Union[Unset, None, int] = UNSET, - offset: Union[Unset, None, int] = UNSET, - package: Union[Unset, None, str] = UNSET, - updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - uuid: Union[Unset, None, str] = UNSET, - versions_version: Union[Unset, None, str] = UNSET, -) -> Dict[str, Any]: - url = "{}/osidb/api/v1/flaws/{flaw_id}/package_versions".format( - client.base_url, - flaw_id=flaw_id, - ) - - headers: Dict[str, Any] = client.get_headers() - - json_created_dt: Union[Unset, None, str] = UNSET + created_dt: Union[Unset, datetime.datetime] = UNSET, + created_dt_date: Union[Unset, datetime.date] = UNSET, + created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + created_dt_lt: 
Union[Unset, datetime.datetime] = UNSET, + created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, + limit: Union[Unset, int] = UNSET, + offset: Union[Unset, int] = UNSET, + package: Union[Unset, str] = UNSET, + updated_dt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_date: Union[Unset, datetime.date] = UNSET, + updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + uuid: Union[Unset, UUID] = UNSET, + versions_version: Union[Unset, str] = UNSET, +) -> dict[str, Any]: + params: dict[str, Any] = {} + + json_created_dt: Union[Unset, str] = UNSET if not isinstance(created_dt, Unset): - json_created_dt = created_dt.isoformat() if created_dt else None + json_created_dt = created_dt.isoformat() + + params["created_dt"] = json_created_dt - json_created_dt_date: Union[Unset, None, str] = UNSET + json_created_dt_date: Union[Unset, str] = UNSET if not isinstance(created_dt_date, Unset): - json_created_dt_date = created_dt_date.isoformat() if created_dt_date else None + json_created_dt_date = created_dt_date.isoformat() - json_created_dt_date_gte: Union[Unset, None, str] = UNSET + params["created_dt__date"] = json_created_dt_date + + json_created_dt_date_gte: Union[Unset, str] = UNSET if not isinstance(created_dt_date_gte, Unset): - json_created_dt_date_gte = ( - created_dt_date_gte.isoformat() if created_dt_date_gte else None - ) + json_created_dt_date_gte = created_dt_date_gte.isoformat() + + params["created_dt__date__gte"] = json_created_dt_date_gte - json_created_dt_date_lte: Union[Unset, None, str] = UNSET + json_created_dt_date_lte: Union[Unset, str] = UNSET if not isinstance(created_dt_date_lte, Unset): - json_created_dt_date_lte = ( - created_dt_date_lte.isoformat() if created_dt_date_lte else None - ) + json_created_dt_date_lte = created_dt_date_lte.isoformat() + + params["created_dt__date__lte"] = json_created_dt_date_lte - json_created_dt_gt: Union[Unset, None, str] = UNSET + json_created_dt_gt: Union[Unset, str] = UNSET if not isinstance(created_dt_gt, Unset): - json_created_dt_gt = created_dt_gt.isoformat() if created_dt_gt else None + json_created_dt_gt = created_dt_gt.isoformat() - json_created_dt_gte: Union[Unset, None, str] = UNSET + params["created_dt__gt"] = json_created_dt_gt + + json_created_dt_gte: Union[Unset, str] = UNSET if not isinstance(created_dt_gte, Unset): - json_created_dt_gte = created_dt_gte.isoformat() if created_dt_gte else None + json_created_dt_gte = created_dt_gte.isoformat() + + params["created_dt__gte"] = json_created_dt_gte - json_created_dt_lt: Union[Unset, None, str] = UNSET + json_created_dt_lt: Union[Unset, str] = UNSET if not isinstance(created_dt_lt, Unset): - json_created_dt_lt = created_dt_lt.isoformat() if created_dt_lt else None + json_created_dt_lt = created_dt_lt.isoformat() + + params["created_dt__lt"] = json_created_dt_lt - json_created_dt_lte: Union[Unset, None, str] = UNSET + json_created_dt_lte: Union[Unset, str] = UNSET if not isinstance(created_dt_lte, Unset): - json_created_dt_lte = created_dt_lte.isoformat() if created_dt_lte else None + json_created_dt_lte = created_dt_lte.isoformat() - json_exclude_fields: Union[Unset, None, 
List[str]] = UNSET + params["created_dt__lte"] = json_created_dt_lte + + json_exclude_fields: Union[Unset, list[str]] = UNSET if not isinstance(exclude_fields, Unset): - if exclude_fields is None: - json_exclude_fields = None - else: - json_exclude_fields = exclude_fields + json_exclude_fields = exclude_fields - json_include_fields: Union[Unset, None, List[str]] = UNSET + params["exclude_fields"] = json_exclude_fields + + json_include_fields: Union[Unset, list[str]] = UNSET if not isinstance(include_fields, Unset): - if include_fields is None: - json_include_fields = None - else: - json_include_fields = include_fields + json_include_fields = include_fields + + params["include_fields"] = json_include_fields + + params["limit"] = limit + + params["offset"] = offset - json_updated_dt: Union[Unset, None, str] = UNSET + params["package"] = package + + json_updated_dt: Union[Unset, str] = UNSET if not isinstance(updated_dt, Unset): - json_updated_dt = updated_dt.isoformat() if updated_dt else None + json_updated_dt = updated_dt.isoformat() + + params["updated_dt"] = json_updated_dt - json_updated_dt_date: Union[Unset, None, str] = UNSET + json_updated_dt_date: Union[Unset, str] = UNSET if not isinstance(updated_dt_date, Unset): - json_updated_dt_date = updated_dt_date.isoformat() if updated_dt_date else None + json_updated_dt_date = updated_dt_date.isoformat() + + params["updated_dt__date"] = json_updated_dt_date - json_updated_dt_date_gte: Union[Unset, None, str] = UNSET + json_updated_dt_date_gte: Union[Unset, str] = UNSET if not isinstance(updated_dt_date_gte, Unset): - json_updated_dt_date_gte = ( - updated_dt_date_gte.isoformat() if updated_dt_date_gte else None - ) + json_updated_dt_date_gte = updated_dt_date_gte.isoformat() + + params["updated_dt__date__gte"] = json_updated_dt_date_gte - json_updated_dt_date_lte: Union[Unset, None, str] = UNSET + json_updated_dt_date_lte: Union[Unset, str] = UNSET if not isinstance(updated_dt_date_lte, Unset): - json_updated_dt_date_lte = ( - updated_dt_date_lte.isoformat() if updated_dt_date_lte else None - ) + json_updated_dt_date_lte = updated_dt_date_lte.isoformat() + + params["updated_dt__date__lte"] = json_updated_dt_date_lte - json_updated_dt_gt: Union[Unset, None, str] = UNSET + json_updated_dt_gt: Union[Unset, str] = UNSET if not isinstance(updated_dt_gt, Unset): - json_updated_dt_gt = updated_dt_gt.isoformat() if updated_dt_gt else None + json_updated_dt_gt = updated_dt_gt.isoformat() + + params["updated_dt__gt"] = json_updated_dt_gt - json_updated_dt_gte: Union[Unset, None, str] = UNSET + json_updated_dt_gte: Union[Unset, str] = UNSET if not isinstance(updated_dt_gte, Unset): - json_updated_dt_gte = updated_dt_gte.isoformat() if updated_dt_gte else None + json_updated_dt_gte = updated_dt_gte.isoformat() - json_updated_dt_lt: Union[Unset, None, str] = UNSET + params["updated_dt__gte"] = json_updated_dt_gte + + json_updated_dt_lt: Union[Unset, str] = UNSET if not isinstance(updated_dt_lt, Unset): - json_updated_dt_lt = updated_dt_lt.isoformat() if updated_dt_lt else None + json_updated_dt_lt = updated_dt_lt.isoformat() + + params["updated_dt__lt"] = json_updated_dt_lt - json_updated_dt_lte: Union[Unset, None, str] = UNSET + json_updated_dt_lte: Union[Unset, str] = UNSET if not isinstance(updated_dt_lte, Unset): - json_updated_dt_lte = updated_dt_lte.isoformat() if updated_dt_lte else None - - params: Dict[str, Any] = { - "created_dt": json_created_dt, - "created_dt__date": json_created_dt_date, - "created_dt__date__gte": json_created_dt_date_gte, 
- "created_dt__date__lte": json_created_dt_date_lte, - "created_dt__gt": json_created_dt_gt, - "created_dt__gte": json_created_dt_gte, - "created_dt__lt": json_created_dt_lt, - "created_dt__lte": json_created_dt_lte, - "exclude_fields": json_exclude_fields, - "include_fields": json_include_fields, - "limit": limit, - "offset": offset, - "package": package, - "updated_dt": json_updated_dt, - "updated_dt__date": json_updated_dt_date, - "updated_dt__date__gte": json_updated_dt_date_gte, - "updated_dt__date__lte": json_updated_dt_date_lte, - "updated_dt__gt": json_updated_dt_gt, - "updated_dt__gte": json_updated_dt_gte, - "updated_dt__lt": json_updated_dt_lt, - "updated_dt__lte": json_updated_dt_lte, - "uuid": uuid, - "versions__version": versions_version, - } + json_updated_dt_lte = updated_dt_lte.isoformat() + + params["updated_dt__lte"] = json_updated_dt_lte + + json_uuid: Union[Unset, str] = UNSET + if not isinstance(uuid, Unset): + json_uuid = str(uuid) + + params["uuid"] = json_uuid + + params["versions__version"] = versions_version + params = {k: v for k, v in params.items() if v is not UNSET and v is not None} - return { - "url": url, - "headers": headers, + _kwargs: dict[str, Any] = { + "url": f"{client.base_url}//osidb/api/v1/flaws/{flaw_id}/package_versions".format( + flaw_id=flaw_id, + ), "params": params, } + return _kwargs + def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[OsidbApiV1FlawsPackageVersionsListResponse200]: if response.status_code == 200: + # } _response_200 = response.json() response_200: OsidbApiV1FlawsPackageVersionsListResponse200 if isinstance(_response_200, Unset): response_200 = UNSET else: - response_200 = OsidbApiV1FlawsPackageVersionsListResponse200.from_dict( - _response_200 - ) + response_200 = OsidbApiV1FlawsPackageVersionsListResponse200.from_dict(_response_200) return response_200 - return None def _build_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[OsidbApiV1FlawsPackageVersionsListResponse200]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) def sync_detailed( - flaw_id: str, + flaw_id: UUID, *, client: AuthenticatedClient, - created_dt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_date: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, - limit: Union[Unset, None, int] = UNSET, - offset: Union[Unset, None, int] = UNSET, - package: Union[Unset, None, str] = UNSET, - updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_gt: Union[Unset, None, 
datetime.datetime] = UNSET, - updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - uuid: Union[Unset, None, str] = UNSET, - versions_version: Union[Unset, None, str] = UNSET, + created_dt: Union[Unset, datetime.datetime] = UNSET, + created_dt_date: Union[Unset, datetime.date] = UNSET, + created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, + limit: Union[Unset, int] = UNSET, + offset: Union[Unset, int] = UNSET, + package: Union[Unset, str] = UNSET, + updated_dt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_date: Union[Unset, datetime.date] = UNSET, + updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + uuid: Union[Unset, UUID] = UNSET, + versions_version: Union[Unset, str] = UNSET, ) -> Response[OsidbApiV1FlawsPackageVersionsListResponse200]: + """ + Args: + flaw_id (UUID): + created_dt (Union[Unset, datetime.datetime]): + created_dt_date (Union[Unset, datetime.date]): + created_dt_date_gte (Union[Unset, datetime.date]): + created_dt_date_lte (Union[Unset, datetime.date]): + created_dt_gt (Union[Unset, datetime.datetime]): + created_dt_gte (Union[Unset, datetime.datetime]): + created_dt_lt (Union[Unset, datetime.datetime]): + created_dt_lte (Union[Unset, datetime.datetime]): + exclude_fields (Union[Unset, list[str]]): + include_fields (Union[Unset, list[str]]): + limit (Union[Unset, int]): + offset (Union[Unset, int]): + package (Union[Unset, str]): + updated_dt (Union[Unset, datetime.datetime]): + updated_dt_date (Union[Unset, datetime.date]): + updated_dt_date_gte (Union[Unset, datetime.date]): + updated_dt_date_lte (Union[Unset, datetime.date]): + updated_dt_gt (Union[Unset, datetime.datetime]): + updated_dt_gte (Union[Unset, datetime.datetime]): + updated_dt_lt (Union[Unset, datetime.datetime]): + updated_dt_lte (Union[Unset, datetime.datetime]): + uuid (Union[Unset, UUID]): + versions_version (Union[Unset, str]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[OsidbApiV1FlawsPackageVersionsListResponse200] + """ + kwargs = _get_kwargs( flaw_id=flaw_id, client=client, @@ -283,38 +327,71 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( - flaw_id: str, + flaw_id: UUID, *, client: AuthenticatedClient, - created_dt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_date: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, - limit: Union[Unset, None, int] = UNSET, - offset: Union[Unset, None, int] = UNSET, - package: Union[Unset, None, str] = UNSET, - updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - uuid: Union[Unset, None, str] = UNSET, - versions_version: Union[Unset, None, str] = UNSET, + created_dt: Union[Unset, datetime.datetime] = UNSET, + created_dt_date: Union[Unset, datetime.date] = UNSET, + created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, + limit: Union[Unset, int] = UNSET, + offset: Union[Unset, int] = UNSET, + package: Union[Unset, str] = UNSET, + updated_dt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_date: Union[Unset, datetime.date] = UNSET, + updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + uuid: Union[Unset, UUID] = UNSET, + versions_version: Union[Unset, str] = UNSET, ) -> Optional[OsidbApiV1FlawsPackageVersionsListResponse200]: - """ """ + """ + Args: + flaw_id (UUID): + created_dt (Union[Unset, datetime.datetime]): + created_dt_date (Union[Unset, datetime.date]): + created_dt_date_gte (Union[Unset, datetime.date]): + created_dt_date_lte (Union[Unset, datetime.date]): + created_dt_gt (Union[Unset, datetime.datetime]): + created_dt_gte (Union[Unset, datetime.datetime]): + created_dt_lt (Union[Unset, datetime.datetime]): + created_dt_lte (Union[Unset, datetime.datetime]): + exclude_fields (Union[Unset, list[str]]): + include_fields 
(Union[Unset, list[str]]): + limit (Union[Unset, int]): + offset (Union[Unset, int]): + package (Union[Unset, str]): + updated_dt (Union[Unset, datetime.datetime]): + updated_dt_date (Union[Unset, datetime.date]): + updated_dt_date_gte (Union[Unset, datetime.date]): + updated_dt_date_lte (Union[Unset, datetime.date]): + updated_dt_gt (Union[Unset, datetime.datetime]): + updated_dt_gte (Union[Unset, datetime.datetime]): + updated_dt_lt (Union[Unset, datetime.datetime]): + updated_dt_lte (Union[Unset, datetime.datetime]): + uuid (Union[Unset, UUID]): + versions_version (Union[Unset, str]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + OsidbApiV1FlawsPackageVersionsListResponse200 + """ return sync_detailed( flaw_id=flaw_id, @@ -345,34 +422,69 @@ def sync( ).parsed -async def async_detailed( - flaw_id: str, +async def asyncio_detailed( + flaw_id: UUID, *, client: AuthenticatedClient, - created_dt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_date: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, - limit: Union[Unset, None, int] = UNSET, - offset: Union[Unset, None, int] = UNSET, - package: Union[Unset, None, str] = UNSET, - updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - uuid: Union[Unset, None, str] = UNSET, - versions_version: Union[Unset, None, str] = UNSET, + created_dt: Union[Unset, datetime.datetime] = UNSET, + created_dt_date: Union[Unset, datetime.date] = UNSET, + created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, + limit: Union[Unset, int] = UNSET, + offset: Union[Unset, int] = UNSET, + package: Union[Unset, str] = UNSET, + updated_dt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_date: Union[Unset, datetime.date] = UNSET, + updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lte: Union[Unset, 
datetime.datetime] = UNSET, + uuid: Union[Unset, UUID] = UNSET, + versions_version: Union[Unset, str] = UNSET, ) -> Response[OsidbApiV1FlawsPackageVersionsListResponse200]: + """ + Args: + flaw_id (UUID): + created_dt (Union[Unset, datetime.datetime]): + created_dt_date (Union[Unset, datetime.date]): + created_dt_date_gte (Union[Unset, datetime.date]): + created_dt_date_lte (Union[Unset, datetime.date]): + created_dt_gt (Union[Unset, datetime.datetime]): + created_dt_gte (Union[Unset, datetime.datetime]): + created_dt_lt (Union[Unset, datetime.datetime]): + created_dt_lte (Union[Unset, datetime.datetime]): + exclude_fields (Union[Unset, list[str]]): + include_fields (Union[Unset, list[str]]): + limit (Union[Unset, int]): + offset (Union[Unset, int]): + package (Union[Unset, str]): + updated_dt (Union[Unset, datetime.datetime]): + updated_dt_date (Union[Unset, datetime.date]): + updated_dt_date_gte (Union[Unset, datetime.date]): + updated_dt_date_lte (Union[Unset, datetime.date]): + updated_dt_gt (Union[Unset, datetime.datetime]): + updated_dt_gte (Union[Unset, datetime.datetime]): + updated_dt_lt (Union[Unset, datetime.datetime]): + updated_dt_lte (Union[Unset, datetime.datetime]): + uuid (Union[Unset, UUID]): + versions_version (Union[Unset, str]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1FlawsPackageVersionsListResponse200] + """ + kwargs = _get_kwargs( flaw_id=flaw_id, client=client, @@ -409,41 +521,74 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( - flaw_id: str, +async def asyncio( + flaw_id: UUID, *, client: AuthenticatedClient, - created_dt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_date: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, - limit: Union[Unset, None, int] = UNSET, - offset: Union[Unset, None, int] = UNSET, - package: Union[Unset, None, str] = UNSET, - updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - uuid: Union[Unset, None, str] = UNSET, - versions_version: Union[Unset, None, str] = UNSET, + created_dt: Union[Unset, datetime.datetime] = UNSET, + created_dt_date: Union[Unset, datetime.date] = UNSET, + created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + created_dt_gt: Union[Unset, 
datetime.datetime] = UNSET, + created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, + limit: Union[Unset, int] = UNSET, + offset: Union[Unset, int] = UNSET, + package: Union[Unset, str] = UNSET, + updated_dt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_date: Union[Unset, datetime.date] = UNSET, + updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + uuid: Union[Unset, UUID] = UNSET, + versions_version: Union[Unset, str] = UNSET, ) -> Optional[OsidbApiV1FlawsPackageVersionsListResponse200]: - """ """ + """ + Args: + flaw_id (UUID): + created_dt (Union[Unset, datetime.datetime]): + created_dt_date (Union[Unset, datetime.date]): + created_dt_date_gte (Union[Unset, datetime.date]): + created_dt_date_lte (Union[Unset, datetime.date]): + created_dt_gt (Union[Unset, datetime.datetime]): + created_dt_gte (Union[Unset, datetime.datetime]): + created_dt_lt (Union[Unset, datetime.datetime]): + created_dt_lte (Union[Unset, datetime.datetime]): + exclude_fields (Union[Unset, list[str]]): + include_fields (Union[Unset, list[str]]): + limit (Union[Unset, int]): + offset (Union[Unset, int]): + package (Union[Unset, str]): + updated_dt (Union[Unset, datetime.datetime]): + updated_dt_date (Union[Unset, datetime.date]): + updated_dt_date_gte (Union[Unset, datetime.date]): + updated_dt_date_lte (Union[Unset, datetime.date]): + updated_dt_gt (Union[Unset, datetime.datetime]): + updated_dt_gte (Union[Unset, datetime.datetime]): + updated_dt_lt (Union[Unset, datetime.datetime]): + updated_dt_lte (Union[Unset, datetime.datetime]): + uuid (Union[Unset, UUID]): + versions_version (Union[Unset, str]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + OsidbApiV1FlawsPackageVersionsListResponse200 + """ return ( - await async_detailed( + await asyncio_detailed( flaw_id=flaw_id, client=client, created_dt=created_dt, diff --git a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_package_versions_retrieve.py b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_package_versions_retrieve.py index f7ad815..571d23f 100644 --- a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_package_versions_retrieve.py +++ b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_package_versions_retrieve.py @@ -1,98 +1,105 @@ -from typing import Any, Dict, List, Optional, Union +from http import HTTPStatus +from typing import Any, Optional, Union +from uuid import UUID import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.osidb_api_v1_flaws_package_versions_retrieve_response_200 import ( OsidbApiV1FlawsPackageVersionsRetrieveResponse200, ) from ...types import UNSET, Response, Unset QUERY_PARAMS = { - "exclude_fields": List[str], - "include_fields": List[str], + "exclude_fields": list[str], + "include_fields": list[str], } def _get_kwargs( - flaw_id: str, + flaw_id: UUID, id: str, *, client: AuthenticatedClient, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, -) -> Dict[str, Any]: - url = "{}/osidb/api/v1/flaws/{flaw_id}/package_versions/{id}".format( - client.base_url, - flaw_id=flaw_id, - id=id, - ) + exclude_fields: Union[Unset, list[str]] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, +) -> dict[str, Any]: + params: dict[str, Any] = {} - headers: Dict[str, Any] = client.get_headers() - - json_exclude_fields: Union[Unset, None, List[str]] = UNSET + json_exclude_fields: Union[Unset, list[str]] = UNSET if not isinstance(exclude_fields, Unset): - if exclude_fields is None: - json_exclude_fields = None - else: - json_exclude_fields = exclude_fields + json_exclude_fields = exclude_fields + + params["exclude_fields"] = json_exclude_fields - json_include_fields: Union[Unset, None, List[str]] = UNSET + json_include_fields: Union[Unset, list[str]] = UNSET if not isinstance(include_fields, Unset): - if include_fields is None: - json_include_fields = None - else: - json_include_fields = include_fields + json_include_fields = include_fields + + params["include_fields"] = json_include_fields - params: Dict[str, Any] = { - "exclude_fields": json_exclude_fields, - "include_fields": json_include_fields, - } params = {k: v for k, v in params.items() if v is not UNSET and v is not None} - return { - "url": url, - "headers": headers, + _kwargs: dict[str, Any] = { + "url": f"{client.base_url}//osidb/api/v1/flaws/{flaw_id}/package_versions/{id}".format( + flaw_id=flaw_id, + id=id, + ), "params": params, } + return _kwargs + def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[OsidbApiV1FlawsPackageVersionsRetrieveResponse200]: if response.status_code == 200: + # } _response_200 = response.json() response_200: OsidbApiV1FlawsPackageVersionsRetrieveResponse200 if isinstance(_response_200, Unset): response_200 = UNSET else: - response_200 = OsidbApiV1FlawsPackageVersionsRetrieveResponse200.from_dict( - _response_200 - ) + response_200 = OsidbApiV1FlawsPackageVersionsRetrieveResponse200.from_dict(_response_200) return response_200 - return None def _build_response( - 
*, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[OsidbApiV1FlawsPackageVersionsRetrieveResponse200]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) def sync_detailed( - flaw_id: str, + flaw_id: UUID, id: str, *, client: AuthenticatedClient, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, ) -> Response[OsidbApiV1FlawsPackageVersionsRetrieveResponse200]: + """ + Args: + flaw_id (UUID): + id (str): + exclude_fields (Union[Unset, list[str]]): + include_fields (Union[Unset, list[str]]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1FlawsPackageVersionsRetrieveResponse200] + """ + kwargs = _get_kwargs( flaw_id=flaw_id, id=id, @@ -109,18 +116,31 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( - flaw_id: str, + flaw_id: UUID, id: str, *, client: AuthenticatedClient, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, ) -> Optional[OsidbApiV1FlawsPackageVersionsRetrieveResponse200]: - """ """ + """ + Args: + flaw_id (UUID): + id (str): + exclude_fields (Union[Unset, list[str]]): + include_fields (Union[Unset, list[str]]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + OsidbApiV1FlawsPackageVersionsRetrieveResponse200 + """ return sync_detailed( flaw_id=flaw_id, @@ -131,14 +151,29 @@ def sync( ).parsed -async def async_detailed( - flaw_id: str, +async def asyncio_detailed( + flaw_id: UUID, id: str, *, client: AuthenticatedClient, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, ) -> Response[OsidbApiV1FlawsPackageVersionsRetrieveResponse200]: + """ + Args: + flaw_id (UUID): + id (str): + exclude_fields (Union[Unset, list[str]]): + include_fields (Union[Unset, list[str]]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[OsidbApiV1FlawsPackageVersionsRetrieveResponse200] + """ + kwargs = _get_kwargs( flaw_id=flaw_id, id=id, @@ -155,21 +190,34 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( - flaw_id: str, +async def asyncio( + flaw_id: UUID, id: str, *, client: AuthenticatedClient, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, ) -> Optional[OsidbApiV1FlawsPackageVersionsRetrieveResponse200]: - """ """ + """ + Args: + flaw_id (UUID): + id (str): + exclude_fields (Union[Unset, list[str]]): + include_fields (Union[Unset, list[str]]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + OsidbApiV1FlawsPackageVersionsRetrieveResponse200 + """ return ( - await async_detailed( + await asyncio_detailed( flaw_id=flaw_id, id=id, client=client, diff --git a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_package_versions_update.py b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_package_versions_update.py index 2289f9e..681609c 100644 --- a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_package_versions_update.py +++ b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_package_versions_update.py @@ -1,8 +1,10 @@ -from typing import Any, Dict, Optional +from http import HTTPStatus +from typing import Any, Optional, Union +from uuid import UUID import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.flaw_package_version_put import FlawPackageVersionPut from ...models.osidb_api_v1_flaws_package_versions_update_response_200 import ( OsidbApiV1FlawsPackageVersionsUpdateResponse200, @@ -10,85 +12,101 @@ from ...types import UNSET, Response, Unset QUERY_PARAMS = {} + REQUEST_BODY_TYPE = FlawPackageVersionPut def _get_kwargs( - flaw_id: str, + flaw_id: UUID, id: str, *, client: AuthenticatedClient, - form_data: FlawPackageVersionPut, - multipart_data: FlawPackageVersionPut, - json_body: FlawPackageVersionPut, -) -> Dict[str, Any]: - url = "{}/osidb/api/v1/flaws/{flaw_id}/package_versions/{id}".format( - client.base_url, - flaw_id=flaw_id, - id=id, - ) - - headers: Dict[str, Any] = client.get_headers() + body: Union[ + FlawPackageVersionPut, + FlawPackageVersionPut, + FlawPackageVersionPut, + ], +) -> dict[str, Any]: + headers: dict[str, Any] = client.get_headers() + + _kwargs: dict[str, Any] = { + "url": f"{client.base_url}//osidb/api/v1/flaws/{flaw_id}/package_versions/{id}".format( + flaw_id=flaw_id, + id=id, + ), + } - json_json_body: Dict[str, Any] = UNSET - if not isinstance(json_body, Unset): - json_body.to_dict() + if isinstance(body, FlawPackageVersionPut): + _json_body: dict[str, Any] = UNSET + if not isinstance(body, Unset): + _json_body = body.to_dict() - multipart_multipart_data: Dict[str, Any] = UNSET - if not isinstance(multipart_data, Unset): - multipart_data.to_multipart() + _kwargs["json"] = _json_body + headers["Content-Type"] = "application/json" - return { - "url": url, - "headers": headers, - "json": form_data.to_dict(), - } + _kwargs["headers"] = 
headers + return _kwargs def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[OsidbApiV1FlawsPackageVersionsUpdateResponse200]: if response.status_code == 200: + # } _response_200 = response.json() response_200: OsidbApiV1FlawsPackageVersionsUpdateResponse200 if isinstance(_response_200, Unset): response_200 = UNSET else: - response_200 = OsidbApiV1FlawsPackageVersionsUpdateResponse200.from_dict( - _response_200 - ) + response_200 = OsidbApiV1FlawsPackageVersionsUpdateResponse200.from_dict(_response_200) return response_200 - return None def _build_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[OsidbApiV1FlawsPackageVersionsUpdateResponse200]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) def sync_detailed( - flaw_id: str, + flaw_id: UUID, id: str, *, client: AuthenticatedClient, - form_data: FlawPackageVersionPut, - multipart_data: FlawPackageVersionPut, - json_body: FlawPackageVersionPut, + body: Union[ + FlawPackageVersionPut, + FlawPackageVersionPut, + FlawPackageVersionPut, + ], ) -> Response[OsidbApiV1FlawsPackageVersionsUpdateResponse200]: + """ + Args: + flaw_id (UUID): + id (str): + bugzilla_api_key (str): + body (FlawPackageVersionPut): Package model serializer + body (FlawPackageVersionPut): Package model serializer + body (FlawPackageVersionPut): Package model serializer + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1FlawsPackageVersionsUpdateResponse200] + """ + kwargs = _get_kwargs( flaw_id=flaw_id, id=id, client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) response = requests.put( @@ -99,46 +117,78 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( - flaw_id: str, + flaw_id: UUID, id: str, *, client: AuthenticatedClient, - form_data: FlawPackageVersionPut, - multipart_data: FlawPackageVersionPut, - json_body: FlawPackageVersionPut, + body: Union[ + FlawPackageVersionPut, + FlawPackageVersionPut, + FlawPackageVersionPut, + ], ) -> Optional[OsidbApiV1FlawsPackageVersionsUpdateResponse200]: - """ """ + """ + Args: + flaw_id (UUID): + id (str): + bugzilla_api_key (str): + body (FlawPackageVersionPut): Package model serializer + body (FlawPackageVersionPut): Package model serializer + body (FlawPackageVersionPut): Package model serializer + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + OsidbApiV1FlawsPackageVersionsUpdateResponse200 + """ return sync_detailed( flaw_id=flaw_id, id=id, client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ).parsed -async def async_detailed( - flaw_id: str, +async def asyncio_detailed( + flaw_id: UUID, id: str, *, client: AuthenticatedClient, - form_data: FlawPackageVersionPut, - multipart_data: FlawPackageVersionPut, - json_body: FlawPackageVersionPut, + body: Union[ + FlawPackageVersionPut, + FlawPackageVersionPut, + FlawPackageVersionPut, + ], ) -> Response[OsidbApiV1FlawsPackageVersionsUpdateResponse200]: + """ + Args: + flaw_id (UUID): + id (str): + bugzilla_api_key (str): + body (FlawPackageVersionPut): Package model serializer + body (FlawPackageVersionPut): Package model serializer + body (FlawPackageVersionPut): Package model serializer + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1FlawsPackageVersionsUpdateResponse200] + """ + kwargs = _get_kwargs( flaw_id=flaw_id, id=id, client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) async with client.get_async_session().put( @@ -149,27 +199,42 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( - flaw_id: str, +async def asyncio( + flaw_id: UUID, id: str, *, client: AuthenticatedClient, - form_data: FlawPackageVersionPut, - multipart_data: FlawPackageVersionPut, - json_body: FlawPackageVersionPut, + body: Union[ + FlawPackageVersionPut, + FlawPackageVersionPut, + FlawPackageVersionPut, + ], ) -> Optional[OsidbApiV1FlawsPackageVersionsUpdateResponse200]: - """ """ + """ + Args: + flaw_id (UUID): + id (str): + bugzilla_api_key (str): + body (FlawPackageVersionPut): Package model serializer + body (FlawPackageVersionPut): Package model serializer + body (FlawPackageVersionPut): Package model serializer + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + OsidbApiV1FlawsPackageVersionsUpdateResponse200 + """ return ( - await async_detailed( + await asyncio_detailed( flaw_id=flaw_id, id=id, client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) ).parsed diff --git a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_promote_create.py b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_promote_create.py index f3faf82..db824a2 100644 --- a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_promote_create.py +++ b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_promote_create.py @@ -1,8 +1,9 @@ -from typing import Any, Dict, Optional +from http import HTTPStatus +from typing import Any, Optional, Union import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.osidb_api_v1_flaws_promote_create_response_200 import ( OsidbApiV1FlawsPromoteCreateResponse200, ) @@ -15,45 +16,42 @@ def _get_kwargs( flaw_id: str, *, client: AuthenticatedClient, -) -> Dict[str, Any]: - url = "{}/osidb/api/v1/flaws/{flaw_id}/promote".format( - client.base_url, - flaw_id=flaw_id, - ) +) -> dict[str, Any]: + headers: dict[str, Any] = client.get_headers() - headers: Dict[str, Any] = client.get_headers() - - return { - "url": url, - "headers": headers, + _kwargs: dict[str, Any] = { + "url": f"{client.base_url}//osidb/api/v1/flaws/{flaw_id}/promote".format( + flaw_id=flaw_id, + ), } + _kwargs["headers"] = headers + return _kwargs + def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[OsidbApiV1FlawsPromoteCreateResponse200]: if response.status_code == 200: + # } _response_200 = response.json() response_200: OsidbApiV1FlawsPromoteCreateResponse200 if isinstance(_response_200, Unset): response_200 = UNSET else: - response_200 = OsidbApiV1FlawsPromoteCreateResponse200.from_dict( - _response_200 - ) + response_200 = OsidbApiV1FlawsPromoteCreateResponse200.from_dict(_response_200) return response_200 - return None def _build_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[OsidbApiV1FlawsPromoteCreateResponse200]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) @@ -62,6 +60,24 @@ def sync_detailed( *, client: AuthenticatedClient, ) -> Response[OsidbApiV1FlawsPromoteCreateResponse200]: + """workflow promotion API endpoint + + try to adjust workflow classification of flaw to the next state available + return its workflow:state classification or errors if not possible to promote + + Args: + flaw_id (str): + bugzilla_api_key (str): + jira_api_key (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[OsidbApiV1FlawsPromoteCreateResponse200] + """ + kwargs = _get_kwargs( flaw_id=flaw_id, client=client, @@ -75,7 +91,7 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( @@ -86,7 +102,20 @@ def sync( """workflow promotion API endpoint try to adjust workflow classification of flaw to the next state available - return its workflow:state classification or errors if not possible to promote""" + return its workflow:state classification or errors if not possible to promote + + Args: + flaw_id (str): + bugzilla_api_key (str): + jira_api_key (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + OsidbApiV1FlawsPromoteCreateResponse200 + """ return sync_detailed( flaw_id=flaw_id, @@ -94,11 +123,29 @@ def sync( ).parsed -async def async_detailed( +async def asyncio_detailed( flaw_id: str, *, client: AuthenticatedClient, ) -> Response[OsidbApiV1FlawsPromoteCreateResponse200]: + """workflow promotion API endpoint + + try to adjust workflow classification of flaw to the next state available + return its workflow:state classification or errors if not possible to promote + + Args: + flaw_id (str): + bugzilla_api_key (str): + jira_api_key (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1FlawsPromoteCreateResponse200] + """ + kwargs = _get_kwargs( flaw_id=flaw_id, client=client, @@ -112,10 +159,10 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( +async def asyncio( flaw_id: str, *, client: AuthenticatedClient, @@ -123,10 +170,23 @@ async def async_( """workflow promotion API endpoint try to adjust workflow classification of flaw to the next state available - return its workflow:state classification or errors if not possible to promote""" + return its workflow:state classification or errors if not possible to promote + + Args: + flaw_id (str): + bugzilla_api_key (str): + jira_api_key (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + OsidbApiV1FlawsPromoteCreateResponse200 + """ return ( - await async_detailed( + await asyncio_detailed( flaw_id=flaw_id, client=client, ) diff --git a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_references_create.py b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_references_create.py index a08bb46..f3a8407 100644 --- a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_references_create.py +++ b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_references_create.py @@ -1,8 +1,10 @@ -from typing import Any, Dict, Optional +from http import HTTPStatus +from typing import Any, Optional, Union +from uuid import UUID import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.flaw_reference_post import FlawReferencePost from ...models.osidb_api_v1_flaws_references_create_response_201 import ( OsidbApiV1FlawsReferencesCreateResponse201, @@ -10,81 +12,96 @@ from ...types import UNSET, Response, Unset QUERY_PARAMS = {} + REQUEST_BODY_TYPE = FlawReferencePost def _get_kwargs( - flaw_id: str, + flaw_id: UUID, *, client: AuthenticatedClient, - form_data: FlawReferencePost, - multipart_data: FlawReferencePost, - json_body: FlawReferencePost, -) -> Dict[str, Any]: - url = "{}/osidb/api/v1/flaws/{flaw_id}/references".format( - client.base_url, - flaw_id=flaw_id, - ) - - headers: Dict[str, Any] = client.get_headers() + body: Union[ + FlawReferencePost, + FlawReferencePost, + FlawReferencePost, + ], +) -> dict[str, Any]: + headers: dict[str, Any] = client.get_headers() + + _kwargs: dict[str, Any] = { + "url": f"{client.base_url}//osidb/api/v1/flaws/{flaw_id}/references".format( + flaw_id=flaw_id, + ), + } - json_json_body: Dict[str, Any] = UNSET - if not isinstance(json_body, Unset): - json_body.to_dict() + if isinstance(body, FlawReferencePost): + _json_body: dict[str, Any] = UNSET + if not isinstance(body, Unset): + _json_body = body.to_dict() - multipart_multipart_data: Dict[str, Any] = UNSET - if not isinstance(multipart_data, Unset): - multipart_data.to_multipart() + _kwargs["json"] = _json_body + headers["Content-Type"] = "application/json" - return { - "url": url, - "headers": headers, - "json": form_data.to_dict(), - } + _kwargs["headers"] = headers + return _kwargs def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[OsidbApiV1FlawsReferencesCreateResponse201]: if response.status_code == 201: + # } _response_201 = response.json() response_201: OsidbApiV1FlawsReferencesCreateResponse201 if isinstance(_response_201, Unset): response_201 = UNSET else: - response_201 = OsidbApiV1FlawsReferencesCreateResponse201.from_dict( - _response_201 - ) + response_201 = OsidbApiV1FlawsReferencesCreateResponse201.from_dict(_response_201) return response_201 - return None def _build_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[OsidbApiV1FlawsReferencesCreateResponse201]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) def sync_detailed( - flaw_id: str, + flaw_id: UUID, *, client: AuthenticatedClient, - form_data: FlawReferencePost, - multipart_data: FlawReferencePost, - 
json_body: FlawReferencePost, + body: Union[ + FlawReferencePost, + FlawReferencePost, + FlawReferencePost, + ], ) -> Response[OsidbApiV1FlawsReferencesCreateResponse201]: + """ + Args: + flaw_id (UUID): + bugzilla_api_key (str): + body (FlawReferencePost): FlawReference serializer + body (FlawReferencePost): FlawReference serializer + body (FlawReferencePost): FlawReference serializer + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1FlawsReferencesCreateResponse201] + """ + kwargs = _get_kwargs( flaw_id=flaw_id, client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) response = requests.post( @@ -95,42 +112,72 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( - flaw_id: str, + flaw_id: UUID, *, client: AuthenticatedClient, - form_data: FlawReferencePost, - multipart_data: FlawReferencePost, - json_body: FlawReferencePost, + body: Union[ + FlawReferencePost, + FlawReferencePost, + FlawReferencePost, + ], ) -> Optional[OsidbApiV1FlawsReferencesCreateResponse201]: - """ """ + """ + Args: + flaw_id (UUID): + bugzilla_api_key (str): + body (FlawReferencePost): FlawReference serializer + body (FlawReferencePost): FlawReference serializer + body (FlawReferencePost): FlawReference serializer + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + OsidbApiV1FlawsReferencesCreateResponse201 + """ return sync_detailed( flaw_id=flaw_id, client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ).parsed -async def async_detailed( - flaw_id: str, +async def asyncio_detailed( + flaw_id: UUID, *, client: AuthenticatedClient, - form_data: FlawReferencePost, - multipart_data: FlawReferencePost, - json_body: FlawReferencePost, + body: Union[ + FlawReferencePost, + FlawReferencePost, + FlawReferencePost, + ], ) -> Response[OsidbApiV1FlawsReferencesCreateResponse201]: + """ + Args: + flaw_id (UUID): + bugzilla_api_key (str): + body (FlawReferencePost): FlawReference serializer + body (FlawReferencePost): FlawReference serializer + body (FlawReferencePost): FlawReference serializer + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
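A short sketch of the new single `body` argument that replaces the old form_data/multipart_data/json_body trio; constructing the FlawReferencePost instance is left to the caller, since its exact fields are not shown in this hunk:

# Sketch only: import paths follow the package layout of this patch.
from osidb_bindings.bindings.python_client.api.osidb import (
    osidb_api_v1_flaws_references_create as references_create,
)
from osidb_bindings.bindings.python_client.models.flaw_reference_post import (
    FlawReferencePost,
)

def add_reference(client, flaw_id, reference: FlawReferencePost):
    # A single keyword argument, `body`, now carries the request payload;
    # the helper serializes it to JSON and sets the Content-Type header.
    return references_create.sync(flaw_id=flaw_id, client=client, body=reference)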
+ + Returns: + Response[OsidbApiV1FlawsReferencesCreateResponse201] + """ + kwargs = _get_kwargs( flaw_id=flaw_id, client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) async with client.get_async_session().post( @@ -141,25 +188,39 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( - flaw_id: str, +async def asyncio( + flaw_id: UUID, *, client: AuthenticatedClient, - form_data: FlawReferencePost, - multipart_data: FlawReferencePost, - json_body: FlawReferencePost, + body: Union[ + FlawReferencePost, + FlawReferencePost, + FlawReferencePost, + ], ) -> Optional[OsidbApiV1FlawsReferencesCreateResponse201]: - """ """ + """ + Args: + flaw_id (UUID): + bugzilla_api_key (str): + body (FlawReferencePost): FlawReference serializer + body (FlawReferencePost): FlawReference serializer + body (FlawReferencePost): FlawReference serializer + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + OsidbApiV1FlawsReferencesCreateResponse201 + """ return ( - await async_detailed( + await asyncio_detailed( flaw_id=flaw_id, client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) ).parsed diff --git a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_references_destroy.py b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_references_destroy.py index 67a3a61..ab54b5b 100644 --- a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_references_destroy.py +++ b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_references_destroy.py @@ -1,8 +1,10 @@ -from typing import Any, Dict, Optional +from http import HTTPStatus +from typing import Any, Optional, Union +from uuid import UUID import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.osidb_api_v1_flaws_references_destroy_response_200 import ( OsidbApiV1FlawsReferencesDestroyResponse200, ) @@ -12,59 +14,71 @@ def _get_kwargs( - flaw_id: str, + flaw_id: UUID, id: str, *, client: AuthenticatedClient, -) -> Dict[str, Any]: - url = "{}/osidb/api/v1/flaws/{flaw_id}/references/{id}".format( - client.base_url, - flaw_id=flaw_id, - id=id, - ) +) -> dict[str, Any]: + headers: dict[str, Any] = client.get_headers() - headers: Dict[str, Any] = client.get_headers() - - return { - "url": url, - "headers": headers, + _kwargs: dict[str, Any] = { + "url": f"{client.base_url}//osidb/api/v1/flaws/{flaw_id}/references/{id}".format( + flaw_id=flaw_id, + id=id, + ), } + _kwargs["headers"] = headers + return _kwargs + def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[OsidbApiV1FlawsReferencesDestroyResponse200]: if response.status_code == 200: + # } _response_200 = response.json() response_200: OsidbApiV1FlawsReferencesDestroyResponse200 if isinstance(_response_200, Unset): response_200 = UNSET else: - response_200 = OsidbApiV1FlawsReferencesDestroyResponse200.from_dict( - _response_200 - ) + response_200 = OsidbApiV1FlawsReferencesDestroyResponse200.from_dict(_response_200) return response_200 - return None def _build_response( - *, 
response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[OsidbApiV1FlawsReferencesDestroyResponse200]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) def sync_detailed( - flaw_id: str, + flaw_id: UUID, id: str, *, client: AuthenticatedClient, ) -> Response[OsidbApiV1FlawsReferencesDestroyResponse200]: + """Destroy the instance and proxy the delete to Bugzilla + + Args: + flaw_id (UUID): + id (str): + bugzilla_api_key (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1FlawsReferencesDestroyResponse200] + """ + kwargs = _get_kwargs( flaw_id=flaw_id, id=id, @@ -79,16 +93,29 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( - flaw_id: str, + flaw_id: UUID, id: str, *, client: AuthenticatedClient, ) -> Optional[OsidbApiV1FlawsReferencesDestroyResponse200]: - """Destroy the instance and proxy the delete to Bugzilla""" + """Destroy the instance and proxy the delete to Bugzilla + + Args: + flaw_id (UUID): + id (str): + bugzilla_api_key (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + OsidbApiV1FlawsReferencesDestroyResponse200 + """ return sync_detailed( flaw_id=flaw_id, @@ -97,12 +124,27 @@ def sync( ).parsed -async def async_detailed( - flaw_id: str, +async def asyncio_detailed( + flaw_id: UUID, id: str, *, client: AuthenticatedClient, ) -> Response[OsidbApiV1FlawsReferencesDestroyResponse200]: + """Destroy the instance and proxy the delete to Bugzilla + + Args: + flaw_id (UUID): + id (str): + bugzilla_api_key (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1FlawsReferencesDestroyResponse200] + """ + kwargs = _get_kwargs( flaw_id=flaw_id, id=id, @@ -117,19 +159,32 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( - flaw_id: str, +async def asyncio( + flaw_id: UUID, id: str, *, client: AuthenticatedClient, ) -> Optional[OsidbApiV1FlawsReferencesDestroyResponse200]: - """Destroy the instance and proxy the delete to Bugzilla""" + """Destroy the instance and proxy the delete to Bugzilla + + Args: + flaw_id (UUID): + id (str): + bugzilla_api_key (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
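A usage sketch for the destroy helper, which per its docstring proxies the delete to Bugzilla; the Bugzilla API key is assumed to be supplied through the client's headers rather than as a function argument:

# Sketch only.
from osidb_bindings.bindings.python_client.api.osidb import (
    osidb_api_v1_flaws_references_destroy as references_destroy,
)

def delete_reference(client, flaw_id, reference_id: str):
    # Calls the /osidb/api/v1/flaws/{flaw_id}/references/{id} destroy
    # endpoint and returns the parsed
    # OsidbApiV1FlawsReferencesDestroyResponse200 on success.
    return references_destroy.sync(flaw_id=flaw_id, id=reference_id, client=client)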
+ + Returns: + OsidbApiV1FlawsReferencesDestroyResponse200 + """ return ( - await async_detailed( + await asyncio_detailed( flaw_id=flaw_id, id=id, client=client, diff --git a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_references_list.py b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_references_list.py index 5c3c09b..ceb942f 100644 --- a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_references_list.py +++ b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_references_list.py @@ -1,9 +1,11 @@ import datetime -from typing import Any, Dict, List, Optional, Union +from http import HTTPStatus +from typing import Any, Optional, Union +from uuid import UUID import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.osidb_api_v1_flaws_references_list_response_200 import ( OsidbApiV1FlawsReferencesListResponse200, ) @@ -22,8 +24,8 @@ "created_dt__lt": datetime.datetime, "created_dt__lte": datetime.datetime, "description": str, - "exclude_fields": List[str], - "include_fields": List[str], + "exclude_fields": list[str], + "include_fields": list[str], "limit": int, "offset": int, "type": OsidbApiV1FlawsReferencesListType, @@ -36,229 +38,272 @@ "updated_dt__lt": datetime.datetime, "updated_dt__lte": datetime.datetime, "url": str, - "uuid": str, + "uuid": UUID, } def _get_kwargs( - flaw_id: str, + flaw_id: UUID, *, client: AuthenticatedClient, - created_dt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_date: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - description: Union[Unset, None, str] = UNSET, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, - limit: Union[Unset, None, int] = UNSET, - offset: Union[Unset, None, int] = UNSET, - type: Union[Unset, None, OsidbApiV1FlawsReferencesListType] = UNSET, - updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - url: Union[Unset, None, str] = UNSET, - uuid: Union[Unset, None, str] = UNSET, -) -> Dict[str, Any]: - url = "{}/osidb/api/v1/flaws/{flaw_id}/references".format( - client.base_url, - flaw_id=flaw_id, - ) - - headers: Dict[str, Any] = client.get_headers() - - json_created_dt: Union[Unset, None, str] = UNSET + created_dt: Union[Unset, datetime.datetime] = UNSET, + created_dt_date: Union[Unset, datetime.date] = UNSET, + created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + created_dt_lt: Union[Unset, datetime.datetime] = 
UNSET, + created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + description: Union[Unset, str] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, + limit: Union[Unset, int] = UNSET, + offset: Union[Unset, int] = UNSET, + type_: Union[Unset, OsidbApiV1FlawsReferencesListType] = UNSET, + updated_dt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_date: Union[Unset, datetime.date] = UNSET, + updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + url_query: Union[Unset, str] = UNSET, + uuid: Union[Unset, UUID] = UNSET, +) -> dict[str, Any]: + params: dict[str, Any] = {} + + json_created_dt: Union[Unset, str] = UNSET if not isinstance(created_dt, Unset): - json_created_dt = created_dt.isoformat() if created_dt else None + json_created_dt = created_dt.isoformat() + + params["created_dt"] = json_created_dt - json_created_dt_date: Union[Unset, None, str] = UNSET + json_created_dt_date: Union[Unset, str] = UNSET if not isinstance(created_dt_date, Unset): - json_created_dt_date = created_dt_date.isoformat() if created_dt_date else None + json_created_dt_date = created_dt_date.isoformat() - json_created_dt_date_gte: Union[Unset, None, str] = UNSET + params["created_dt__date"] = json_created_dt_date + + json_created_dt_date_gte: Union[Unset, str] = UNSET if not isinstance(created_dt_date_gte, Unset): - json_created_dt_date_gte = ( - created_dt_date_gte.isoformat() if created_dt_date_gte else None - ) + json_created_dt_date_gte = created_dt_date_gte.isoformat() + + params["created_dt__date__gte"] = json_created_dt_date_gte - json_created_dt_date_lte: Union[Unset, None, str] = UNSET + json_created_dt_date_lte: Union[Unset, str] = UNSET if not isinstance(created_dt_date_lte, Unset): - json_created_dt_date_lte = ( - created_dt_date_lte.isoformat() if created_dt_date_lte else None - ) + json_created_dt_date_lte = created_dt_date_lte.isoformat() + + params["created_dt__date__lte"] = json_created_dt_date_lte - json_created_dt_gt: Union[Unset, None, str] = UNSET + json_created_dt_gt: Union[Unset, str] = UNSET if not isinstance(created_dt_gt, Unset): - json_created_dt_gt = created_dt_gt.isoformat() if created_dt_gt else None + json_created_dt_gt = created_dt_gt.isoformat() - json_created_dt_gte: Union[Unset, None, str] = UNSET + params["created_dt__gt"] = json_created_dt_gt + + json_created_dt_gte: Union[Unset, str] = UNSET if not isinstance(created_dt_gte, Unset): - json_created_dt_gte = created_dt_gte.isoformat() if created_dt_gte else None + json_created_dt_gte = created_dt_gte.isoformat() + + params["created_dt__gte"] = json_created_dt_gte - json_created_dt_lt: Union[Unset, None, str] = UNSET + json_created_dt_lt: Union[Unset, str] = UNSET if not isinstance(created_dt_lt, Unset): - json_created_dt_lt = created_dt_lt.isoformat() if created_dt_lt else None + json_created_dt_lt = created_dt_lt.isoformat() + + params["created_dt__lt"] = json_created_dt_lt - json_created_dt_lte: Union[Unset, None, str] = UNSET + json_created_dt_lte: Union[Unset, str] = UNSET if not isinstance(created_dt_lte, Unset): - json_created_dt_lte = created_dt_lte.isoformat() if created_dt_lte else None + json_created_dt_lte = created_dt_lte.isoformat() + + 
params["created_dt__lte"] = json_created_dt_lte + + params["description"] = description - json_exclude_fields: Union[Unset, None, List[str]] = UNSET + json_exclude_fields: Union[Unset, list[str]] = UNSET if not isinstance(exclude_fields, Unset): - if exclude_fields is None: - json_exclude_fields = None - else: - json_exclude_fields = exclude_fields + json_exclude_fields = exclude_fields + + params["exclude_fields"] = json_exclude_fields - json_include_fields: Union[Unset, None, List[str]] = UNSET + json_include_fields: Union[Unset, list[str]] = UNSET if not isinstance(include_fields, Unset): - if include_fields is None: - json_include_fields = None - else: - json_include_fields = include_fields + json_include_fields = include_fields + + params["include_fields"] = json_include_fields + + params["limit"] = limit - json_type: Union[Unset, None, str] = UNSET - if not isinstance(type, Unset): + params["offset"] = offset - json_type = OsidbApiV1FlawsReferencesListType(type).value if type else None + json_type_: Union[Unset, str] = UNSET + if not isinstance(type_, Unset): + json_type_ = OsidbApiV1FlawsReferencesListType(type_).value - json_updated_dt: Union[Unset, None, str] = UNSET + params["type"] = json_type_ + + json_updated_dt: Union[Unset, str] = UNSET if not isinstance(updated_dt, Unset): - json_updated_dt = updated_dt.isoformat() if updated_dt else None + json_updated_dt = updated_dt.isoformat() + + params["updated_dt"] = json_updated_dt - json_updated_dt_date: Union[Unset, None, str] = UNSET + json_updated_dt_date: Union[Unset, str] = UNSET if not isinstance(updated_dt_date, Unset): - json_updated_dt_date = updated_dt_date.isoformat() if updated_dt_date else None + json_updated_dt_date = updated_dt_date.isoformat() + + params["updated_dt__date"] = json_updated_dt_date - json_updated_dt_date_gte: Union[Unset, None, str] = UNSET + json_updated_dt_date_gte: Union[Unset, str] = UNSET if not isinstance(updated_dt_date_gte, Unset): - json_updated_dt_date_gte = ( - updated_dt_date_gte.isoformat() if updated_dt_date_gte else None - ) + json_updated_dt_date_gte = updated_dt_date_gte.isoformat() + + params["updated_dt__date__gte"] = json_updated_dt_date_gte - json_updated_dt_date_lte: Union[Unset, None, str] = UNSET + json_updated_dt_date_lte: Union[Unset, str] = UNSET if not isinstance(updated_dt_date_lte, Unset): - json_updated_dt_date_lte = ( - updated_dt_date_lte.isoformat() if updated_dt_date_lte else None - ) + json_updated_dt_date_lte = updated_dt_date_lte.isoformat() + + params["updated_dt__date__lte"] = json_updated_dt_date_lte - json_updated_dt_gt: Union[Unset, None, str] = UNSET + json_updated_dt_gt: Union[Unset, str] = UNSET if not isinstance(updated_dt_gt, Unset): - json_updated_dt_gt = updated_dt_gt.isoformat() if updated_dt_gt else None + json_updated_dt_gt = updated_dt_gt.isoformat() + + params["updated_dt__gt"] = json_updated_dt_gt - json_updated_dt_gte: Union[Unset, None, str] = UNSET + json_updated_dt_gte: Union[Unset, str] = UNSET if not isinstance(updated_dt_gte, Unset): - json_updated_dt_gte = updated_dt_gte.isoformat() if updated_dt_gte else None + json_updated_dt_gte = updated_dt_gte.isoformat() - json_updated_dt_lt: Union[Unset, None, str] = UNSET + params["updated_dt__gte"] = json_updated_dt_gte + + json_updated_dt_lt: Union[Unset, str] = UNSET if not isinstance(updated_dt_lt, Unset): - json_updated_dt_lt = updated_dt_lt.isoformat() if updated_dt_lt else None + json_updated_dt_lt = updated_dt_lt.isoformat() + + params["updated_dt__lt"] = json_updated_dt_lt - 
json_updated_dt_lte: Union[Unset, None, str] = UNSET + json_updated_dt_lte: Union[Unset, str] = UNSET if not isinstance(updated_dt_lte, Unset): - json_updated_dt_lte = updated_dt_lte.isoformat() if updated_dt_lte else None - - params: Dict[str, Any] = { - "created_dt": json_created_dt, - "created_dt__date": json_created_dt_date, - "created_dt__date__gte": json_created_dt_date_gte, - "created_dt__date__lte": json_created_dt_date_lte, - "created_dt__gt": json_created_dt_gt, - "created_dt__gte": json_created_dt_gte, - "created_dt__lt": json_created_dt_lt, - "created_dt__lte": json_created_dt_lte, - "description": description, - "exclude_fields": json_exclude_fields, - "include_fields": json_include_fields, - "limit": limit, - "offset": offset, - "type": json_type, - "updated_dt": json_updated_dt, - "updated_dt__date": json_updated_dt_date, - "updated_dt__date__gte": json_updated_dt_date_gte, - "updated_dt__date__lte": json_updated_dt_date_lte, - "updated_dt__gt": json_updated_dt_gt, - "updated_dt__gte": json_updated_dt_gte, - "updated_dt__lt": json_updated_dt_lt, - "updated_dt__lte": json_updated_dt_lte, - "url": url, - "uuid": uuid, - } + json_updated_dt_lte = updated_dt_lte.isoformat() + + params["updated_dt__lte"] = json_updated_dt_lte + + params["url"] = url_query + + json_uuid: Union[Unset, str] = UNSET + if not isinstance(uuid, Unset): + json_uuid = str(uuid) + + params["uuid"] = json_uuid + params = {k: v for k, v in params.items() if v is not UNSET and v is not None} - return { - "url": url, - "headers": headers, + _kwargs: dict[str, Any] = { + "url": f"{client.base_url}//osidb/api/v1/flaws/{flaw_id}/references".format( + flaw_id=flaw_id, + ), "params": params, } + return _kwargs + def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[OsidbApiV1FlawsReferencesListResponse200]: if response.status_code == 200: + # } _response_200 = response.json() response_200: OsidbApiV1FlawsReferencesListResponse200 if isinstance(_response_200, Unset): response_200 = UNSET else: - response_200 = OsidbApiV1FlawsReferencesListResponse200.from_dict( - _response_200 - ) + response_200 = OsidbApiV1FlawsReferencesListResponse200.from_dict(_response_200) return response_200 - return None def _build_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[OsidbApiV1FlawsReferencesListResponse200]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) def sync_detailed( - flaw_id: str, + flaw_id: UUID, *, client: AuthenticatedClient, - created_dt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_date: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - description: Union[Unset, None, str] = UNSET, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, - limit: Union[Unset, None, int] = UNSET, - 
offset: Union[Unset, None, int] = UNSET, - type: Union[Unset, None, OsidbApiV1FlawsReferencesListType] = UNSET, - updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - url: Union[Unset, None, str] = UNSET, - uuid: Union[Unset, None, str] = UNSET, + created_dt: Union[Unset, datetime.datetime] = UNSET, + created_dt_date: Union[Unset, datetime.date] = UNSET, + created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + description: Union[Unset, str] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, + limit: Union[Unset, int] = UNSET, + offset: Union[Unset, int] = UNSET, + type_: Union[Unset, OsidbApiV1FlawsReferencesListType] = UNSET, + updated_dt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_date: Union[Unset, datetime.date] = UNSET, + updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + url_query: Union[Unset, str] = UNSET, + uuid: Union[Unset, UUID] = UNSET, ) -> Response[OsidbApiV1FlawsReferencesListResponse200]: + """ + Args: + flaw_id (UUID): + created_dt (Union[Unset, datetime.datetime]): + created_dt_date (Union[Unset, datetime.date]): + created_dt_date_gte (Union[Unset, datetime.date]): + created_dt_date_lte (Union[Unset, datetime.date]): + created_dt_gt (Union[Unset, datetime.datetime]): + created_dt_gte (Union[Unset, datetime.datetime]): + created_dt_lt (Union[Unset, datetime.datetime]): + created_dt_lte (Union[Unset, datetime.datetime]): + description (Union[Unset, str]): + exclude_fields (Union[Unset, list[str]]): + include_fields (Union[Unset, list[str]]): + limit (Union[Unset, int]): + offset (Union[Unset, int]): + type_ (Union[Unset, OsidbApiV1FlawsReferencesListType]): + updated_dt (Union[Unset, datetime.datetime]): + updated_dt_date (Union[Unset, datetime.date]): + updated_dt_date_gte (Union[Unset, datetime.date]): + updated_dt_date_lte (Union[Unset, datetime.date]): + updated_dt_gt (Union[Unset, datetime.datetime]): + updated_dt_gte (Union[Unset, datetime.datetime]): + updated_dt_lt (Union[Unset, datetime.datetime]): + updated_dt_lte (Union[Unset, datetime.datetime]): + url_query (Union[Unset, str]): + uuid (Union[Unset, UUID]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
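Below is a sketch of the renamed filter keywords after regeneration: `type_` replaces `type`, `url_query` replaces `url`, and unset filters are simply omitted from the query string; the example filter values are arbitrary:

# Sketch only: example filter values are arbitrary.
import datetime

from osidb_bindings.bindings.python_client.api.osidb import (
    osidb_api_v1_flaws_references_list as references_list,
)

def recent_references(client, flaw_id):
    return references_list.sync(
        flaw_id=flaw_id,
        client=client,
        created_dt_gte=datetime.datetime(2024, 1, 1, tzinfo=datetime.timezone.utc),
        limit=10,
        url_query="https://bugzilla.redhat.com",  # filters on the reference URL
    )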
+ + Returns: + Response[OsidbApiV1FlawsReferencesListResponse200] + """ + kwargs = _get_kwargs( flaw_id=flaw_id, client=client, @@ -275,7 +320,7 @@ def sync_detailed( include_fields=include_fields, limit=limit, offset=offset, - type=type, + type_=type_, updated_dt=updated_dt, updated_dt_date=updated_dt_date, updated_dt_date_gte=updated_dt_date_gte, @@ -284,7 +329,7 @@ def sync_detailed( updated_dt_gte=updated_dt_gte, updated_dt_lt=updated_dt_lt, updated_dt_lte=updated_dt_lte, - url=url, + url_query=url_query, uuid=uuid, ) @@ -296,39 +341,73 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( - flaw_id: str, + flaw_id: UUID, *, client: AuthenticatedClient, - created_dt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_date: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - description: Union[Unset, None, str] = UNSET, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, - limit: Union[Unset, None, int] = UNSET, - offset: Union[Unset, None, int] = UNSET, - type: Union[Unset, None, OsidbApiV1FlawsReferencesListType] = UNSET, - updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - url: Union[Unset, None, str] = UNSET, - uuid: Union[Unset, None, str] = UNSET, + created_dt: Union[Unset, datetime.datetime] = UNSET, + created_dt_date: Union[Unset, datetime.date] = UNSET, + created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + description: Union[Unset, str] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, + limit: Union[Unset, int] = UNSET, + offset: Union[Unset, int] = UNSET, + type_: Union[Unset, OsidbApiV1FlawsReferencesListType] = UNSET, + updated_dt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_date: Union[Unset, datetime.date] = UNSET, + updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + url_query: Union[Unset, str] = UNSET, + uuid: Union[Unset, UUID] = UNSET, ) -> Optional[OsidbApiV1FlawsReferencesListResponse200]: - """ 
""" + """ + Args: + flaw_id (UUID): + created_dt (Union[Unset, datetime.datetime]): + created_dt_date (Union[Unset, datetime.date]): + created_dt_date_gte (Union[Unset, datetime.date]): + created_dt_date_lte (Union[Unset, datetime.date]): + created_dt_gt (Union[Unset, datetime.datetime]): + created_dt_gte (Union[Unset, datetime.datetime]): + created_dt_lt (Union[Unset, datetime.datetime]): + created_dt_lte (Union[Unset, datetime.datetime]): + description (Union[Unset, str]): + exclude_fields (Union[Unset, list[str]]): + include_fields (Union[Unset, list[str]]): + limit (Union[Unset, int]): + offset (Union[Unset, int]): + type_ (Union[Unset, OsidbApiV1FlawsReferencesListType]): + updated_dt (Union[Unset, datetime.datetime]): + updated_dt_date (Union[Unset, datetime.date]): + updated_dt_date_gte (Union[Unset, datetime.date]): + updated_dt_date_lte (Union[Unset, datetime.date]): + updated_dt_gt (Union[Unset, datetime.datetime]): + updated_dt_gte (Union[Unset, datetime.datetime]): + updated_dt_lt (Union[Unset, datetime.datetime]): + updated_dt_lte (Union[Unset, datetime.datetime]): + url_query (Union[Unset, str]): + uuid (Union[Unset, UUID]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + OsidbApiV1FlawsReferencesListResponse200 + """ return sync_detailed( flaw_id=flaw_id, @@ -346,7 +425,7 @@ def sync( include_fields=include_fields, limit=limit, offset=offset, - type=type, + type_=type_, updated_dt=updated_dt, updated_dt_date=updated_dt_date, updated_dt_date_gte=updated_dt_date_gte, @@ -355,40 +434,76 @@ def sync( updated_dt_gte=updated_dt_gte, updated_dt_lt=updated_dt_lt, updated_dt_lte=updated_dt_lte, - url=url, + url_query=url_query, uuid=uuid, ).parsed -async def async_detailed( - flaw_id: str, +async def asyncio_detailed( + flaw_id: UUID, *, client: AuthenticatedClient, - created_dt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_date: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - description: Union[Unset, None, str] = UNSET, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, - limit: Union[Unset, None, int] = UNSET, - offset: Union[Unset, None, int] = UNSET, - type: Union[Unset, None, OsidbApiV1FlawsReferencesListType] = UNSET, - updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - url: Union[Unset, None, str] = UNSET, - uuid: Union[Unset, None, str] = UNSET, + created_dt: Union[Unset, datetime.datetime] = UNSET, + created_dt_date: Union[Unset, datetime.date] = UNSET, + created_dt_date_gte: Union[Unset, 
datetime.date] = UNSET, + created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + description: Union[Unset, str] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, + limit: Union[Unset, int] = UNSET, + offset: Union[Unset, int] = UNSET, + type_: Union[Unset, OsidbApiV1FlawsReferencesListType] = UNSET, + updated_dt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_date: Union[Unset, datetime.date] = UNSET, + updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + url_query: Union[Unset, str] = UNSET, + uuid: Union[Unset, UUID] = UNSET, ) -> Response[OsidbApiV1FlawsReferencesListResponse200]: + """ + Args: + flaw_id (UUID): + created_dt (Union[Unset, datetime.datetime]): + created_dt_date (Union[Unset, datetime.date]): + created_dt_date_gte (Union[Unset, datetime.date]): + created_dt_date_lte (Union[Unset, datetime.date]): + created_dt_gt (Union[Unset, datetime.datetime]): + created_dt_gte (Union[Unset, datetime.datetime]): + created_dt_lt (Union[Unset, datetime.datetime]): + created_dt_lte (Union[Unset, datetime.datetime]): + description (Union[Unset, str]): + exclude_fields (Union[Unset, list[str]]): + include_fields (Union[Unset, list[str]]): + limit (Union[Unset, int]): + offset (Union[Unset, int]): + type_ (Union[Unset, OsidbApiV1FlawsReferencesListType]): + updated_dt (Union[Unset, datetime.datetime]): + updated_dt_date (Union[Unset, datetime.date]): + updated_dt_date_gte (Union[Unset, datetime.date]): + updated_dt_date_lte (Union[Unset, datetime.date]): + updated_dt_gt (Union[Unset, datetime.datetime]): + updated_dt_gte (Union[Unset, datetime.datetime]): + updated_dt_lt (Union[Unset, datetime.datetime]): + updated_dt_lte (Union[Unset, datetime.datetime]): + url_query (Union[Unset, str]): + uuid (Union[Unset, UUID]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[OsidbApiV1FlawsReferencesListResponse200] + """ + kwargs = _get_kwargs( flaw_id=flaw_id, client=client, @@ -405,7 +520,7 @@ async def async_detailed( include_fields=include_fields, limit=limit, offset=offset, - type=type, + type_=type_, updated_dt=updated_dt, updated_dt_date=updated_dt_date, updated_dt_date_gte=updated_dt_date_gte, @@ -414,7 +529,7 @@ async def async_detailed( updated_dt_gte=updated_dt_gte, updated_dt_lt=updated_dt_lt, updated_dt_lte=updated_dt_lte, - url=url, + url_query=url_query, uuid=uuid, ) @@ -426,42 +541,76 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( - flaw_id: str, +async def asyncio( + flaw_id: UUID, *, client: AuthenticatedClient, - created_dt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_date: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - description: Union[Unset, None, str] = UNSET, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, - limit: Union[Unset, None, int] = UNSET, - offset: Union[Unset, None, int] = UNSET, - type: Union[Unset, None, OsidbApiV1FlawsReferencesListType] = UNSET, - updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - url: Union[Unset, None, str] = UNSET, - uuid: Union[Unset, None, str] = UNSET, + created_dt: Union[Unset, datetime.datetime] = UNSET, + created_dt_date: Union[Unset, datetime.date] = UNSET, + created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + description: Union[Unset, str] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, + limit: Union[Unset, int] = UNSET, + offset: Union[Unset, int] = UNSET, + type_: Union[Unset, OsidbApiV1FlawsReferencesListType] = UNSET, + updated_dt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_date: Union[Unset, datetime.date] = UNSET, + updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + url_query: Union[Unset, str] = UNSET, + uuid: Union[Unset, UUID] = 
UNSET, ) -> Optional[OsidbApiV1FlawsReferencesListResponse200]: - """ """ + """ + Args: + flaw_id (UUID): + created_dt (Union[Unset, datetime.datetime]): + created_dt_date (Union[Unset, datetime.date]): + created_dt_date_gte (Union[Unset, datetime.date]): + created_dt_date_lte (Union[Unset, datetime.date]): + created_dt_gt (Union[Unset, datetime.datetime]): + created_dt_gte (Union[Unset, datetime.datetime]): + created_dt_lt (Union[Unset, datetime.datetime]): + created_dt_lte (Union[Unset, datetime.datetime]): + description (Union[Unset, str]): + exclude_fields (Union[Unset, list[str]]): + include_fields (Union[Unset, list[str]]): + limit (Union[Unset, int]): + offset (Union[Unset, int]): + type_ (Union[Unset, OsidbApiV1FlawsReferencesListType]): + updated_dt (Union[Unset, datetime.datetime]): + updated_dt_date (Union[Unset, datetime.date]): + updated_dt_date_gte (Union[Unset, datetime.date]): + updated_dt_date_lte (Union[Unset, datetime.date]): + updated_dt_gt (Union[Unset, datetime.datetime]): + updated_dt_gte (Union[Unset, datetime.datetime]): + updated_dt_lt (Union[Unset, datetime.datetime]): + updated_dt_lte (Union[Unset, datetime.datetime]): + url_query (Union[Unset, str]): + uuid (Union[Unset, UUID]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + OsidbApiV1FlawsReferencesListResponse200 + """ return ( - await async_detailed( + await asyncio_detailed( flaw_id=flaw_id, client=client, created_dt=created_dt, @@ -477,7 +626,7 @@ async def async_( include_fields=include_fields, limit=limit, offset=offset, - type=type, + type_=type_, updated_dt=updated_dt, updated_dt_date=updated_dt_date, updated_dt_date_gte=updated_dt_date_gte, @@ -486,7 +635,7 @@ async def async_( updated_dt_gte=updated_dt_gte, updated_dt_lt=updated_dt_lt, updated_dt_lte=updated_dt_lte, - url=url, + url_query=url_query, uuid=uuid, ) ).parsed diff --git a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_references_retrieve.py b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_references_retrieve.py index 5d1543d..51f6fd3 100644 --- a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_references_retrieve.py +++ b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_references_retrieve.py @@ -1,98 +1,105 @@ -from typing import Any, Dict, List, Optional, Union +from http import HTTPStatus +from typing import Any, Optional, Union +from uuid import UUID import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.osidb_api_v1_flaws_references_retrieve_response_200 import ( OsidbApiV1FlawsReferencesRetrieveResponse200, ) from ...types import UNSET, Response, Unset QUERY_PARAMS = { - "exclude_fields": List[str], - "include_fields": List[str], + "exclude_fields": list[str], + "include_fields": list[str], } def _get_kwargs( - flaw_id: str, + flaw_id: UUID, id: str, *, client: AuthenticatedClient, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, -) -> Dict[str, Any]: - url = "{}/osidb/api/v1/flaws/{flaw_id}/references/{id}".format( - client.base_url, - flaw_id=flaw_id, - id=id, - ) + exclude_fields: Union[Unset, list[str]] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, +) -> dict[str, Any]: + params: dict[str, Any] = {} - headers: Dict[str, Any] = 
client.get_headers() - - json_exclude_fields: Union[Unset, None, List[str]] = UNSET + json_exclude_fields: Union[Unset, list[str]] = UNSET if not isinstance(exclude_fields, Unset): - if exclude_fields is None: - json_exclude_fields = None - else: - json_exclude_fields = exclude_fields + json_exclude_fields = exclude_fields + + params["exclude_fields"] = json_exclude_fields - json_include_fields: Union[Unset, None, List[str]] = UNSET + json_include_fields: Union[Unset, list[str]] = UNSET if not isinstance(include_fields, Unset): - if include_fields is None: - json_include_fields = None - else: - json_include_fields = include_fields + json_include_fields = include_fields + + params["include_fields"] = json_include_fields - params: Dict[str, Any] = { - "exclude_fields": json_exclude_fields, - "include_fields": json_include_fields, - } params = {k: v for k, v in params.items() if v is not UNSET and v is not None} - return { - "url": url, - "headers": headers, + _kwargs: dict[str, Any] = { + "url": f"{client.base_url}//osidb/api/v1/flaws/{flaw_id}/references/{id}".format( + flaw_id=flaw_id, + id=id, + ), "params": params, } + return _kwargs + def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[OsidbApiV1FlawsReferencesRetrieveResponse200]: if response.status_code == 200: + # } _response_200 = response.json() response_200: OsidbApiV1FlawsReferencesRetrieveResponse200 if isinstance(_response_200, Unset): response_200 = UNSET else: - response_200 = OsidbApiV1FlawsReferencesRetrieveResponse200.from_dict( - _response_200 - ) + response_200 = OsidbApiV1FlawsReferencesRetrieveResponse200.from_dict(_response_200) return response_200 - return None def _build_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[OsidbApiV1FlawsReferencesRetrieveResponse200]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) def sync_detailed( - flaw_id: str, + flaw_id: UUID, id: str, *, client: AuthenticatedClient, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, ) -> Response[OsidbApiV1FlawsReferencesRetrieveResponse200]: + """ + Args: + flaw_id (UUID): + id (str): + exclude_fields (Union[Unset, list[str]]): + include_fields (Union[Unset, list[str]]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[OsidbApiV1FlawsReferencesRetrieveResponse200] + """ + kwargs = _get_kwargs( flaw_id=flaw_id, id=id, @@ -109,18 +116,31 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( - flaw_id: str, + flaw_id: UUID, id: str, *, client: AuthenticatedClient, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, ) -> Optional[OsidbApiV1FlawsReferencesRetrieveResponse200]: - """ """ + """ + Args: + flaw_id (UUID): + id (str): + exclude_fields (Union[Unset, list[str]]): + include_fields (Union[Unset, list[str]]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + OsidbApiV1FlawsReferencesRetrieveResponse200 + """ return sync_detailed( flaw_id=flaw_id, @@ -131,14 +151,29 @@ def sync( ).parsed -async def async_detailed( - flaw_id: str, +async def asyncio_detailed( + flaw_id: UUID, id: str, *, client: AuthenticatedClient, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, ) -> Response[OsidbApiV1FlawsReferencesRetrieveResponse200]: + """ + Args: + flaw_id (UUID): + id (str): + exclude_fields (Union[Unset, list[str]]): + include_fields (Union[Unset, list[str]]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1FlawsReferencesRetrieveResponse200] + """ + kwargs = _get_kwargs( flaw_id=flaw_id, id=id, @@ -155,21 +190,34 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( - flaw_id: str, +async def asyncio( + flaw_id: UUID, id: str, *, client: AuthenticatedClient, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, ) -> Optional[OsidbApiV1FlawsReferencesRetrieveResponse200]: - """ """ + """ + Args: + flaw_id (UUID): + id (str): + exclude_fields (Union[Unset, list[str]]): + include_fields (Union[Unset, list[str]]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
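A retrieval sketch showing the include_fields/exclude_fields query parameters, which trim the serialized reference to the listed fields; the field names used here are illustrative assumptions:

# Sketch only: "url" and "description" are assumed field names.
from osidb_bindings.bindings.python_client.api.osidb import (
    osidb_api_v1_flaws_references_retrieve as references_retrieve,
)

def get_reference(client, flaw_id, reference_id: str):
    return references_retrieve.sync(
        flaw_id=flaw_id,
        id=reference_id,
        client=client,
        include_fields=["url", "description"],
    )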
+ + Returns: + OsidbApiV1FlawsReferencesRetrieveResponse200 + """ return ( - await async_detailed( + await asyncio_detailed( flaw_id=flaw_id, id=id, client=client, diff --git a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_references_update.py b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_references_update.py index 6da1ceb..d221701 100644 --- a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_references_update.py +++ b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_references_update.py @@ -1,8 +1,10 @@ -from typing import Any, Dict, Optional +from http import HTTPStatus +from typing import Any, Optional, Union +from uuid import UUID import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.flaw_reference_put import FlawReferencePut from ...models.osidb_api_v1_flaws_references_update_response_200 import ( OsidbApiV1FlawsReferencesUpdateResponse200, @@ -10,85 +12,101 @@ from ...types import UNSET, Response, Unset QUERY_PARAMS = {} + REQUEST_BODY_TYPE = FlawReferencePut def _get_kwargs( - flaw_id: str, + flaw_id: UUID, id: str, *, client: AuthenticatedClient, - form_data: FlawReferencePut, - multipart_data: FlawReferencePut, - json_body: FlawReferencePut, -) -> Dict[str, Any]: - url = "{}/osidb/api/v1/flaws/{flaw_id}/references/{id}".format( - client.base_url, - flaw_id=flaw_id, - id=id, - ) - - headers: Dict[str, Any] = client.get_headers() + body: Union[ + FlawReferencePut, + FlawReferencePut, + FlawReferencePut, + ], +) -> dict[str, Any]: + headers: dict[str, Any] = client.get_headers() + + _kwargs: dict[str, Any] = { + "url": f"{client.base_url}//osidb/api/v1/flaws/{flaw_id}/references/{id}".format( + flaw_id=flaw_id, + id=id, + ), + } - json_json_body: Dict[str, Any] = UNSET - if not isinstance(json_body, Unset): - json_body.to_dict() + if isinstance(body, FlawReferencePut): + _json_body: dict[str, Any] = UNSET + if not isinstance(body, Unset): + _json_body = body.to_dict() - multipart_multipart_data: Dict[str, Any] = UNSET - if not isinstance(multipart_data, Unset): - multipart_data.to_multipart() + _kwargs["json"] = _json_body + headers["Content-Type"] = "application/json" - return { - "url": url, - "headers": headers, - "json": form_data.to_dict(), - } + _kwargs["headers"] = headers + return _kwargs def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[OsidbApiV1FlawsReferencesUpdateResponse200]: if response.status_code == 200: + # } _response_200 = response.json() response_200: OsidbApiV1FlawsReferencesUpdateResponse200 if isinstance(_response_200, Unset): response_200 = UNSET else: - response_200 = OsidbApiV1FlawsReferencesUpdateResponse200.from_dict( - _response_200 - ) + response_200 = OsidbApiV1FlawsReferencesUpdateResponse200.from_dict(_response_200) return response_200 - return None def _build_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[OsidbApiV1FlawsReferencesUpdateResponse200]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) def sync_detailed( - flaw_id: str, + flaw_id: UUID, id: str, *, client: AuthenticatedClient, - form_data: 
FlawReferencePut, - multipart_data: FlawReferencePut, - json_body: FlawReferencePut, + body: Union[ + FlawReferencePut, + FlawReferencePut, + FlawReferencePut, + ], ) -> Response[OsidbApiV1FlawsReferencesUpdateResponse200]: + """ + Args: + flaw_id (UUID): + id (str): + bugzilla_api_key (str): + body (FlawReferencePut): FlawReference serializer + body (FlawReferencePut): FlawReference serializer + body (FlawReferencePut): FlawReference serializer + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1FlawsReferencesUpdateResponse200] + """ + kwargs = _get_kwargs( flaw_id=flaw_id, id=id, client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) response = requests.put( @@ -99,46 +117,78 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( - flaw_id: str, + flaw_id: UUID, id: str, *, client: AuthenticatedClient, - form_data: FlawReferencePut, - multipart_data: FlawReferencePut, - json_body: FlawReferencePut, + body: Union[ + FlawReferencePut, + FlawReferencePut, + FlawReferencePut, + ], ) -> Optional[OsidbApiV1FlawsReferencesUpdateResponse200]: - """ """ + """ + Args: + flaw_id (UUID): + id (str): + bugzilla_api_key (str): + body (FlawReferencePut): FlawReference serializer + body (FlawReferencePut): FlawReference serializer + body (FlawReferencePut): FlawReference serializer + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + OsidbApiV1FlawsReferencesUpdateResponse200 + """ return sync_detailed( flaw_id=flaw_id, id=id, client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ).parsed -async def async_detailed( - flaw_id: str, +async def asyncio_detailed( + flaw_id: UUID, id: str, *, client: AuthenticatedClient, - form_data: FlawReferencePut, - multipart_data: FlawReferencePut, - json_body: FlawReferencePut, + body: Union[ + FlawReferencePut, + FlawReferencePut, + FlawReferencePut, + ], ) -> Response[OsidbApiV1FlawsReferencesUpdateResponse200]: + """ + Args: + flaw_id (UUID): + id (str): + bugzilla_api_key (str): + body (FlawReferencePut): FlawReference serializer + body (FlawReferencePut): FlawReference serializer + body (FlawReferencePut): FlawReference serializer + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[OsidbApiV1FlawsReferencesUpdateResponse200] + """ + kwargs = _get_kwargs( flaw_id=flaw_id, id=id, client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) async with client.get_async_session().put( @@ -149,27 +199,42 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( - flaw_id: str, +async def asyncio( + flaw_id: UUID, id: str, *, client: AuthenticatedClient, - form_data: FlawReferencePut, - multipart_data: FlawReferencePut, - json_body: FlawReferencePut, + body: Union[ + FlawReferencePut, + FlawReferencePut, + FlawReferencePut, + ], ) -> Optional[OsidbApiV1FlawsReferencesUpdateResponse200]: - """ """ + """ + Args: + flaw_id (UUID): + id (str): + bugzilla_api_key (str): + body (FlawReferencePut): FlawReference serializer + body (FlawReferencePut): FlawReference serializer + body (FlawReferencePut): FlawReference serializer + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + OsidbApiV1FlawsReferencesUpdateResponse200 + """ return ( - await async_detailed( + await asyncio_detailed( flaw_id=flaw_id, id=id, client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) ).parsed diff --git a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_reject_create.py b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_reject_create.py index 1a66793..1adb671 100644 --- a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_reject_create.py +++ b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_reject_create.py @@ -1,8 +1,9 @@ -from typing import Any, Dict, Optional +from http import HTTPStatus +from typing import Any, Optional, Union import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.osidb_api_v1_flaws_reject_create_response_200 import ( OsidbApiV1FlawsRejectCreateResponse200, ) @@ -10,6 +11,7 @@ from ...types import UNSET, Response, Unset QUERY_PARAMS = {} + REQUEST_BODY_TYPE = Reject @@ -17,57 +19,55 @@ def _get_kwargs( flaw_id: str, *, client: AuthenticatedClient, - form_data: Reject, - multipart_data: Reject, - json_body: Reject, -) -> Dict[str, Any]: - url = "{}/osidb/api/v1/flaws/{flaw_id}/reject".format( - client.base_url, - flaw_id=flaw_id, - ) - - headers: Dict[str, Any] = client.get_headers() + body: Union[ + Reject, + Reject, + Reject, + ], +) -> dict[str, Any]: + headers: dict[str, Any] = client.get_headers() + + _kwargs: dict[str, Any] = { + "url": f"{client.base_url}//osidb/api/v1/flaws/{flaw_id}/reject".format( + flaw_id=flaw_id, + ), + } - json_json_body: Dict[str, Any] = UNSET - if not isinstance(json_body, Unset): - json_body.to_dict() + if isinstance(body, Reject): + _json_body: dict[str, Any] = UNSET + if not isinstance(body, Unset): + _json_body = body.to_dict() - multipart_multipart_data: Dict[str, Any] = UNSET - if not isinstance(multipart_data, Unset): - multipart_data.to_multipart() + _kwargs["json"] = _json_body + headers["Content-Type"] = "application/json" - return { - "url": url, - "headers": headers, - "json": form_data.to_dict(), - } + _kwargs["headers"] = headers + return _kwargs def 
_parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[OsidbApiV1FlawsRejectCreateResponse200]: if response.status_code == 200: + # } _response_200 = response.json() response_200: OsidbApiV1FlawsRejectCreateResponse200 if isinstance(_response_200, Unset): response_200 = UNSET else: - response_200 = OsidbApiV1FlawsRejectCreateResponse200.from_dict( - _response_200 - ) + response_200 = OsidbApiV1FlawsRejectCreateResponse200.from_dict(_response_200) return response_200 - return None def _build_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[OsidbApiV1FlawsRejectCreateResponse200]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) @@ -75,16 +75,36 @@ def sync_detailed( flaw_id: str, *, client: AuthenticatedClient, - form_data: Reject, - multipart_data: Reject, - json_body: Reject, + body: Union[ + Reject, + Reject, + Reject, + ], ) -> Response[OsidbApiV1FlawsRejectCreateResponse200]: + """workflow promotion API endpoint + + try to reject a flaw / task + + Args: + flaw_id (str): + bugzilla_api_key (str): + jira_api_key (str): + body (Reject): Task rejection serializer + body (Reject): Task rejection serializer + body (Reject): Task rejection serializer + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1FlawsRejectCreateResponse200] + """ + kwargs = _get_kwargs( flaw_id=flaw_id, client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) response = requests.post( @@ -95,44 +115,80 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( flaw_id: str, *, client: AuthenticatedClient, - form_data: Reject, - multipart_data: Reject, - json_body: Reject, + body: Union[ + Reject, + Reject, + Reject, + ], ) -> Optional[OsidbApiV1FlawsRejectCreateResponse200]: """workflow promotion API endpoint - try to reject a flaw / task""" + try to reject a flaw / task + + Args: + flaw_id (str): + bugzilla_api_key (str): + jira_api_key (str): + body (Reject): Task rejection serializer + body (Reject): Task rejection serializer + body (Reject): Task rejection serializer + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + OsidbApiV1FlawsRejectCreateResponse200 + """ return sync_detailed( flaw_id=flaw_id, client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ).parsed -async def async_detailed( +async def asyncio_detailed( flaw_id: str, *, client: AuthenticatedClient, - form_data: Reject, - multipart_data: Reject, - json_body: Reject, + body: Union[ + Reject, + Reject, + Reject, + ], ) -> Response[OsidbApiV1FlawsRejectCreateResponse200]: + """workflow promotion API endpoint + + try to reject a flaw / task + + Args: + flaw_id (str): + bugzilla_api_key (str): + jira_api_key (str): + body (Reject): Task rejection serializer + body (Reject): Task rejection serializer + body (Reject): Task rejection serializer + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1FlawsRejectCreateResponse200] + """ + kwargs = _get_kwargs( flaw_id=flaw_id, client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) async with client.get_async_session().post( @@ -143,27 +199,43 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( +async def asyncio( flaw_id: str, *, client: AuthenticatedClient, - form_data: Reject, - multipart_data: Reject, - json_body: Reject, + body: Union[ + Reject, + Reject, + Reject, + ], ) -> Optional[OsidbApiV1FlawsRejectCreateResponse200]: """workflow promotion API endpoint - try to reject a flaw / task""" + try to reject a flaw / task + + Args: + flaw_id (str): + bugzilla_api_key (str): + jira_api_key (str): + body (Reject): Task rejection serializer + body (Reject): Task rejection serializer + body (Reject): Task rejection serializer + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + OsidbApiV1FlawsRejectCreateResponse200 + """ return ( - await async_detailed( + await asyncio_detailed( flaw_id=flaw_id, client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) ).parsed diff --git a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_retrieve.py b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_retrieve.py index db22c86..78e0cda 100644 --- a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_retrieve.py +++ b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_retrieve.py @@ -1,19 +1,20 @@ -from typing import Any, Dict, List, Optional, Union +from http import HTTPStatus +from typing import Any, Optional, Union import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.osidb_api_v1_flaws_retrieve_response_200 import ( OsidbApiV1FlawsRetrieveResponse200, ) from ...types import UNSET, Response, Unset QUERY_PARAMS = { - "exclude_fields": List[str], - "include_fields": List[str], - "include_meta_attr": List[str], + "exclude_fields": list[str], + "include_fields": list[str], + "include_meta_attr": list[str], "query": str, - "tracker_ids": List[str], + "tracker_ids": list[str], } @@ -21,67 +22,57 @@ def _get_kwargs( id: str, *, client: AuthenticatedClient, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, - include_meta_attr: Union[Unset, None, List[str]] = UNSET, - query: Union[Unset, None, str] = UNSET, - tracker_ids: Union[Unset, None, List[str]] = UNSET, -) -> Dict[str, Any]: - url = "{}/osidb/api/v1/flaws/{id}".format( - client.base_url, - id=id, - ) + exclude_fields: Union[Unset, list[str]] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, + include_meta_attr: Union[Unset, list[str]] = UNSET, + query: Union[Unset, str] = UNSET, + tracker_ids: Union[Unset, list[str]] = UNSET, +) -> dict[str, Any]: + params: dict[str, Any] = {} - headers: Dict[str, Any] = client.get_headers() - - json_exclude_fields: Union[Unset, None, List[str]] = UNSET + json_exclude_fields: Union[Unset, list[str]] = UNSET if not isinstance(exclude_fields, Unset): - if exclude_fields is None: - json_exclude_fields = None - else: - json_exclude_fields = exclude_fields + json_exclude_fields = exclude_fields - json_include_fields: Union[Unset, None, List[str]] = UNSET + params["exclude_fields"] = json_exclude_fields + + json_include_fields: Union[Unset, list[str]] = UNSET if not isinstance(include_fields, Unset): - if include_fields is None: - json_include_fields = None - else: - json_include_fields = include_fields + json_include_fields = include_fields + + params["include_fields"] = json_include_fields - json_include_meta_attr: Union[Unset, None, List[str]] = UNSET + json_include_meta_attr: Union[Unset, list[str]] = UNSET if not isinstance(include_meta_attr, Unset): - if include_meta_attr is None: - json_include_meta_attr = None - else: - json_include_meta_attr = include_meta_attr + json_include_meta_attr = include_meta_attr + + params["include_meta_attr"] = json_include_meta_attr + + params["query"] = query - json_tracker_ids: Union[Unset, None, List[str]] = UNSET + json_tracker_ids: Union[Unset, list[str]] = UNSET if not isinstance(tracker_ids, Unset): - if tracker_ids is None: - json_tracker_ids = None - else: - json_tracker_ids = tracker_ids - - params: Dict[str, Any] = { - "exclude_fields": json_exclude_fields, - "include_fields": 
json_include_fields, - "include_meta_attr": json_include_meta_attr, - "query": query, - "tracker_ids": json_tracker_ids, - } + json_tracker_ids = tracker_ids + + params["tracker_ids"] = json_tracker_ids + params = {k: v for k, v in params.items() if v is not UNSET and v is not None} - return { - "url": url, - "headers": headers, + _kwargs: dict[str, Any] = { + "url": f"{client.base_url}//osidb/api/v1/flaws/{id}".format( + id=id, + ), "params": params, } + return _kwargs + def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[OsidbApiV1FlawsRetrieveResponse200]: if response.status_code == 200: + # } _response_200 = response.json() response_200: OsidbApiV1FlawsRetrieveResponse200 if isinstance(_response_200, Unset): @@ -90,17 +81,16 @@ def _parse_response( response_200 = OsidbApiV1FlawsRetrieveResponse200.from_dict(_response_200) return response_200 - return None def _build_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[OsidbApiV1FlawsRetrieveResponse200]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) @@ -108,12 +98,29 @@ def sync_detailed( id: str, *, client: AuthenticatedClient, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, - include_meta_attr: Union[Unset, None, List[str]] = UNSET, - query: Union[Unset, None, str] = UNSET, - tracker_ids: Union[Unset, None, List[str]] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, + include_meta_attr: Union[Unset, list[str]] = UNSET, + query: Union[Unset, str] = UNSET, + tracker_ids: Union[Unset, list[str]] = UNSET, ) -> Response[OsidbApiV1FlawsRetrieveResponse200]: + """ + Args: + id (str): + exclude_fields (Union[Unset, list[str]]): + include_fields (Union[Unset, list[str]]): + include_meta_attr (Union[Unset, list[str]]): + query (Union[Unset, str]): + tracker_ids (Union[Unset, list[str]]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[OsidbApiV1FlawsRetrieveResponse200] + """ + kwargs = _get_kwargs( id=id, client=client, @@ -132,20 +139,35 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( id: str, *, client: AuthenticatedClient, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, - include_meta_attr: Union[Unset, None, List[str]] = UNSET, - query: Union[Unset, None, str] = UNSET, - tracker_ids: Union[Unset, None, List[str]] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, + include_meta_attr: Union[Unset, list[str]] = UNSET, + query: Union[Unset, str] = UNSET, + tracker_ids: Union[Unset, list[str]] = UNSET, ) -> Optional[OsidbApiV1FlawsRetrieveResponse200]: - """ """ + """ + Args: + id (str): + exclude_fields (Union[Unset, list[str]]): + include_fields (Union[Unset, list[str]]): + include_meta_attr (Union[Unset, list[str]]): + query (Union[Unset, str]): + tracker_ids (Union[Unset, list[str]]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + OsidbApiV1FlawsRetrieveResponse200 + """ return sync_detailed( id=id, @@ -158,16 +180,33 @@ def sync( ).parsed -async def async_detailed( +async def asyncio_detailed( id: str, *, client: AuthenticatedClient, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, - include_meta_attr: Union[Unset, None, List[str]] = UNSET, - query: Union[Unset, None, str] = UNSET, - tracker_ids: Union[Unset, None, List[str]] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, + include_meta_attr: Union[Unset, list[str]] = UNSET, + query: Union[Unset, str] = UNSET, + tracker_ids: Union[Unset, list[str]] = UNSET, ) -> Response[OsidbApiV1FlawsRetrieveResponse200]: + """ + Args: + id (str): + exclude_fields (Union[Unset, list[str]]): + include_fields (Union[Unset, list[str]]): + include_meta_attr (Union[Unset, list[str]]): + query (Union[Unset, str]): + tracker_ids (Union[Unset, list[str]]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[OsidbApiV1FlawsRetrieveResponse200] + """ + kwargs = _get_kwargs( id=id, client=client, @@ -186,23 +225,38 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( +async def asyncio( id: str, *, client: AuthenticatedClient, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, - include_meta_attr: Union[Unset, None, List[str]] = UNSET, - query: Union[Unset, None, str] = UNSET, - tracker_ids: Union[Unset, None, List[str]] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, + include_meta_attr: Union[Unset, list[str]] = UNSET, + query: Union[Unset, str] = UNSET, + tracker_ids: Union[Unset, list[str]] = UNSET, ) -> Optional[OsidbApiV1FlawsRetrieveResponse200]: - """ """ + """ + Args: + id (str): + exclude_fields (Union[Unset, list[str]]): + include_fields (Union[Unset, list[str]]): + include_meta_attr (Union[Unset, list[str]]): + query (Union[Unset, str]): + tracker_ids (Union[Unset, list[str]]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + OsidbApiV1FlawsRetrieveResponse200 + """ return ( - await async_detailed( + await asyncio_detailed( id=id, client=client, exclude_fields=exclude_fields, diff --git a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_update.py b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_update.py index 25eb5de..a8d4660 100644 --- a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_update.py +++ b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_flaws_update.py @@ -1,8 +1,9 @@ -from typing import Any, Dict, Optional, Union +from http import HTTPStatus +from typing import Any, Optional, Union import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.flaw import Flaw from ...models.osidb_api_v1_flaws_update_response_200 import ( OsidbApiV1FlawsUpdateResponse200, @@ -12,6 +13,7 @@ QUERY_PARAMS = { "create_jira_task": bool, } + REQUEST_BODY_TYPE = Flaw @@ -19,43 +21,45 @@ def _get_kwargs( id: str, *, client: AuthenticatedClient, - form_data: Flaw, - multipart_data: Flaw, - json_body: Flaw, - create_jira_task: Union[Unset, None, bool] = UNSET, -) -> Dict[str, Any]: - url = "{}/osidb/api/v1/flaws/{id}".format( - client.base_url, - id=id, - ) + body: Union[ + Flaw, + Flaw, + Flaw, + ], + create_jira_task: Union[Unset, bool] = UNSET, +) -> dict[str, Any]: + headers: dict[str, Any] = client.get_headers() - headers: Dict[str, Any] = client.get_headers() + params: dict[str, Any] = {} - params: Dict[str, Any] = { - "create_jira_task": create_jira_task, - } - params = {k: v for k, v in params.items() if v is not UNSET and v is not None} - - json_json_body: Dict[str, Any] = UNSET - if not isinstance(json_body, Unset): - json_body.to_dict() + params["create_jira_task"] = create_jira_task - multipart_multipart_data: Dict[str, Any] = UNSET - if not isinstance(multipart_data, Unset): - multipart_data.to_multipart() + params = {k: v for k, v in params.items() if v is not UNSET and v is not None} - return { - "url": url, - "headers": headers, - "json": form_data.to_dict(), + _kwargs: dict[str, Any] 
= { + "url": f"{client.base_url}//osidb/api/v1/flaws/{id}".format( + id=id, + ), "params": params, } + if isinstance(body, Flaw): + _json_body: dict[str, Any] = UNSET + if not isinstance(body, Unset): + _json_body = body.to_dict() + + _kwargs["json"] = _json_body + headers["Content-Type"] = "application/json" + + _kwargs["headers"] = headers + return _kwargs + def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[OsidbApiV1FlawsUpdateResponse200]: if response.status_code == 200: + # } _response_200 = response.json() response_200: OsidbApiV1FlawsUpdateResponse200 if isinstance(_response_200, Unset): @@ -64,17 +68,16 @@ def _parse_response( response_200 = OsidbApiV1FlawsUpdateResponse200.from_dict(_response_200) return response_200 - return None def _build_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[OsidbApiV1FlawsUpdateResponse200]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) @@ -82,17 +85,35 @@ def sync_detailed( id: str, *, client: AuthenticatedClient, - form_data: Flaw, - multipart_data: Flaw, - json_body: Flaw, - create_jira_task: Union[Unset, None, bool] = UNSET, + body: Union[ + Flaw, + Flaw, + Flaw, + ], + create_jira_task: Union[Unset, bool] = UNSET, ) -> Response[OsidbApiV1FlawsUpdateResponse200]: + """ + Args: + id (str): + create_jira_task (Union[Unset, bool]): + bugzilla_api_key (str): + jira_api_key (str): + body (Flaw): serialize flaw model + body (Flaw): serialize flaw model + body (Flaw): serialize flaw model + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1FlawsUpdateResponse200] + """ + kwargs = _get_kwargs( id=id, client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, create_jira_task=create_jira_task, ) @@ -104,45 +125,79 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( id: str, *, client: AuthenticatedClient, - form_data: Flaw, - multipart_data: Flaw, - json_body: Flaw, - create_jira_task: Union[Unset, None, bool] = UNSET, + body: Union[ + Flaw, + Flaw, + Flaw, + ], + create_jira_task: Union[Unset, bool] = UNSET, ) -> Optional[OsidbApiV1FlawsUpdateResponse200]: - """ """ + """ + Args: + id (str): + create_jira_task (Union[Unset, bool]): + bugzilla_api_key (str): + jira_api_key (str): + body (Flaw): serialize flaw model + body (Flaw): serialize flaw model + body (Flaw): serialize flaw model + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + OsidbApiV1FlawsUpdateResponse200 + """ return sync_detailed( id=id, client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, create_jira_task=create_jira_task, ).parsed -async def async_detailed( +async def asyncio_detailed( id: str, *, client: AuthenticatedClient, - form_data: Flaw, - multipart_data: Flaw, - json_body: Flaw, - create_jira_task: Union[Unset, None, bool] = UNSET, + body: Union[ + Flaw, + Flaw, + Flaw, + ], + create_jira_task: Union[Unset, bool] = UNSET, ) -> Response[OsidbApiV1FlawsUpdateResponse200]: + """ + Args: + id (str): + create_jira_task (Union[Unset, bool]): + bugzilla_api_key (str): + jira_api_key (str): + body (Flaw): serialize flaw model + body (Flaw): serialize flaw model + body (Flaw): serialize flaw model + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1FlawsUpdateResponse200] + """ + kwargs = _get_kwargs( id=id, client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, create_jira_task=create_jira_task, ) @@ -154,27 +209,43 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( +async def asyncio( id: str, *, client: AuthenticatedClient, - form_data: Flaw, - multipart_data: Flaw, - json_body: Flaw, - create_jira_task: Union[Unset, None, bool] = UNSET, + body: Union[ + Flaw, + Flaw, + Flaw, + ], + create_jira_task: Union[Unset, bool] = UNSET, ) -> Optional[OsidbApiV1FlawsUpdateResponse200]: - """ """ + """ + Args: + id (str): + create_jira_task (Union[Unset, bool]): + bugzilla_api_key (str): + jira_api_key (str): + body (Flaw): serialize flaw model + body (Flaw): serialize flaw model + body (Flaw): serialize flaw model + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + OsidbApiV1FlawsUpdateResponse200 + """ return ( - await async_detailed( + await asyncio_detailed( id=id, client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, create_jira_task=create_jira_task, ) ).parsed diff --git a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_manifest_retrieve.py b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_manifest_retrieve.py index 2378803..ea73cbb 100644 --- a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_manifest_retrieve.py +++ b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_manifest_retrieve.py @@ -1,8 +1,9 @@ -from typing import Any, Dict, Optional +from http import HTTPStatus +from typing import Any, Optional, Union import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.osidb_api_v1_manifest_retrieve_response_200 import ( OsidbApiV1ManifestRetrieveResponse200, ) @@ -14,44 +15,37 @@ def _get_kwargs( *, client: AuthenticatedClient, -) -> Dict[str, Any]: - url = "{}/osidb/api/v1/manifest".format( - client.base_url, - ) - - headers: Dict[str, Any] = client.get_headers() - - return { - "url": url, - "headers": headers, +) -> dict[str, Any]: + _kwargs: dict[str, Any] = { + "url": f"{client.base_url}/osidb/api/v1/manifest", } + return _kwargs + def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[OsidbApiV1ManifestRetrieveResponse200]: if response.status_code == 200: + # } _response_200 = response.json() response_200: OsidbApiV1ManifestRetrieveResponse200 if isinstance(_response_200, Unset): response_200 = UNSET else: - response_200 = OsidbApiV1ManifestRetrieveResponse200.from_dict( - _response_200 - ) + response_200 = OsidbApiV1ManifestRetrieveResponse200.from_dict(_response_200) return response_200 - return None def _build_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[OsidbApiV1ManifestRetrieveResponse200]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) @@ -59,6 +53,16 @@ def sync_detailed( *, client: AuthenticatedClient, ) -> Response[OsidbApiV1ManifestRetrieveResponse200]: + """HTTP get /manifest + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1ManifestRetrieveResponse200] + """ + kwargs = _get_kwargs( client=client, ) @@ -71,24 +75,42 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( *, client: AuthenticatedClient, ) -> Optional[OsidbApiV1ManifestRetrieveResponse200]: - """HTTP get /manifest""" + """HTTP get /manifest + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + OsidbApiV1ManifestRetrieveResponse200 + """ return sync_detailed( client=client, ).parsed -async def async_detailed( +async def asyncio_detailed( *, client: AuthenticatedClient, ) -> Response[OsidbApiV1ManifestRetrieveResponse200]: + """HTTP get /manifest + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1ManifestRetrieveResponse200] + """ + kwargs = _get_kwargs( client=client, ) @@ -101,17 +123,25 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( +async def asyncio( *, client: AuthenticatedClient, ) -> Optional[OsidbApiV1ManifestRetrieveResponse200]: - """HTTP get /manifest""" + """HTTP get /manifest + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + OsidbApiV1ManifestRetrieveResponse200 + """ return ( - await async_detailed( + await asyncio_detailed( client=client, ) ).parsed diff --git a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_schema_retrieve.py b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_schema_retrieve.py index 19afe12..2d60cf2 100644 --- a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_schema_retrieve.py +++ b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_schema_retrieve.py @@ -1,8 +1,9 @@ -from typing import Any, Dict, Optional, Union +from http import HTTPStatus +from typing import Any, Optional, Union import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.osidb_api_v1_schema_retrieve_format import OsidbApiV1SchemaRetrieveFormat from ...models.osidb_api_v1_schema_retrieve_lang import OsidbApiV1SchemaRetrieveLang from ...models.osidb_api_v1_schema_retrieve_response_200 import ( @@ -19,44 +20,38 @@ def _get_kwargs( *, client: AuthenticatedClient, - format_: Union[Unset, None, OsidbApiV1SchemaRetrieveFormat] = UNSET, - lang: Union[Unset, None, OsidbApiV1SchemaRetrieveLang] = UNSET, -) -> Dict[str, Any]: - url = "{}/osidb/api/v1/schema/".format( - client.base_url, - ) - - headers: Dict[str, Any] = client.get_headers() + format_: Union[Unset, OsidbApiV1SchemaRetrieveFormat] = UNSET, + lang: Union[Unset, OsidbApiV1SchemaRetrieveLang] = UNSET, +) -> dict[str, Any]: + params: dict[str, Any] = {} - json_format_: Union[Unset, None, str] = UNSET + json_format_: Union[Unset, str] = UNSET if not isinstance(format_, Unset): + json_format_ = OsidbApiV1SchemaRetrieveFormat(format_).value - json_format_ = ( - OsidbApiV1SchemaRetrieveFormat(format_).value if format_ else None - ) + params["format"] = json_format_ - json_lang: Union[Unset, None, str] = UNSET + json_lang: Union[Unset, str] = UNSET if not isinstance(lang, Unset): + json_lang = OsidbApiV1SchemaRetrieveLang(lang).value - json_lang = OsidbApiV1SchemaRetrieveLang(lang).value if lang else None + params["lang"] = json_lang - params: Dict[str, Any] = { - "format": json_format_, - "lang": json_lang, - } params = {k: v for k, v in params.items() if v is not UNSET and v is not None} - return { - "url": url, - "headers": headers, + _kwargs: dict[str, Any] = { + "url": 
f"{client.base_url}/osidb/api/v1/schema/", "params": params, } + return _kwargs + def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[OsidbApiV1SchemaRetrieveResponse200]: if response.status_code == 200: + # } _response_200 = response.json() response_200: OsidbApiV1SchemaRetrieveResponse200 if isinstance(_response_200, Unset): @@ -65,26 +60,42 @@ def _parse_response( response_200 = OsidbApiV1SchemaRetrieveResponse200.from_dict(_response_200) return response_200 - return None def _build_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[OsidbApiV1SchemaRetrieveResponse200]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) def sync_detailed( *, client: AuthenticatedClient, - format_: Union[Unset, None, OsidbApiV1SchemaRetrieveFormat] = UNSET, - lang: Union[Unset, None, OsidbApiV1SchemaRetrieveLang] = UNSET, + format_: Union[Unset, OsidbApiV1SchemaRetrieveFormat] = UNSET, + lang: Union[Unset, OsidbApiV1SchemaRetrieveLang] = UNSET, ) -> Response[OsidbApiV1SchemaRetrieveResponse200]: + """OpenApi3 schema for this API. Format can be selected via content negotiation. + + - YAML: application/vnd.oai.openapi + - JSON: application/vnd.oai.openapi+json + + Args: + format_ (Union[Unset, OsidbApiV1SchemaRetrieveFormat]): + lang (Union[Unset, OsidbApiV1SchemaRetrieveLang]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1SchemaRetrieveResponse200] + """ + kwargs = _get_kwargs( client=client, format_=format_, @@ -99,19 +110,31 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( *, client: AuthenticatedClient, - format_: Union[Unset, None, OsidbApiV1SchemaRetrieveFormat] = UNSET, - lang: Union[Unset, None, OsidbApiV1SchemaRetrieveLang] = UNSET, + format_: Union[Unset, OsidbApiV1SchemaRetrieveFormat] = UNSET, + lang: Union[Unset, OsidbApiV1SchemaRetrieveLang] = UNSET, ) -> Optional[OsidbApiV1SchemaRetrieveResponse200]: """OpenApi3 schema for this API. Format can be selected via content negotiation. - YAML: application/vnd.oai.openapi - - JSON: application/vnd.oai.openapi+json""" + - JSON: application/vnd.oai.openapi+json + + Args: + format_ (Union[Unset, OsidbApiV1SchemaRetrieveFormat]): + lang (Union[Unset, OsidbApiV1SchemaRetrieveLang]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + OsidbApiV1SchemaRetrieveResponse200 + """ return sync_detailed( client=client, @@ -120,12 +143,29 @@ def sync( ).parsed -async def async_detailed( +async def asyncio_detailed( *, client: AuthenticatedClient, - format_: Union[Unset, None, OsidbApiV1SchemaRetrieveFormat] = UNSET, - lang: Union[Unset, None, OsidbApiV1SchemaRetrieveLang] = UNSET, + format_: Union[Unset, OsidbApiV1SchemaRetrieveFormat] = UNSET, + lang: Union[Unset, OsidbApiV1SchemaRetrieveLang] = UNSET, ) -> Response[OsidbApiV1SchemaRetrieveResponse200]: + """OpenApi3 schema for this API. Format can be selected via content negotiation. + + - YAML: application/vnd.oai.openapi + - JSON: application/vnd.oai.openapi+json + + Args: + format_ (Union[Unset, OsidbApiV1SchemaRetrieveFormat]): + lang (Union[Unset, OsidbApiV1SchemaRetrieveLang]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1SchemaRetrieveResponse200] + """ + kwargs = _get_kwargs( client=client, format_=format_, @@ -140,22 +180,34 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( +async def asyncio( *, client: AuthenticatedClient, - format_: Union[Unset, None, OsidbApiV1SchemaRetrieveFormat] = UNSET, - lang: Union[Unset, None, OsidbApiV1SchemaRetrieveLang] = UNSET, + format_: Union[Unset, OsidbApiV1SchemaRetrieveFormat] = UNSET, + lang: Union[Unset, OsidbApiV1SchemaRetrieveLang] = UNSET, ) -> Optional[OsidbApiV1SchemaRetrieveResponse200]: """OpenApi3 schema for this API. Format can be selected via content negotiation. - YAML: application/vnd.oai.openapi - - JSON: application/vnd.oai.openapi+json""" + - JSON: application/vnd.oai.openapi+json + + Args: + format_ (Union[Unset, OsidbApiV1SchemaRetrieveFormat]): + lang (Union[Unset, OsidbApiV1SchemaRetrieveLang]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + OsidbApiV1SchemaRetrieveResponse200 + """ return ( - await async_detailed( + await asyncio_detailed( client=client, format_=format_, lang=lang, diff --git a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_status_retrieve.py b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_status_retrieve.py index 10a3dba..f454b49 100644 --- a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_status_retrieve.py +++ b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_status_retrieve.py @@ -1,8 +1,9 @@ -from typing import Any, Dict, Optional +from http import HTTPStatus +from typing import Any, Optional, Union import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.osidb_api_v1_status_retrieve_response_200 import ( OsidbApiV1StatusRetrieveResponse200, ) @@ -14,23 +15,19 @@ def _get_kwargs( *, client: AuthenticatedClient, -) -> Dict[str, Any]: - url = "{}/osidb/api/v1/status".format( - client.base_url, - ) - - headers: Dict[str, Any] = client.get_headers() - - return { - "url": url, - "headers": headers, +) -> dict[str, Any]: + _kwargs: dict[str, Any] = { + "url": f"{client.base_url}/osidb/api/v1/status", } + return _kwargs + def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[OsidbApiV1StatusRetrieveResponse200]: if response.status_code == 200: + # } _response_200 = response.json() response_200: OsidbApiV1StatusRetrieveResponse200 if isinstance(_response_200, Unset): @@ -39,17 +36,16 @@ def _parse_response( response_200 = OsidbApiV1StatusRetrieveResponse200.from_dict(_response_200) return response_200 - return None def _build_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[OsidbApiV1StatusRetrieveResponse200]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) @@ -57,6 +53,16 @@ def sync_detailed( *, client: AuthenticatedClient, ) -> Response[OsidbApiV1StatusRetrieveResponse200]: + """HTTP get /status + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1StatusRetrieveResponse200] + """ + kwargs = _get_kwargs( client=client, ) @@ -69,24 +75,42 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( *, client: AuthenticatedClient, ) -> Optional[OsidbApiV1StatusRetrieveResponse200]: - """HTTP get /status""" + """HTTP get /status + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + OsidbApiV1StatusRetrieveResponse200 + """ return sync_detailed( client=client, ).parsed -async def async_detailed( +async def asyncio_detailed( *, client: AuthenticatedClient, ) -> Response[OsidbApiV1StatusRetrieveResponse200]: + """HTTP get /status + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1StatusRetrieveResponse200] + """ + kwargs = _get_kwargs( client=client, ) @@ -99,17 +123,25 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( +async def asyncio( *, client: AuthenticatedClient, ) -> Optional[OsidbApiV1StatusRetrieveResponse200]: - """HTTP get /status""" + """HTTP get /status + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + OsidbApiV1StatusRetrieveResponse200 + """ return ( - await async_detailed( + await asyncio_detailed( client=client, ) ).parsed diff --git a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_trackers_create.py b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_trackers_create.py index 0d42c68..83681a2 100644 --- a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_trackers_create.py +++ b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_trackers_create.py @@ -1,8 +1,9 @@ -from typing import Any, Dict, Optional +from http import HTTPStatus +from typing import Any, Optional, Union import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.osidb_api_v1_trackers_create_response_201 import ( OsidbApiV1TrackersCreateResponse201, ) @@ -10,41 +11,42 @@ from ...types import UNSET, Response, Unset QUERY_PARAMS = {} + REQUEST_BODY_TYPE = TrackerPost def _get_kwargs( *, client: AuthenticatedClient, - form_data: TrackerPost, - multipart_data: TrackerPost, - json_body: TrackerPost, -) -> Dict[str, Any]: - url = "{}/osidb/api/v1/trackers".format( - client.base_url, - ) + body: Union[ + TrackerPost, + TrackerPost, + TrackerPost, + ], +) -> dict[str, Any]: + headers: dict[str, Any] = client.get_headers() + + _kwargs: dict[str, Any] = { + "url": f"{client.base_url}/osidb/api/v1/trackers", + } - headers: Dict[str, Any] = client.get_headers() + if isinstance(body, TrackerPost): + _json_body: dict[str, Any] = UNSET + if not isinstance(body, Unset): + _json_body = body.to_dict() - json_json_body: Dict[str, Any] = UNSET - if not isinstance(json_body, Unset): - json_body.to_dict() + _kwargs["json"] = _json_body + headers["Content-Type"] = "application/json" - multipart_multipart_data: Dict[str, Any] = UNSET - if not isinstance(multipart_data, Unset): - multipart_data.to_multipart() - - return { - "url": url, - "headers": headers, - "json": form_data.to_dict(), - } + _kwargs["headers"] = headers + return _kwargs def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[OsidbApiV1TrackersCreateResponse201]: if response.status_code == 201: + # } _response_201 = response.json() response_201: OsidbApiV1TrackersCreateResponse201 if isinstance(_response_201, Unset): @@ 
-53,32 +55,47 @@ def _parse_response( response_201 = OsidbApiV1TrackersCreateResponse201.from_dict(_response_201) return response_201 - return None def _build_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[OsidbApiV1TrackersCreateResponse201]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) def sync_detailed( *, client: AuthenticatedClient, - form_data: TrackerPost, - multipart_data: TrackerPost, - json_body: TrackerPost, + body: Union[ + TrackerPost, + TrackerPost, + TrackerPost, + ], ) -> Response[OsidbApiV1TrackersCreateResponse201]: + """ + Args: + bugzilla_api_key (str): + jira_api_key (str): + body (TrackerPost): Tracker serializer + body (TrackerPost): Tracker serializer + body (TrackerPost): Tracker serializer + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1TrackersCreateResponse201] + """ + kwargs = _get_kwargs( client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) response = requests.post( @@ -89,38 +106,68 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( *, client: AuthenticatedClient, - form_data: TrackerPost, - multipart_data: TrackerPost, - json_body: TrackerPost, + body: Union[ + TrackerPost, + TrackerPost, + TrackerPost, + ], ) -> Optional[OsidbApiV1TrackersCreateResponse201]: - """ """ + """ + Args: + bugzilla_api_key (str): + jira_api_key (str): + body (TrackerPost): Tracker serializer + body (TrackerPost): Tracker serializer + body (TrackerPost): Tracker serializer + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + OsidbApiV1TrackersCreateResponse201 + """ return sync_detailed( client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ).parsed -async def async_detailed( +async def asyncio_detailed( *, client: AuthenticatedClient, - form_data: TrackerPost, - multipart_data: TrackerPost, - json_body: TrackerPost, + body: Union[ + TrackerPost, + TrackerPost, + TrackerPost, + ], ) -> Response[OsidbApiV1TrackersCreateResponse201]: + """ + Args: + bugzilla_api_key (str): + jira_api_key (str): + body (TrackerPost): Tracker serializer + body (TrackerPost): Tracker serializer + body (TrackerPost): Tracker serializer + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[OsidbApiV1TrackersCreateResponse201] + """ + kwargs = _get_kwargs( client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) async with client.get_async_session().post( @@ -131,23 +178,37 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( +async def asyncio( *, client: AuthenticatedClient, - form_data: TrackerPost, - multipart_data: TrackerPost, - json_body: TrackerPost, + body: Union[ + TrackerPost, + TrackerPost, + TrackerPost, + ], ) -> Optional[OsidbApiV1TrackersCreateResponse201]: - """ """ + """ + Args: + bugzilla_api_key (str): + jira_api_key (str): + body (TrackerPost): Tracker serializer + body (TrackerPost): Tracker serializer + body (TrackerPost): Tracker serializer + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + OsidbApiV1TrackersCreateResponse201 + """ return ( - await async_detailed( + await asyncio_detailed( client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) ).parsed diff --git a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_trackers_list.py b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_trackers_list.py index fe2191c..77d7412 100644 --- a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_trackers_list.py +++ b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_trackers_list.py @@ -1,9 +1,11 @@ import datetime -from typing import Any, Dict, List, Optional, Union +from http import HTTPStatus +from typing import Any, Optional, Union +from uuid import UUID import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.osidb_api_v1_trackers_list_affects_affectedness import ( OsidbApiV1TrackersListAffectsAffectedness, ) @@ -39,7 +41,7 @@ "affects__created_dt__lt": datetime.datetime, "affects__created_dt__lte": datetime.datetime, "affects__embargoed": bool, - "affects__flaw__components": List[str], + "affects__flaw__components": list[str], "affects__flaw__created_dt": datetime.datetime, "affects__flaw__created_dt__date": datetime.date, "affects__flaw__created_dt__date__gte": datetime.date, @@ -70,7 +72,7 @@ "affects__flaw__updated_dt__gte": datetime.datetime, "affects__flaw__updated_dt__lt": datetime.datetime, "affects__flaw__updated_dt__lte": datetime.datetime, - "affects__flaw__uuid": str, + "affects__flaw__uuid": UUID, "affects__impact": OsidbApiV1TrackersListAffectsImpact, "affects__ps_component": str, "affects__ps_module": str, @@ -83,7 +85,7 @@ "affects__updated_dt__gte": datetime.datetime, "affects__updated_dt__lt": datetime.datetime, "affects__updated_dt__lte": datetime.datetime, - "affects__uuid": str, + "affects__uuid": UUID, "created_dt": datetime.datetime, "created_dt__date": datetime.date, "created_dt__date__gte": datetime.date, @@ -93,13 +95,13 @@ "created_dt__lt": datetime.datetime, "created_dt__lte": datetime.datetime, "embargoed": bool, - "exclude_fields": List[str], + "exclude_fields": list[str], "external_system_id": str, - "include_fields": List[str], - "include_meta_attr": List[str], + "include_fields": list[str], + "include_meta_attr": list[str], "limit": int, "offset": int, - "order": 
List[OsidbApiV1TrackersListOrderItem], + "order": list[OsidbApiV1TrackersListOrderItem], "ps_update_stream": str, "resolution": str, "status": str, @@ -112,671 +114,573 @@ "updated_dt__gte": datetime.datetime, "updated_dt__lt": datetime.datetime, "updated_dt__lte": datetime.datetime, - "uuid": str, + "uuid": UUID, } def _get_kwargs( *, client: AuthenticatedClient, - affects_affectedness: Union[ - Unset, None, OsidbApiV1TrackersListAffectsAffectedness - ] = UNSET, - affects_created_dt: Union[Unset, None, datetime.datetime] = UNSET, - affects_created_dt_date: Union[Unset, None, datetime.date] = UNSET, - affects_created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - affects_created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - affects_created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - affects_created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - affects_created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - affects_created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - affects_embargoed: Union[Unset, None, bool] = UNSET, - affects_flaw_components: Union[Unset, None, List[str]] = UNSET, - affects_flaw_created_dt: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_created_dt_date: Union[Unset, None, datetime.date] = UNSET, - affects_flaw_created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - affects_flaw_created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - affects_flaw_created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_cve_id: Union[Unset, None, str] = UNSET, - affects_flaw_cwe_id: Union[Unset, None, str] = UNSET, - affects_flaw_embargoed: Union[Unset, None, bool] = UNSET, - affects_flaw_impact: Union[ - Unset, None, OsidbApiV1TrackersListAffectsFlawImpact - ] = UNSET, - affects_flaw_reported_dt: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_reported_dt_date: Union[Unset, None, datetime.date] = UNSET, - affects_flaw_reported_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - affects_flaw_reported_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - affects_flaw_reported_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_reported_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_reported_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_reported_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_source: Union[ - Unset, None, OsidbApiV1TrackersListAffectsFlawSource - ] = UNSET, - affects_flaw_unembargo_dt: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - affects_flaw_updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - affects_flaw_updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - affects_flaw_updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_uuid: Union[Unset, None, str] = UNSET, - affects_impact: Union[Unset, 
None, OsidbApiV1TrackersListAffectsImpact] = UNSET, - affects_ps_component: Union[Unset, None, str] = UNSET, - affects_ps_module: Union[Unset, None, str] = UNSET, - affects_resolution: Union[ - Unset, None, OsidbApiV1TrackersListAffectsResolution - ] = UNSET, - affects_updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - affects_updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - affects_updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - affects_updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - affects_updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - affects_updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - affects_updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - affects_updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - affects_uuid: Union[Unset, None, str] = UNSET, - created_dt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_date: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - embargoed: Union[Unset, None, bool] = UNSET, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - external_system_id: Union[Unset, None, str] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, - include_meta_attr: Union[Unset, None, List[str]] = UNSET, - limit: Union[Unset, None, int] = UNSET, - offset: Union[Unset, None, int] = UNSET, - order: Union[Unset, None, List[OsidbApiV1TrackersListOrderItem]] = UNSET, - ps_update_stream: Union[Unset, None, str] = UNSET, - resolution: Union[Unset, None, str] = UNSET, - status: Union[Unset, None, str] = UNSET, - type: Union[Unset, None, OsidbApiV1TrackersListType] = UNSET, - updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - uuid: Union[Unset, None, str] = UNSET, -) -> Dict[str, Any]: - url = "{}/osidb/api/v1/trackers".format( - client.base_url, - ) - - headers: Dict[str, Any] = client.get_headers() - - json_affects_affectedness: Union[Unset, None, str] = UNSET + affects_affectedness: Union[Unset, OsidbApiV1TrackersListAffectsAffectedness] = UNSET, + affects_created_dt: Union[Unset, datetime.datetime] = UNSET, + affects_created_dt_date: Union[Unset, datetime.date] = UNSET, + affects_created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + affects_created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + affects_created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + affects_created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + affects_created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + affects_created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + affects_embargoed: Union[Unset, bool] = UNSET, + affects_flaw_components: Union[Unset, list[str]] = UNSET, + affects_flaw_created_dt: 
Union[Unset, datetime.datetime] = UNSET, + affects_flaw_created_dt_date: Union[Unset, datetime.date] = UNSET, + affects_flaw_created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + affects_flaw_created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + affects_flaw_created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_cve_id: Union[Unset, str] = UNSET, + affects_flaw_cwe_id: Union[Unset, str] = UNSET, + affects_flaw_embargoed: Union[Unset, bool] = UNSET, + affects_flaw_impact: Union[Unset, OsidbApiV1TrackersListAffectsFlawImpact] = UNSET, + affects_flaw_reported_dt: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_reported_dt_date: Union[Unset, datetime.date] = UNSET, + affects_flaw_reported_dt_date_gte: Union[Unset, datetime.date] = UNSET, + affects_flaw_reported_dt_date_lte: Union[Unset, datetime.date] = UNSET, + affects_flaw_reported_dt_gt: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_reported_dt_gte: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_reported_dt_lt: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_reported_dt_lte: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_source: Union[Unset, OsidbApiV1TrackersListAffectsFlawSource] = UNSET, + affects_flaw_unembargo_dt: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_updated_dt: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_updated_dt_date: Union[Unset, datetime.date] = UNSET, + affects_flaw_updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + affects_flaw_updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + affects_flaw_updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_uuid: Union[Unset, UUID] = UNSET, + affects_impact: Union[Unset, OsidbApiV1TrackersListAffectsImpact] = UNSET, + affects_ps_component: Union[Unset, str] = UNSET, + affects_ps_module: Union[Unset, str] = UNSET, + affects_resolution: Union[Unset, OsidbApiV1TrackersListAffectsResolution] = UNSET, + affects_updated_dt: Union[Unset, datetime.datetime] = UNSET, + affects_updated_dt_date: Union[Unset, datetime.date] = UNSET, + affects_updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + affects_updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + affects_updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + affects_updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + affects_updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + affects_updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + affects_uuid: Union[Unset, UUID] = UNSET, + created_dt: Union[Unset, datetime.datetime] = UNSET, + created_dt_date: Union[Unset, datetime.date] = UNSET, + created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + embargoed: Union[Unset, bool] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + external_system_id: 
Union[Unset, str] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, + include_meta_attr: Union[Unset, list[str]] = UNSET, + limit: Union[Unset, int] = UNSET, + offset: Union[Unset, int] = UNSET, + order: Union[Unset, list[OsidbApiV1TrackersListOrderItem]] = UNSET, + ps_update_stream: Union[Unset, str] = UNSET, + resolution: Union[Unset, str] = UNSET, + status: Union[Unset, str] = UNSET, + type_: Union[Unset, OsidbApiV1TrackersListType] = UNSET, + updated_dt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_date: Union[Unset, datetime.date] = UNSET, + updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + uuid: Union[Unset, UUID] = UNSET, +) -> dict[str, Any]: + params: dict[str, Any] = {} + + json_affects_affectedness: Union[Unset, str] = UNSET if not isinstance(affects_affectedness, Unset): + json_affects_affectedness = OsidbApiV1TrackersListAffectsAffectedness(affects_affectedness).value - json_affects_affectedness = ( - OsidbApiV1TrackersListAffectsAffectedness(affects_affectedness).value - if affects_affectedness - else None - ) + params["affects__affectedness"] = json_affects_affectedness - json_affects_created_dt: Union[Unset, None, str] = UNSET + json_affects_created_dt: Union[Unset, str] = UNSET if not isinstance(affects_created_dt, Unset): - json_affects_created_dt = ( - affects_created_dt.isoformat() if affects_created_dt else None - ) + json_affects_created_dt = affects_created_dt.isoformat() - json_affects_created_dt_date: Union[Unset, None, str] = UNSET + params["affects__created_dt"] = json_affects_created_dt + + json_affects_created_dt_date: Union[Unset, str] = UNSET if not isinstance(affects_created_dt_date, Unset): - json_affects_created_dt_date = ( - affects_created_dt_date.isoformat() if affects_created_dt_date else None - ) + json_affects_created_dt_date = affects_created_dt_date.isoformat() - json_affects_created_dt_date_gte: Union[Unset, None, str] = UNSET + params["affects__created_dt__date"] = json_affects_created_dt_date + + json_affects_created_dt_date_gte: Union[Unset, str] = UNSET if not isinstance(affects_created_dt_date_gte, Unset): - json_affects_created_dt_date_gte = ( - affects_created_dt_date_gte.isoformat() - if affects_created_dt_date_gte - else None - ) + json_affects_created_dt_date_gte = affects_created_dt_date_gte.isoformat() - json_affects_created_dt_date_lte: Union[Unset, None, str] = UNSET + params["affects__created_dt__date__gte"] = json_affects_created_dt_date_gte + + json_affects_created_dt_date_lte: Union[Unset, str] = UNSET if not isinstance(affects_created_dt_date_lte, Unset): - json_affects_created_dt_date_lte = ( - affects_created_dt_date_lte.isoformat() - if affects_created_dt_date_lte - else None - ) + json_affects_created_dt_date_lte = affects_created_dt_date_lte.isoformat() + + params["affects__created_dt__date__lte"] = json_affects_created_dt_date_lte - json_affects_created_dt_gt: Union[Unset, None, str] = UNSET + json_affects_created_dt_gt: Union[Unset, str] = UNSET if not isinstance(affects_created_dt_gt, Unset): - json_affects_created_dt_gt = ( - affects_created_dt_gt.isoformat() if affects_created_dt_gt else None - ) + json_affects_created_dt_gt = affects_created_dt_gt.isoformat() + + params["affects__created_dt__gt"] = 
json_affects_created_dt_gt - json_affects_created_dt_gte: Union[Unset, None, str] = UNSET + json_affects_created_dt_gte: Union[Unset, str] = UNSET if not isinstance(affects_created_dt_gte, Unset): - json_affects_created_dt_gte = ( - affects_created_dt_gte.isoformat() if affects_created_dt_gte else None - ) + json_affects_created_dt_gte = affects_created_dt_gte.isoformat() + + params["affects__created_dt__gte"] = json_affects_created_dt_gte - json_affects_created_dt_lt: Union[Unset, None, str] = UNSET + json_affects_created_dt_lt: Union[Unset, str] = UNSET if not isinstance(affects_created_dt_lt, Unset): - json_affects_created_dt_lt = ( - affects_created_dt_lt.isoformat() if affects_created_dt_lt else None - ) + json_affects_created_dt_lt = affects_created_dt_lt.isoformat() + + params["affects__created_dt__lt"] = json_affects_created_dt_lt - json_affects_created_dt_lte: Union[Unset, None, str] = UNSET + json_affects_created_dt_lte: Union[Unset, str] = UNSET if not isinstance(affects_created_dt_lte, Unset): - json_affects_created_dt_lte = ( - affects_created_dt_lte.isoformat() if affects_created_dt_lte else None - ) + json_affects_created_dt_lte = affects_created_dt_lte.isoformat() + + params["affects__created_dt__lte"] = json_affects_created_dt_lte + + params["affects__embargoed"] = affects_embargoed - json_affects_flaw_components: Union[Unset, None, List[str]] = UNSET + json_affects_flaw_components: Union[Unset, list[str]] = UNSET if not isinstance(affects_flaw_components, Unset): - if affects_flaw_components is None: - json_affects_flaw_components = None - else: - json_affects_flaw_components = affects_flaw_components + json_affects_flaw_components = affects_flaw_components + + params["affects__flaw__components"] = json_affects_flaw_components - json_affects_flaw_created_dt: Union[Unset, None, str] = UNSET + json_affects_flaw_created_dt: Union[Unset, str] = UNSET if not isinstance(affects_flaw_created_dt, Unset): - json_affects_flaw_created_dt = ( - affects_flaw_created_dt.isoformat() if affects_flaw_created_dt else None - ) + json_affects_flaw_created_dt = affects_flaw_created_dt.isoformat() + + params["affects__flaw__created_dt"] = json_affects_flaw_created_dt - json_affects_flaw_created_dt_date: Union[Unset, None, str] = UNSET + json_affects_flaw_created_dt_date: Union[Unset, str] = UNSET if not isinstance(affects_flaw_created_dt_date, Unset): - json_affects_flaw_created_dt_date = ( - affects_flaw_created_dt_date.isoformat() - if affects_flaw_created_dt_date - else None - ) + json_affects_flaw_created_dt_date = affects_flaw_created_dt_date.isoformat() + + params["affects__flaw__created_dt__date"] = json_affects_flaw_created_dt_date - json_affects_flaw_created_dt_date_gte: Union[Unset, None, str] = UNSET + json_affects_flaw_created_dt_date_gte: Union[Unset, str] = UNSET if not isinstance(affects_flaw_created_dt_date_gte, Unset): - json_affects_flaw_created_dt_date_gte = ( - affects_flaw_created_dt_date_gte.isoformat() - if affects_flaw_created_dt_date_gte - else None - ) + json_affects_flaw_created_dt_date_gte = affects_flaw_created_dt_date_gte.isoformat() + + params["affects__flaw__created_dt__date__gte"] = json_affects_flaw_created_dt_date_gte - json_affects_flaw_created_dt_date_lte: Union[Unset, None, str] = UNSET + json_affects_flaw_created_dt_date_lte: Union[Unset, str] = UNSET if not isinstance(affects_flaw_created_dt_date_lte, Unset): - json_affects_flaw_created_dt_date_lte = ( - affects_flaw_created_dt_date_lte.isoformat() - if affects_flaw_created_dt_date_lte - else None - ) + 
json_affects_flaw_created_dt_date_lte = affects_flaw_created_dt_date_lte.isoformat() - json_affects_flaw_created_dt_gt: Union[Unset, None, str] = UNSET + params["affects__flaw__created_dt__date__lte"] = json_affects_flaw_created_dt_date_lte + + json_affects_flaw_created_dt_gt: Union[Unset, str] = UNSET if not isinstance(affects_flaw_created_dt_gt, Unset): - json_affects_flaw_created_dt_gt = ( - affects_flaw_created_dt_gt.isoformat() - if affects_flaw_created_dt_gt - else None - ) + json_affects_flaw_created_dt_gt = affects_flaw_created_dt_gt.isoformat() - json_affects_flaw_created_dt_gte: Union[Unset, None, str] = UNSET + params["affects__flaw__created_dt__gt"] = json_affects_flaw_created_dt_gt + + json_affects_flaw_created_dt_gte: Union[Unset, str] = UNSET if not isinstance(affects_flaw_created_dt_gte, Unset): - json_affects_flaw_created_dt_gte = ( - affects_flaw_created_dt_gte.isoformat() - if affects_flaw_created_dt_gte - else None - ) + json_affects_flaw_created_dt_gte = affects_flaw_created_dt_gte.isoformat() - json_affects_flaw_created_dt_lt: Union[Unset, None, str] = UNSET + params["affects__flaw__created_dt__gte"] = json_affects_flaw_created_dt_gte + + json_affects_flaw_created_dt_lt: Union[Unset, str] = UNSET if not isinstance(affects_flaw_created_dt_lt, Unset): - json_affects_flaw_created_dt_lt = ( - affects_flaw_created_dt_lt.isoformat() - if affects_flaw_created_dt_lt - else None - ) + json_affects_flaw_created_dt_lt = affects_flaw_created_dt_lt.isoformat() - json_affects_flaw_created_dt_lte: Union[Unset, None, str] = UNSET + params["affects__flaw__created_dt__lt"] = json_affects_flaw_created_dt_lt + + json_affects_flaw_created_dt_lte: Union[Unset, str] = UNSET if not isinstance(affects_flaw_created_dt_lte, Unset): - json_affects_flaw_created_dt_lte = ( - affects_flaw_created_dt_lte.isoformat() - if affects_flaw_created_dt_lte - else None - ) + json_affects_flaw_created_dt_lte = affects_flaw_created_dt_lte.isoformat() + + params["affects__flaw__created_dt__lte"] = json_affects_flaw_created_dt_lte + + params["affects__flaw__cve_id"] = affects_flaw_cve_id + + params["affects__flaw__cwe_id"] = affects_flaw_cwe_id - json_affects_flaw_impact: Union[Unset, None, str] = UNSET + params["affects__flaw__embargoed"] = affects_flaw_embargoed + + json_affects_flaw_impact: Union[Unset, str] = UNSET if not isinstance(affects_flaw_impact, Unset): + json_affects_flaw_impact = OsidbApiV1TrackersListAffectsFlawImpact(affects_flaw_impact).value - json_affects_flaw_impact = ( - OsidbApiV1TrackersListAffectsFlawImpact(affects_flaw_impact).value - if affects_flaw_impact - else None - ) + params["affects__flaw__impact"] = json_affects_flaw_impact - json_affects_flaw_reported_dt: Union[Unset, None, str] = UNSET + json_affects_flaw_reported_dt: Union[Unset, str] = UNSET if not isinstance(affects_flaw_reported_dt, Unset): - json_affects_flaw_reported_dt = ( - affects_flaw_reported_dt.isoformat() if affects_flaw_reported_dt else None - ) + json_affects_flaw_reported_dt = affects_flaw_reported_dt.isoformat() + + params["affects__flaw__reported_dt"] = json_affects_flaw_reported_dt - json_affects_flaw_reported_dt_date: Union[Unset, None, str] = UNSET + json_affects_flaw_reported_dt_date: Union[Unset, str] = UNSET if not isinstance(affects_flaw_reported_dt_date, Unset): - json_affects_flaw_reported_dt_date = ( - affects_flaw_reported_dt_date.isoformat() - if affects_flaw_reported_dt_date - else None - ) + json_affects_flaw_reported_dt_date = affects_flaw_reported_dt_date.isoformat() + + 
params["affects__flaw__reported_dt__date"] = json_affects_flaw_reported_dt_date - json_affects_flaw_reported_dt_date_gte: Union[Unset, None, str] = UNSET + json_affects_flaw_reported_dt_date_gte: Union[Unset, str] = UNSET if not isinstance(affects_flaw_reported_dt_date_gte, Unset): - json_affects_flaw_reported_dt_date_gte = ( - affects_flaw_reported_dt_date_gte.isoformat() - if affects_flaw_reported_dt_date_gte - else None - ) + json_affects_flaw_reported_dt_date_gte = affects_flaw_reported_dt_date_gte.isoformat() - json_affects_flaw_reported_dt_date_lte: Union[Unset, None, str] = UNSET + params["affects__flaw__reported_dt__date__gte"] = json_affects_flaw_reported_dt_date_gte + + json_affects_flaw_reported_dt_date_lte: Union[Unset, str] = UNSET if not isinstance(affects_flaw_reported_dt_date_lte, Unset): - json_affects_flaw_reported_dt_date_lte = ( - affects_flaw_reported_dt_date_lte.isoformat() - if affects_flaw_reported_dt_date_lte - else None - ) + json_affects_flaw_reported_dt_date_lte = affects_flaw_reported_dt_date_lte.isoformat() - json_affects_flaw_reported_dt_gt: Union[Unset, None, str] = UNSET + params["affects__flaw__reported_dt__date__lte"] = json_affects_flaw_reported_dt_date_lte + + json_affects_flaw_reported_dt_gt: Union[Unset, str] = UNSET if not isinstance(affects_flaw_reported_dt_gt, Unset): - json_affects_flaw_reported_dt_gt = ( - affects_flaw_reported_dt_gt.isoformat() - if affects_flaw_reported_dt_gt - else None - ) + json_affects_flaw_reported_dt_gt = affects_flaw_reported_dt_gt.isoformat() - json_affects_flaw_reported_dt_gte: Union[Unset, None, str] = UNSET + params["affects__flaw__reported_dt__gt"] = json_affects_flaw_reported_dt_gt + + json_affects_flaw_reported_dt_gte: Union[Unset, str] = UNSET if not isinstance(affects_flaw_reported_dt_gte, Unset): - json_affects_flaw_reported_dt_gte = ( - affects_flaw_reported_dt_gte.isoformat() - if affects_flaw_reported_dt_gte - else None - ) + json_affects_flaw_reported_dt_gte = affects_flaw_reported_dt_gte.isoformat() - json_affects_flaw_reported_dt_lt: Union[Unset, None, str] = UNSET + params["affects__flaw__reported_dt__gte"] = json_affects_flaw_reported_dt_gte + + json_affects_flaw_reported_dt_lt: Union[Unset, str] = UNSET if not isinstance(affects_flaw_reported_dt_lt, Unset): - json_affects_flaw_reported_dt_lt = ( - affects_flaw_reported_dt_lt.isoformat() - if affects_flaw_reported_dt_lt - else None - ) + json_affects_flaw_reported_dt_lt = affects_flaw_reported_dt_lt.isoformat() + + params["affects__flaw__reported_dt__lt"] = json_affects_flaw_reported_dt_lt - json_affects_flaw_reported_dt_lte: Union[Unset, None, str] = UNSET + json_affects_flaw_reported_dt_lte: Union[Unset, str] = UNSET if not isinstance(affects_flaw_reported_dt_lte, Unset): - json_affects_flaw_reported_dt_lte = ( - affects_flaw_reported_dt_lte.isoformat() - if affects_flaw_reported_dt_lte - else None - ) + json_affects_flaw_reported_dt_lte = affects_flaw_reported_dt_lte.isoformat() + + params["affects__flaw__reported_dt__lte"] = json_affects_flaw_reported_dt_lte - json_affects_flaw_source: Union[Unset, None, str] = UNSET + json_affects_flaw_source: Union[Unset, str] = UNSET if not isinstance(affects_flaw_source, Unset): + json_affects_flaw_source = OsidbApiV1TrackersListAffectsFlawSource(affects_flaw_source).value - json_affects_flaw_source = ( - OsidbApiV1TrackersListAffectsFlawSource(affects_flaw_source).value - if affects_flaw_source - else None - ) + params["affects__flaw__source"] = json_affects_flaw_source - json_affects_flaw_unembargo_dt: 
Union[Unset, None, str] = UNSET + json_affects_flaw_unembargo_dt: Union[Unset, str] = UNSET if not isinstance(affects_flaw_unembargo_dt, Unset): - json_affects_flaw_unembargo_dt = ( - affects_flaw_unembargo_dt.isoformat() if affects_flaw_unembargo_dt else None - ) + json_affects_flaw_unembargo_dt = affects_flaw_unembargo_dt.isoformat() - json_affects_flaw_updated_dt: Union[Unset, None, str] = UNSET + params["affects__flaw__unembargo_dt"] = json_affects_flaw_unembargo_dt + + json_affects_flaw_updated_dt: Union[Unset, str] = UNSET if not isinstance(affects_flaw_updated_dt, Unset): - json_affects_flaw_updated_dt = ( - affects_flaw_updated_dt.isoformat() if affects_flaw_updated_dt else None - ) + json_affects_flaw_updated_dt = affects_flaw_updated_dt.isoformat() - json_affects_flaw_updated_dt_date: Union[Unset, None, str] = UNSET + params["affects__flaw__updated_dt"] = json_affects_flaw_updated_dt + + json_affects_flaw_updated_dt_date: Union[Unset, str] = UNSET if not isinstance(affects_flaw_updated_dt_date, Unset): - json_affects_flaw_updated_dt_date = ( - affects_flaw_updated_dt_date.isoformat() - if affects_flaw_updated_dt_date - else None - ) + json_affects_flaw_updated_dt_date = affects_flaw_updated_dt_date.isoformat() + + params["affects__flaw__updated_dt__date"] = json_affects_flaw_updated_dt_date - json_affects_flaw_updated_dt_date_gte: Union[Unset, None, str] = UNSET + json_affects_flaw_updated_dt_date_gte: Union[Unset, str] = UNSET if not isinstance(affects_flaw_updated_dt_date_gte, Unset): - json_affects_flaw_updated_dt_date_gte = ( - affects_flaw_updated_dt_date_gte.isoformat() - if affects_flaw_updated_dt_date_gte - else None - ) + json_affects_flaw_updated_dt_date_gte = affects_flaw_updated_dt_date_gte.isoformat() + + params["affects__flaw__updated_dt__date__gte"] = json_affects_flaw_updated_dt_date_gte - json_affects_flaw_updated_dt_date_lte: Union[Unset, None, str] = UNSET + json_affects_flaw_updated_dt_date_lte: Union[Unset, str] = UNSET if not isinstance(affects_flaw_updated_dt_date_lte, Unset): - json_affects_flaw_updated_dt_date_lte = ( - affects_flaw_updated_dt_date_lte.isoformat() - if affects_flaw_updated_dt_date_lte - else None - ) + json_affects_flaw_updated_dt_date_lte = affects_flaw_updated_dt_date_lte.isoformat() + + params["affects__flaw__updated_dt__date__lte"] = json_affects_flaw_updated_dt_date_lte - json_affects_flaw_updated_dt_gt: Union[Unset, None, str] = UNSET + json_affects_flaw_updated_dt_gt: Union[Unset, str] = UNSET if not isinstance(affects_flaw_updated_dt_gt, Unset): - json_affects_flaw_updated_dt_gt = ( - affects_flaw_updated_dt_gt.isoformat() - if affects_flaw_updated_dt_gt - else None - ) + json_affects_flaw_updated_dt_gt = affects_flaw_updated_dt_gt.isoformat() + + params["affects__flaw__updated_dt__gt"] = json_affects_flaw_updated_dt_gt - json_affects_flaw_updated_dt_gte: Union[Unset, None, str] = UNSET + json_affects_flaw_updated_dt_gte: Union[Unset, str] = UNSET if not isinstance(affects_flaw_updated_dt_gte, Unset): - json_affects_flaw_updated_dt_gte = ( - affects_flaw_updated_dt_gte.isoformat() - if affects_flaw_updated_dt_gte - else None - ) + json_affects_flaw_updated_dt_gte = affects_flaw_updated_dt_gte.isoformat() + + params["affects__flaw__updated_dt__gte"] = json_affects_flaw_updated_dt_gte - json_affects_flaw_updated_dt_lt: Union[Unset, None, str] = UNSET + json_affects_flaw_updated_dt_lt: Union[Unset, str] = UNSET if not isinstance(affects_flaw_updated_dt_lt, Unset): - json_affects_flaw_updated_dt_lt = ( - 
affects_flaw_updated_dt_lt.isoformat() - if affects_flaw_updated_dt_lt - else None - ) + json_affects_flaw_updated_dt_lt = affects_flaw_updated_dt_lt.isoformat() - json_affects_flaw_updated_dt_lte: Union[Unset, None, str] = UNSET + params["affects__flaw__updated_dt__lt"] = json_affects_flaw_updated_dt_lt + + json_affects_flaw_updated_dt_lte: Union[Unset, str] = UNSET if not isinstance(affects_flaw_updated_dt_lte, Unset): - json_affects_flaw_updated_dt_lte = ( - affects_flaw_updated_dt_lte.isoformat() - if affects_flaw_updated_dt_lte - else None - ) + json_affects_flaw_updated_dt_lte = affects_flaw_updated_dt_lte.isoformat() - json_affects_impact: Union[Unset, None, str] = UNSET + params["affects__flaw__updated_dt__lte"] = json_affects_flaw_updated_dt_lte + + json_affects_flaw_uuid: Union[Unset, str] = UNSET + if not isinstance(affects_flaw_uuid, Unset): + json_affects_flaw_uuid = str(affects_flaw_uuid) + + params["affects__flaw__uuid"] = json_affects_flaw_uuid + + json_affects_impact: Union[Unset, str] = UNSET if not isinstance(affects_impact, Unset): + json_affects_impact = OsidbApiV1TrackersListAffectsImpact(affects_impact).value - json_affects_impact = ( - OsidbApiV1TrackersListAffectsImpact(affects_impact).value - if affects_impact - else None - ) + params["affects__impact"] = json_affects_impact - json_affects_resolution: Union[Unset, None, str] = UNSET + params["affects__ps_component"] = affects_ps_component + + params["affects__ps_module"] = affects_ps_module + + json_affects_resolution: Union[Unset, str] = UNSET if not isinstance(affects_resolution, Unset): + json_affects_resolution = OsidbApiV1TrackersListAffectsResolution(affects_resolution).value - json_affects_resolution = ( - OsidbApiV1TrackersListAffectsResolution(affects_resolution).value - if affects_resolution - else None - ) + params["affects__resolution"] = json_affects_resolution - json_affects_updated_dt: Union[Unset, None, str] = UNSET + json_affects_updated_dt: Union[Unset, str] = UNSET if not isinstance(affects_updated_dt, Unset): - json_affects_updated_dt = ( - affects_updated_dt.isoformat() if affects_updated_dt else None - ) + json_affects_updated_dt = affects_updated_dt.isoformat() + + params["affects__updated_dt"] = json_affects_updated_dt - json_affects_updated_dt_date: Union[Unset, None, str] = UNSET + json_affects_updated_dt_date: Union[Unset, str] = UNSET if not isinstance(affects_updated_dt_date, Unset): - json_affects_updated_dt_date = ( - affects_updated_dt_date.isoformat() if affects_updated_dt_date else None - ) + json_affects_updated_dt_date = affects_updated_dt_date.isoformat() + + params["affects__updated_dt__date"] = json_affects_updated_dt_date - json_affects_updated_dt_date_gte: Union[Unset, None, str] = UNSET + json_affects_updated_dt_date_gte: Union[Unset, str] = UNSET if not isinstance(affects_updated_dt_date_gte, Unset): - json_affects_updated_dt_date_gte = ( - affects_updated_dt_date_gte.isoformat() - if affects_updated_dt_date_gte - else None - ) + json_affects_updated_dt_date_gte = affects_updated_dt_date_gte.isoformat() + + params["affects__updated_dt__date__gte"] = json_affects_updated_dt_date_gte - json_affects_updated_dt_date_lte: Union[Unset, None, str] = UNSET + json_affects_updated_dt_date_lte: Union[Unset, str] = UNSET if not isinstance(affects_updated_dt_date_lte, Unset): - json_affects_updated_dt_date_lte = ( - affects_updated_dt_date_lte.isoformat() - if affects_updated_dt_date_lte - else None - ) + json_affects_updated_dt_date_lte = affects_updated_dt_date_lte.isoformat() + + 
params["affects__updated_dt__date__lte"] = json_affects_updated_dt_date_lte - json_affects_updated_dt_gt: Union[Unset, None, str] = UNSET + json_affects_updated_dt_gt: Union[Unset, str] = UNSET if not isinstance(affects_updated_dt_gt, Unset): - json_affects_updated_dt_gt = ( - affects_updated_dt_gt.isoformat() if affects_updated_dt_gt else None - ) + json_affects_updated_dt_gt = affects_updated_dt_gt.isoformat() - json_affects_updated_dt_gte: Union[Unset, None, str] = UNSET + params["affects__updated_dt__gt"] = json_affects_updated_dt_gt + + json_affects_updated_dt_gte: Union[Unset, str] = UNSET if not isinstance(affects_updated_dt_gte, Unset): - json_affects_updated_dt_gte = ( - affects_updated_dt_gte.isoformat() if affects_updated_dt_gte else None - ) + json_affects_updated_dt_gte = affects_updated_dt_gte.isoformat() - json_affects_updated_dt_lt: Union[Unset, None, str] = UNSET + params["affects__updated_dt__gte"] = json_affects_updated_dt_gte + + json_affects_updated_dt_lt: Union[Unset, str] = UNSET if not isinstance(affects_updated_dt_lt, Unset): - json_affects_updated_dt_lt = ( - affects_updated_dt_lt.isoformat() if affects_updated_dt_lt else None - ) + json_affects_updated_dt_lt = affects_updated_dt_lt.isoformat() - json_affects_updated_dt_lte: Union[Unset, None, str] = UNSET + params["affects__updated_dt__lt"] = json_affects_updated_dt_lt + + json_affects_updated_dt_lte: Union[Unset, str] = UNSET if not isinstance(affects_updated_dt_lte, Unset): - json_affects_updated_dt_lte = ( - affects_updated_dt_lte.isoformat() if affects_updated_dt_lte else None - ) + json_affects_updated_dt_lte = affects_updated_dt_lte.isoformat() - json_created_dt: Union[Unset, None, str] = UNSET + params["affects__updated_dt__lte"] = json_affects_updated_dt_lte + + json_affects_uuid: Union[Unset, str] = UNSET + if not isinstance(affects_uuid, Unset): + json_affects_uuid = str(affects_uuid) + + params["affects__uuid"] = json_affects_uuid + + json_created_dt: Union[Unset, str] = UNSET if not isinstance(created_dt, Unset): - json_created_dt = created_dt.isoformat() if created_dt else None + json_created_dt = created_dt.isoformat() - json_created_dt_date: Union[Unset, None, str] = UNSET + params["created_dt"] = json_created_dt + + json_created_dt_date: Union[Unset, str] = UNSET if not isinstance(created_dt_date, Unset): - json_created_dt_date = created_dt_date.isoformat() if created_dt_date else None + json_created_dt_date = created_dt_date.isoformat() + + params["created_dt__date"] = json_created_dt_date - json_created_dt_date_gte: Union[Unset, None, str] = UNSET + json_created_dt_date_gte: Union[Unset, str] = UNSET if not isinstance(created_dt_date_gte, Unset): - json_created_dt_date_gte = ( - created_dt_date_gte.isoformat() if created_dt_date_gte else None - ) + json_created_dt_date_gte = created_dt_date_gte.isoformat() + + params["created_dt__date__gte"] = json_created_dt_date_gte - json_created_dt_date_lte: Union[Unset, None, str] = UNSET + json_created_dt_date_lte: Union[Unset, str] = UNSET if not isinstance(created_dt_date_lte, Unset): - json_created_dt_date_lte = ( - created_dt_date_lte.isoformat() if created_dt_date_lte else None - ) + json_created_dt_date_lte = created_dt_date_lte.isoformat() + + params["created_dt__date__lte"] = json_created_dt_date_lte - json_created_dt_gt: Union[Unset, None, str] = UNSET + json_created_dt_gt: Union[Unset, str] = UNSET if not isinstance(created_dt_gt, Unset): - json_created_dt_gt = created_dt_gt.isoformat() if created_dt_gt else None + json_created_dt_gt = 
created_dt_gt.isoformat() - json_created_dt_gte: Union[Unset, None, str] = UNSET + params["created_dt__gt"] = json_created_dt_gt + + json_created_dt_gte: Union[Unset, str] = UNSET if not isinstance(created_dt_gte, Unset): - json_created_dt_gte = created_dt_gte.isoformat() if created_dt_gte else None + json_created_dt_gte = created_dt_gte.isoformat() + + params["created_dt__gte"] = json_created_dt_gte - json_created_dt_lt: Union[Unset, None, str] = UNSET + json_created_dt_lt: Union[Unset, str] = UNSET if not isinstance(created_dt_lt, Unset): - json_created_dt_lt = created_dt_lt.isoformat() if created_dt_lt else None + json_created_dt_lt = created_dt_lt.isoformat() + + params["created_dt__lt"] = json_created_dt_lt - json_created_dt_lte: Union[Unset, None, str] = UNSET + json_created_dt_lte: Union[Unset, str] = UNSET if not isinstance(created_dt_lte, Unset): - json_created_dt_lte = created_dt_lte.isoformat() if created_dt_lte else None + json_created_dt_lte = created_dt_lte.isoformat() + + params["created_dt__lte"] = json_created_dt_lte + + params["embargoed"] = embargoed - json_exclude_fields: Union[Unset, None, List[str]] = UNSET + json_exclude_fields: Union[Unset, list[str]] = UNSET if not isinstance(exclude_fields, Unset): - if exclude_fields is None: - json_exclude_fields = None - else: - json_exclude_fields = exclude_fields + json_exclude_fields = exclude_fields + + params["exclude_fields"] = json_exclude_fields - json_include_fields: Union[Unset, None, List[str]] = UNSET + params["external_system_id"] = external_system_id + + json_include_fields: Union[Unset, list[str]] = UNSET if not isinstance(include_fields, Unset): - if include_fields is None: - json_include_fields = None - else: - json_include_fields = include_fields + json_include_fields = include_fields - json_include_meta_attr: Union[Unset, None, List[str]] = UNSET + params["include_fields"] = json_include_fields + + json_include_meta_attr: Union[Unset, list[str]] = UNSET if not isinstance(include_meta_attr, Unset): - if include_meta_attr is None: - json_include_meta_attr = None - else: - json_include_meta_attr = include_meta_attr + json_include_meta_attr = include_meta_attr + + params["include_meta_attr"] = json_include_meta_attr + + params["limit"] = limit + + params["offset"] = offset - json_order: Union[Unset, None, List[str]] = UNSET + json_order: Union[Unset, list[str]] = UNSET if not isinstance(order, Unset): - if order is None: - json_order = None - else: - json_order = [] - for order_item_data in order: - order_item: str = UNSET - if not isinstance(order_item_data, Unset): + json_order = [] + for order_item_data in order: + order_item: str = UNSET + if not isinstance(order_item_data, Unset): + order_item = OsidbApiV1TrackersListOrderItem(order_item_data).value + + json_order.append(order_item) + + params["order"] = json_order + + params["ps_update_stream"] = ps_update_stream - order_item = OsidbApiV1TrackersListOrderItem(order_item_data).value + params["resolution"] = resolution - json_order.append(order_item) + params["status"] = status - json_type: Union[Unset, None, str] = UNSET - if not isinstance(type, Unset): + json_type_: Union[Unset, str] = UNSET + if not isinstance(type_, Unset): + json_type_ = OsidbApiV1TrackersListType(type_).value - json_type = OsidbApiV1TrackersListType(type).value if type else None + params["type"] = json_type_ - json_updated_dt: Union[Unset, None, str] = UNSET + json_updated_dt: Union[Unset, str] = UNSET if not isinstance(updated_dt, Unset): - json_updated_dt = updated_dt.isoformat() if 
updated_dt else None + json_updated_dt = updated_dt.isoformat() - json_updated_dt_date: Union[Unset, None, str] = UNSET + params["updated_dt"] = json_updated_dt + + json_updated_dt_date: Union[Unset, str] = UNSET if not isinstance(updated_dt_date, Unset): - json_updated_dt_date = updated_dt_date.isoformat() if updated_dt_date else None + json_updated_dt_date = updated_dt_date.isoformat() + + params["updated_dt__date"] = json_updated_dt_date - json_updated_dt_date_gte: Union[Unset, None, str] = UNSET + json_updated_dt_date_gte: Union[Unset, str] = UNSET if not isinstance(updated_dt_date_gte, Unset): - json_updated_dt_date_gte = ( - updated_dt_date_gte.isoformat() if updated_dt_date_gte else None - ) + json_updated_dt_date_gte = updated_dt_date_gte.isoformat() - json_updated_dt_date_lte: Union[Unset, None, str] = UNSET + params["updated_dt__date__gte"] = json_updated_dt_date_gte + + json_updated_dt_date_lte: Union[Unset, str] = UNSET if not isinstance(updated_dt_date_lte, Unset): - json_updated_dt_date_lte = ( - updated_dt_date_lte.isoformat() if updated_dt_date_lte else None - ) + json_updated_dt_date_lte = updated_dt_date_lte.isoformat() - json_updated_dt_gt: Union[Unset, None, str] = UNSET + params["updated_dt__date__lte"] = json_updated_dt_date_lte + + json_updated_dt_gt: Union[Unset, str] = UNSET if not isinstance(updated_dt_gt, Unset): - json_updated_dt_gt = updated_dt_gt.isoformat() if updated_dt_gt else None + json_updated_dt_gt = updated_dt_gt.isoformat() + + params["updated_dt__gt"] = json_updated_dt_gt - json_updated_dt_gte: Union[Unset, None, str] = UNSET + json_updated_dt_gte: Union[Unset, str] = UNSET if not isinstance(updated_dt_gte, Unset): - json_updated_dt_gte = updated_dt_gte.isoformat() if updated_dt_gte else None + json_updated_dt_gte = updated_dt_gte.isoformat() - json_updated_dt_lt: Union[Unset, None, str] = UNSET + params["updated_dt__gte"] = json_updated_dt_gte + + json_updated_dt_lt: Union[Unset, str] = UNSET if not isinstance(updated_dt_lt, Unset): - json_updated_dt_lt = updated_dt_lt.isoformat() if updated_dt_lt else None + json_updated_dt_lt = updated_dt_lt.isoformat() + + params["updated_dt__lt"] = json_updated_dt_lt - json_updated_dt_lte: Union[Unset, None, str] = UNSET + json_updated_dt_lte: Union[Unset, str] = UNSET if not isinstance(updated_dt_lte, Unset): - json_updated_dt_lte = updated_dt_lte.isoformat() if updated_dt_lte else None - - params: Dict[str, Any] = { - "affects__affectedness": json_affects_affectedness, - "affects__created_dt": json_affects_created_dt, - "affects__created_dt__date": json_affects_created_dt_date, - "affects__created_dt__date__gte": json_affects_created_dt_date_gte, - "affects__created_dt__date__lte": json_affects_created_dt_date_lte, - "affects__created_dt__gt": json_affects_created_dt_gt, - "affects__created_dt__gte": json_affects_created_dt_gte, - "affects__created_dt__lt": json_affects_created_dt_lt, - "affects__created_dt__lte": json_affects_created_dt_lte, - "affects__embargoed": affects_embargoed, - "affects__flaw__components": json_affects_flaw_components, - "affects__flaw__created_dt": json_affects_flaw_created_dt, - "affects__flaw__created_dt__date": json_affects_flaw_created_dt_date, - "affects__flaw__created_dt__date__gte": json_affects_flaw_created_dt_date_gte, - "affects__flaw__created_dt__date__lte": json_affects_flaw_created_dt_date_lte, - "affects__flaw__created_dt__gt": json_affects_flaw_created_dt_gt, - "affects__flaw__created_dt__gte": json_affects_flaw_created_dt_gte, - "affects__flaw__created_dt__lt": 
json_affects_flaw_created_dt_lt, - "affects__flaw__created_dt__lte": json_affects_flaw_created_dt_lte, - "affects__flaw__cve_id": affects_flaw_cve_id, - "affects__flaw__cwe_id": affects_flaw_cwe_id, - "affects__flaw__embargoed": affects_flaw_embargoed, - "affects__flaw__impact": json_affects_flaw_impact, - "affects__flaw__reported_dt": json_affects_flaw_reported_dt, - "affects__flaw__reported_dt__date": json_affects_flaw_reported_dt_date, - "affects__flaw__reported_dt__date__gte": json_affects_flaw_reported_dt_date_gte, - "affects__flaw__reported_dt__date__lte": json_affects_flaw_reported_dt_date_lte, - "affects__flaw__reported_dt__gt": json_affects_flaw_reported_dt_gt, - "affects__flaw__reported_dt__gte": json_affects_flaw_reported_dt_gte, - "affects__flaw__reported_dt__lt": json_affects_flaw_reported_dt_lt, - "affects__flaw__reported_dt__lte": json_affects_flaw_reported_dt_lte, - "affects__flaw__source": json_affects_flaw_source, - "affects__flaw__unembargo_dt": json_affects_flaw_unembargo_dt, - "affects__flaw__updated_dt": json_affects_flaw_updated_dt, - "affects__flaw__updated_dt__date": json_affects_flaw_updated_dt_date, - "affects__flaw__updated_dt__date__gte": json_affects_flaw_updated_dt_date_gte, - "affects__flaw__updated_dt__date__lte": json_affects_flaw_updated_dt_date_lte, - "affects__flaw__updated_dt__gt": json_affects_flaw_updated_dt_gt, - "affects__flaw__updated_dt__gte": json_affects_flaw_updated_dt_gte, - "affects__flaw__updated_dt__lt": json_affects_flaw_updated_dt_lt, - "affects__flaw__updated_dt__lte": json_affects_flaw_updated_dt_lte, - "affects__flaw__uuid": affects_flaw_uuid, - "affects__impact": json_affects_impact, - "affects__ps_component": affects_ps_component, - "affects__ps_module": affects_ps_module, - "affects__resolution": json_affects_resolution, - "affects__updated_dt": json_affects_updated_dt, - "affects__updated_dt__date": json_affects_updated_dt_date, - "affects__updated_dt__date__gte": json_affects_updated_dt_date_gte, - "affects__updated_dt__date__lte": json_affects_updated_dt_date_lte, - "affects__updated_dt__gt": json_affects_updated_dt_gt, - "affects__updated_dt__gte": json_affects_updated_dt_gte, - "affects__updated_dt__lt": json_affects_updated_dt_lt, - "affects__updated_dt__lte": json_affects_updated_dt_lte, - "affects__uuid": affects_uuid, - "created_dt": json_created_dt, - "created_dt__date": json_created_dt_date, - "created_dt__date__gte": json_created_dt_date_gte, - "created_dt__date__lte": json_created_dt_date_lte, - "created_dt__gt": json_created_dt_gt, - "created_dt__gte": json_created_dt_gte, - "created_dt__lt": json_created_dt_lt, - "created_dt__lte": json_created_dt_lte, - "embargoed": embargoed, - "exclude_fields": json_exclude_fields, - "external_system_id": external_system_id, - "include_fields": json_include_fields, - "include_meta_attr": json_include_meta_attr, - "limit": limit, - "offset": offset, - "order": json_order, - "ps_update_stream": ps_update_stream, - "resolution": resolution, - "status": status, - "type": json_type, - "updated_dt": json_updated_dt, - "updated_dt__date": json_updated_dt_date, - "updated_dt__date__gte": json_updated_dt_date_gte, - "updated_dt__date__lte": json_updated_dt_date_lte, - "updated_dt__gt": json_updated_dt_gt, - "updated_dt__gte": json_updated_dt_gte, - "updated_dt__lt": json_updated_dt_lt, - "updated_dt__lte": json_updated_dt_lte, - "uuid": uuid, - } + json_updated_dt_lte = updated_dt_lte.isoformat() + + params["updated_dt__lte"] = json_updated_dt_lte + + json_uuid: Union[Unset, str] = UNSET + 
if not isinstance(uuid, Unset): + json_uuid = str(uuid) + + params["uuid"] = json_uuid + params = {k: v for k, v in params.items() if v is not UNSET and v is not None} - return { - "url": url, - "headers": headers, + _kwargs: dict[str, Any] = { + "url": f"{client.base_url}/osidb/api/v1/trackers", "params": params, } + return _kwargs + def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[OsidbApiV1TrackersListResponse200]: if response.status_code == 200: + # } _response_200 = response.json() response_200: OsidbApiV1TrackersListResponse200 if isinstance(_response_200, Unset): @@ -785,116 +689,202 @@ def _parse_response( response_200 = OsidbApiV1TrackersListResponse200.from_dict(_response_200) return response_200 - return None def _build_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[OsidbApiV1TrackersListResponse200]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) def sync_detailed( *, client: AuthenticatedClient, - affects_affectedness: Union[ - Unset, None, OsidbApiV1TrackersListAffectsAffectedness - ] = UNSET, - affects_created_dt: Union[Unset, None, datetime.datetime] = UNSET, - affects_created_dt_date: Union[Unset, None, datetime.date] = UNSET, - affects_created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - affects_created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - affects_created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - affects_created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - affects_created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - affects_created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - affects_embargoed: Union[Unset, None, bool] = UNSET, - affects_flaw_components: Union[Unset, None, List[str]] = UNSET, - affects_flaw_created_dt: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_created_dt_date: Union[Unset, None, datetime.date] = UNSET, - affects_flaw_created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - affects_flaw_created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - affects_flaw_created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_cve_id: Union[Unset, None, str] = UNSET, - affects_flaw_cwe_id: Union[Unset, None, str] = UNSET, - affects_flaw_embargoed: Union[Unset, None, bool] = UNSET, - affects_flaw_impact: Union[ - Unset, None, OsidbApiV1TrackersListAffectsFlawImpact - ] = UNSET, - affects_flaw_reported_dt: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_reported_dt_date: Union[Unset, None, datetime.date] = UNSET, - affects_flaw_reported_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - affects_flaw_reported_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - affects_flaw_reported_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_reported_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_reported_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - 
affects_flaw_reported_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_source: Union[ - Unset, None, OsidbApiV1TrackersListAffectsFlawSource - ] = UNSET, - affects_flaw_unembargo_dt: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - affects_flaw_updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - affects_flaw_updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - affects_flaw_updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_uuid: Union[Unset, None, str] = UNSET, - affects_impact: Union[Unset, None, OsidbApiV1TrackersListAffectsImpact] = UNSET, - affects_ps_component: Union[Unset, None, str] = UNSET, - affects_ps_module: Union[Unset, None, str] = UNSET, - affects_resolution: Union[ - Unset, None, OsidbApiV1TrackersListAffectsResolution - ] = UNSET, - affects_updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - affects_updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - affects_updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - affects_updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - affects_updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - affects_updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - affects_updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - affects_updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - affects_uuid: Union[Unset, None, str] = UNSET, - created_dt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_date: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - embargoed: Union[Unset, None, bool] = UNSET, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - external_system_id: Union[Unset, None, str] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, - include_meta_attr: Union[Unset, None, List[str]] = UNSET, - limit: Union[Unset, None, int] = UNSET, - offset: Union[Unset, None, int] = UNSET, - order: Union[Unset, None, List[OsidbApiV1TrackersListOrderItem]] = UNSET, - ps_update_stream: Union[Unset, None, str] = UNSET, - resolution: Union[Unset, None, str] = UNSET, - status: Union[Unset, None, str] = UNSET, - type: Union[Unset, None, OsidbApiV1TrackersListType] = UNSET, - updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - uuid: Union[Unset, None, str] = UNSET, + 
affects_affectedness: Union[Unset, OsidbApiV1TrackersListAffectsAffectedness] = UNSET, + affects_created_dt: Union[Unset, datetime.datetime] = UNSET, + affects_created_dt_date: Union[Unset, datetime.date] = UNSET, + affects_created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + affects_created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + affects_created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + affects_created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + affects_created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + affects_created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + affects_embargoed: Union[Unset, bool] = UNSET, + affects_flaw_components: Union[Unset, list[str]] = UNSET, + affects_flaw_created_dt: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_created_dt_date: Union[Unset, datetime.date] = UNSET, + affects_flaw_created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + affects_flaw_created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + affects_flaw_created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_cve_id: Union[Unset, str] = UNSET, + affects_flaw_cwe_id: Union[Unset, str] = UNSET, + affects_flaw_embargoed: Union[Unset, bool] = UNSET, + affects_flaw_impact: Union[Unset, OsidbApiV1TrackersListAffectsFlawImpact] = UNSET, + affects_flaw_reported_dt: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_reported_dt_date: Union[Unset, datetime.date] = UNSET, + affects_flaw_reported_dt_date_gte: Union[Unset, datetime.date] = UNSET, + affects_flaw_reported_dt_date_lte: Union[Unset, datetime.date] = UNSET, + affects_flaw_reported_dt_gt: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_reported_dt_gte: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_reported_dt_lt: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_reported_dt_lte: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_source: Union[Unset, OsidbApiV1TrackersListAffectsFlawSource] = UNSET, + affects_flaw_unembargo_dt: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_updated_dt: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_updated_dt_date: Union[Unset, datetime.date] = UNSET, + affects_flaw_updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + affects_flaw_updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + affects_flaw_updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_uuid: Union[Unset, UUID] = UNSET, + affects_impact: Union[Unset, OsidbApiV1TrackersListAffectsImpact] = UNSET, + affects_ps_component: Union[Unset, str] = UNSET, + affects_ps_module: Union[Unset, str] = UNSET, + affects_resolution: Union[Unset, OsidbApiV1TrackersListAffectsResolution] = UNSET, + affects_updated_dt: Union[Unset, datetime.datetime] = UNSET, + affects_updated_dt_date: Union[Unset, datetime.date] = UNSET, + affects_updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + affects_updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + affects_updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + affects_updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + 
affects_updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + affects_updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + affects_uuid: Union[Unset, UUID] = UNSET, + created_dt: Union[Unset, datetime.datetime] = UNSET, + created_dt_date: Union[Unset, datetime.date] = UNSET, + created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + embargoed: Union[Unset, bool] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + external_system_id: Union[Unset, str] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, + include_meta_attr: Union[Unset, list[str]] = UNSET, + limit: Union[Unset, int] = UNSET, + offset: Union[Unset, int] = UNSET, + order: Union[Unset, list[OsidbApiV1TrackersListOrderItem]] = UNSET, + ps_update_stream: Union[Unset, str] = UNSET, + resolution: Union[Unset, str] = UNSET, + status: Union[Unset, str] = UNSET, + type_: Union[Unset, OsidbApiV1TrackersListType] = UNSET, + updated_dt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_date: Union[Unset, datetime.date] = UNSET, + updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + uuid: Union[Unset, UUID] = UNSET, ) -> Response[OsidbApiV1TrackersListResponse200]: + """ + Args: + affects_affectedness (Union[Unset, OsidbApiV1TrackersListAffectsAffectedness]): + affects_created_dt (Union[Unset, datetime.datetime]): + affects_created_dt_date (Union[Unset, datetime.date]): + affects_created_dt_date_gte (Union[Unset, datetime.date]): + affects_created_dt_date_lte (Union[Unset, datetime.date]): + affects_created_dt_gt (Union[Unset, datetime.datetime]): + affects_created_dt_gte (Union[Unset, datetime.datetime]): + affects_created_dt_lt (Union[Unset, datetime.datetime]): + affects_created_dt_lte (Union[Unset, datetime.datetime]): + affects_embargoed (Union[Unset, bool]): + affects_flaw_components (Union[Unset, list[str]]): + affects_flaw_created_dt (Union[Unset, datetime.datetime]): + affects_flaw_created_dt_date (Union[Unset, datetime.date]): + affects_flaw_created_dt_date_gte (Union[Unset, datetime.date]): + affects_flaw_created_dt_date_lte (Union[Unset, datetime.date]): + affects_flaw_created_dt_gt (Union[Unset, datetime.datetime]): + affects_flaw_created_dt_gte (Union[Unset, datetime.datetime]): + affects_flaw_created_dt_lt (Union[Unset, datetime.datetime]): + affects_flaw_created_dt_lte (Union[Unset, datetime.datetime]): + affects_flaw_cve_id (Union[Unset, str]): + affects_flaw_cwe_id (Union[Unset, str]): + affects_flaw_embargoed (Union[Unset, bool]): + affects_flaw_impact (Union[Unset, OsidbApiV1TrackersListAffectsFlawImpact]): + affects_flaw_reported_dt (Union[Unset, datetime.datetime]): + affects_flaw_reported_dt_date (Union[Unset, datetime.date]): + affects_flaw_reported_dt_date_gte (Union[Unset, datetime.date]): + affects_flaw_reported_dt_date_lte (Union[Unset, datetime.date]): + affects_flaw_reported_dt_gt (Union[Unset, datetime.datetime]): + affects_flaw_reported_dt_gte (Union[Unset, datetime.datetime]): + 
affects_flaw_reported_dt_lt (Union[Unset, datetime.datetime]): + affects_flaw_reported_dt_lte (Union[Unset, datetime.datetime]): + affects_flaw_source (Union[Unset, OsidbApiV1TrackersListAffectsFlawSource]): + affects_flaw_unembargo_dt (Union[Unset, datetime.datetime]): + affects_flaw_updated_dt (Union[Unset, datetime.datetime]): + affects_flaw_updated_dt_date (Union[Unset, datetime.date]): + affects_flaw_updated_dt_date_gte (Union[Unset, datetime.date]): + affects_flaw_updated_dt_date_lte (Union[Unset, datetime.date]): + affects_flaw_updated_dt_gt (Union[Unset, datetime.datetime]): + affects_flaw_updated_dt_gte (Union[Unset, datetime.datetime]): + affects_flaw_updated_dt_lt (Union[Unset, datetime.datetime]): + affects_flaw_updated_dt_lte (Union[Unset, datetime.datetime]): + affects_flaw_uuid (Union[Unset, UUID]): + affects_impact (Union[Unset, OsidbApiV1TrackersListAffectsImpact]): + affects_ps_component (Union[Unset, str]): + affects_ps_module (Union[Unset, str]): + affects_resolution (Union[Unset, OsidbApiV1TrackersListAffectsResolution]): + affects_updated_dt (Union[Unset, datetime.datetime]): + affects_updated_dt_date (Union[Unset, datetime.date]): + affects_updated_dt_date_gte (Union[Unset, datetime.date]): + affects_updated_dt_date_lte (Union[Unset, datetime.date]): + affects_updated_dt_gt (Union[Unset, datetime.datetime]): + affects_updated_dt_gte (Union[Unset, datetime.datetime]): + affects_updated_dt_lt (Union[Unset, datetime.datetime]): + affects_updated_dt_lte (Union[Unset, datetime.datetime]): + affects_uuid (Union[Unset, UUID]): + created_dt (Union[Unset, datetime.datetime]): + created_dt_date (Union[Unset, datetime.date]): + created_dt_date_gte (Union[Unset, datetime.date]): + created_dt_date_lte (Union[Unset, datetime.date]): + created_dt_gt (Union[Unset, datetime.datetime]): + created_dt_gte (Union[Unset, datetime.datetime]): + created_dt_lt (Union[Unset, datetime.datetime]): + created_dt_lte (Union[Unset, datetime.datetime]): + embargoed (Union[Unset, bool]): + exclude_fields (Union[Unset, list[str]]): + external_system_id (Union[Unset, str]): + include_fields (Union[Unset, list[str]]): + include_meta_attr (Union[Unset, list[str]]): + limit (Union[Unset, int]): + offset (Union[Unset, int]): + order (Union[Unset, list[OsidbApiV1TrackersListOrderItem]]): + ps_update_stream (Union[Unset, str]): + resolution (Union[Unset, str]): + status (Union[Unset, str]): + type_ (Union[Unset, OsidbApiV1TrackersListType]): + updated_dt (Union[Unset, datetime.datetime]): + updated_dt_date (Union[Unset, datetime.date]): + updated_dt_date_gte (Union[Unset, datetime.date]): + updated_dt_date_lte (Union[Unset, datetime.date]): + updated_dt_gt (Union[Unset, datetime.datetime]): + updated_dt_gte (Union[Unset, datetime.datetime]): + updated_dt_lt (Union[Unset, datetime.datetime]): + updated_dt_lte (Union[Unset, datetime.datetime]): + uuid (Union[Unset, UUID]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[OsidbApiV1TrackersListResponse200] + """ + kwargs = _get_kwargs( client=client, affects_affectedness=affects_affectedness, @@ -971,7 +961,7 @@ def sync_detailed( ps_update_stream=ps_update_stream, resolution=resolution, status=status, - type=type, + type_=type_, updated_dt=updated_dt, updated_dt_date=updated_dt_date, updated_dt_date_gte=updated_dt_date_gte, @@ -991,106 +981,191 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( *, client: AuthenticatedClient, - affects_affectedness: Union[ - Unset, None, OsidbApiV1TrackersListAffectsAffectedness - ] = UNSET, - affects_created_dt: Union[Unset, None, datetime.datetime] = UNSET, - affects_created_dt_date: Union[Unset, None, datetime.date] = UNSET, - affects_created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - affects_created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - affects_created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - affects_created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - affects_created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - affects_created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - affects_embargoed: Union[Unset, None, bool] = UNSET, - affects_flaw_components: Union[Unset, None, List[str]] = UNSET, - affects_flaw_created_dt: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_created_dt_date: Union[Unset, None, datetime.date] = UNSET, - affects_flaw_created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - affects_flaw_created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - affects_flaw_created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_cve_id: Union[Unset, None, str] = UNSET, - affects_flaw_cwe_id: Union[Unset, None, str] = UNSET, - affects_flaw_embargoed: Union[Unset, None, bool] = UNSET, - affects_flaw_impact: Union[ - Unset, None, OsidbApiV1TrackersListAffectsFlawImpact - ] = UNSET, - affects_flaw_reported_dt: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_reported_dt_date: Union[Unset, None, datetime.date] = UNSET, - affects_flaw_reported_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - affects_flaw_reported_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - affects_flaw_reported_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_reported_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_reported_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_reported_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_source: Union[ - Unset, None, OsidbApiV1TrackersListAffectsFlawSource - ] = UNSET, - affects_flaw_unembargo_dt: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - affects_flaw_updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - affects_flaw_updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - affects_flaw_updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - 
affects_flaw_updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_uuid: Union[Unset, None, str] = UNSET, - affects_impact: Union[Unset, None, OsidbApiV1TrackersListAffectsImpact] = UNSET, - affects_ps_component: Union[Unset, None, str] = UNSET, - affects_ps_module: Union[Unset, None, str] = UNSET, - affects_resolution: Union[ - Unset, None, OsidbApiV1TrackersListAffectsResolution - ] = UNSET, - affects_updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - affects_updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - affects_updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - affects_updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - affects_updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - affects_updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - affects_updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - affects_updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - affects_uuid: Union[Unset, None, str] = UNSET, - created_dt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_date: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - embargoed: Union[Unset, None, bool] = UNSET, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - external_system_id: Union[Unset, None, str] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, - include_meta_attr: Union[Unset, None, List[str]] = UNSET, - limit: Union[Unset, None, int] = UNSET, - offset: Union[Unset, None, int] = UNSET, - order: Union[Unset, None, List[OsidbApiV1TrackersListOrderItem]] = UNSET, - ps_update_stream: Union[Unset, None, str] = UNSET, - resolution: Union[Unset, None, str] = UNSET, - status: Union[Unset, None, str] = UNSET, - type: Union[Unset, None, OsidbApiV1TrackersListType] = UNSET, - updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - uuid: Union[Unset, None, str] = UNSET, + affects_affectedness: Union[Unset, OsidbApiV1TrackersListAffectsAffectedness] = UNSET, + affects_created_dt: Union[Unset, datetime.datetime] = UNSET, + affects_created_dt_date: Union[Unset, datetime.date] = UNSET, + affects_created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + affects_created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + affects_created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + affects_created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + affects_created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + affects_created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + affects_embargoed: Union[Unset, bool] = UNSET, + affects_flaw_components: Union[Unset, list[str]] = 
UNSET, + affects_flaw_created_dt: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_created_dt_date: Union[Unset, datetime.date] = UNSET, + affects_flaw_created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + affects_flaw_created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + affects_flaw_created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_cve_id: Union[Unset, str] = UNSET, + affects_flaw_cwe_id: Union[Unset, str] = UNSET, + affects_flaw_embargoed: Union[Unset, bool] = UNSET, + affects_flaw_impact: Union[Unset, OsidbApiV1TrackersListAffectsFlawImpact] = UNSET, + affects_flaw_reported_dt: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_reported_dt_date: Union[Unset, datetime.date] = UNSET, + affects_flaw_reported_dt_date_gte: Union[Unset, datetime.date] = UNSET, + affects_flaw_reported_dt_date_lte: Union[Unset, datetime.date] = UNSET, + affects_flaw_reported_dt_gt: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_reported_dt_gte: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_reported_dt_lt: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_reported_dt_lte: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_source: Union[Unset, OsidbApiV1TrackersListAffectsFlawSource] = UNSET, + affects_flaw_unembargo_dt: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_updated_dt: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_updated_dt_date: Union[Unset, datetime.date] = UNSET, + affects_flaw_updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + affects_flaw_updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + affects_flaw_updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_uuid: Union[Unset, UUID] = UNSET, + affects_impact: Union[Unset, OsidbApiV1TrackersListAffectsImpact] = UNSET, + affects_ps_component: Union[Unset, str] = UNSET, + affects_ps_module: Union[Unset, str] = UNSET, + affects_resolution: Union[Unset, OsidbApiV1TrackersListAffectsResolution] = UNSET, + affects_updated_dt: Union[Unset, datetime.datetime] = UNSET, + affects_updated_dt_date: Union[Unset, datetime.date] = UNSET, + affects_updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + affects_updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + affects_updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + affects_updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + affects_updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + affects_updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + affects_uuid: Union[Unset, UUID] = UNSET, + created_dt: Union[Unset, datetime.datetime] = UNSET, + created_dt_date: Union[Unset, datetime.date] = UNSET, + created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + embargoed: Union[Unset, bool] = UNSET, + exclude_fields: Union[Unset, list[str]] = 
UNSET, + external_system_id: Union[Unset, str] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, + include_meta_attr: Union[Unset, list[str]] = UNSET, + limit: Union[Unset, int] = UNSET, + offset: Union[Unset, int] = UNSET, + order: Union[Unset, list[OsidbApiV1TrackersListOrderItem]] = UNSET, + ps_update_stream: Union[Unset, str] = UNSET, + resolution: Union[Unset, str] = UNSET, + status: Union[Unset, str] = UNSET, + type_: Union[Unset, OsidbApiV1TrackersListType] = UNSET, + updated_dt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_date: Union[Unset, datetime.date] = UNSET, + updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + uuid: Union[Unset, UUID] = UNSET, ) -> Optional[OsidbApiV1TrackersListResponse200]: - """ """ + """ + Args: + affects_affectedness (Union[Unset, OsidbApiV1TrackersListAffectsAffectedness]): + affects_created_dt (Union[Unset, datetime.datetime]): + affects_created_dt_date (Union[Unset, datetime.date]): + affects_created_dt_date_gte (Union[Unset, datetime.date]): + affects_created_dt_date_lte (Union[Unset, datetime.date]): + affects_created_dt_gt (Union[Unset, datetime.datetime]): + affects_created_dt_gte (Union[Unset, datetime.datetime]): + affects_created_dt_lt (Union[Unset, datetime.datetime]): + affects_created_dt_lte (Union[Unset, datetime.datetime]): + affects_embargoed (Union[Unset, bool]): + affects_flaw_components (Union[Unset, list[str]]): + affects_flaw_created_dt (Union[Unset, datetime.datetime]): + affects_flaw_created_dt_date (Union[Unset, datetime.date]): + affects_flaw_created_dt_date_gte (Union[Unset, datetime.date]): + affects_flaw_created_dt_date_lte (Union[Unset, datetime.date]): + affects_flaw_created_dt_gt (Union[Unset, datetime.datetime]): + affects_flaw_created_dt_gte (Union[Unset, datetime.datetime]): + affects_flaw_created_dt_lt (Union[Unset, datetime.datetime]): + affects_flaw_created_dt_lte (Union[Unset, datetime.datetime]): + affects_flaw_cve_id (Union[Unset, str]): + affects_flaw_cwe_id (Union[Unset, str]): + affects_flaw_embargoed (Union[Unset, bool]): + affects_flaw_impact (Union[Unset, OsidbApiV1TrackersListAffectsFlawImpact]): + affects_flaw_reported_dt (Union[Unset, datetime.datetime]): + affects_flaw_reported_dt_date (Union[Unset, datetime.date]): + affects_flaw_reported_dt_date_gte (Union[Unset, datetime.date]): + affects_flaw_reported_dt_date_lte (Union[Unset, datetime.date]): + affects_flaw_reported_dt_gt (Union[Unset, datetime.datetime]): + affects_flaw_reported_dt_gte (Union[Unset, datetime.datetime]): + affects_flaw_reported_dt_lt (Union[Unset, datetime.datetime]): + affects_flaw_reported_dt_lte (Union[Unset, datetime.datetime]): + affects_flaw_source (Union[Unset, OsidbApiV1TrackersListAffectsFlawSource]): + affects_flaw_unembargo_dt (Union[Unset, datetime.datetime]): + affects_flaw_updated_dt (Union[Unset, datetime.datetime]): + affects_flaw_updated_dt_date (Union[Unset, datetime.date]): + affects_flaw_updated_dt_date_gte (Union[Unset, datetime.date]): + affects_flaw_updated_dt_date_lte (Union[Unset, datetime.date]): + affects_flaw_updated_dt_gt (Union[Unset, datetime.datetime]): + affects_flaw_updated_dt_gte (Union[Unset, datetime.datetime]): + affects_flaw_updated_dt_lt (Union[Unset, datetime.datetime]): + 
affects_flaw_updated_dt_lte (Union[Unset, datetime.datetime]): + affects_flaw_uuid (Union[Unset, UUID]): + affects_impact (Union[Unset, OsidbApiV1TrackersListAffectsImpact]): + affects_ps_component (Union[Unset, str]): + affects_ps_module (Union[Unset, str]): + affects_resolution (Union[Unset, OsidbApiV1TrackersListAffectsResolution]): + affects_updated_dt (Union[Unset, datetime.datetime]): + affects_updated_dt_date (Union[Unset, datetime.date]): + affects_updated_dt_date_gte (Union[Unset, datetime.date]): + affects_updated_dt_date_lte (Union[Unset, datetime.date]): + affects_updated_dt_gt (Union[Unset, datetime.datetime]): + affects_updated_dt_gte (Union[Unset, datetime.datetime]): + affects_updated_dt_lt (Union[Unset, datetime.datetime]): + affects_updated_dt_lte (Union[Unset, datetime.datetime]): + affects_uuid (Union[Unset, UUID]): + created_dt (Union[Unset, datetime.datetime]): + created_dt_date (Union[Unset, datetime.date]): + created_dt_date_gte (Union[Unset, datetime.date]): + created_dt_date_lte (Union[Unset, datetime.date]): + created_dt_gt (Union[Unset, datetime.datetime]): + created_dt_gte (Union[Unset, datetime.datetime]): + created_dt_lt (Union[Unset, datetime.datetime]): + created_dt_lte (Union[Unset, datetime.datetime]): + embargoed (Union[Unset, bool]): + exclude_fields (Union[Unset, list[str]]): + external_system_id (Union[Unset, str]): + include_fields (Union[Unset, list[str]]): + include_meta_attr (Union[Unset, list[str]]): + limit (Union[Unset, int]): + offset (Union[Unset, int]): + order (Union[Unset, list[OsidbApiV1TrackersListOrderItem]]): + ps_update_stream (Union[Unset, str]): + resolution (Union[Unset, str]): + status (Union[Unset, str]): + type_ (Union[Unset, OsidbApiV1TrackersListType]): + updated_dt (Union[Unset, datetime.datetime]): + updated_dt_date (Union[Unset, datetime.date]): + updated_dt_date_gte (Union[Unset, datetime.date]): + updated_dt_date_lte (Union[Unset, datetime.date]): + updated_dt_gt (Union[Unset, datetime.datetime]): + updated_dt_gte (Union[Unset, datetime.datetime]): + updated_dt_lt (Union[Unset, datetime.datetime]): + updated_dt_lte (Union[Unset, datetime.datetime]): + uuid (Union[Unset, UUID]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
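For orientation, a minimal caller-side sketch of the regenerated trackers-list helper follows (an editorial example, not part of the generated diff). The AuthenticatedClient import path and constructor arguments are assumptions; what the diff itself establishes is that the query filter is now accepted as `type_` (previously `type`), that UUID filters take `uuid.UUID` values, and that list parameters use the built-in `list[str]` annotation instead of `typing.List[str]`.

    from osidb_bindings.bindings.python_client.api.osidb import osidb_api_v1_trackers_list
    from osidb_bindings.bindings.python_client.client import AuthenticatedClient  # assumed import path

    # Constructor arguments below are assumptions for illustration only.
    client = AuthenticatedClient(base_url="https://osidb.example.com", token="...")

    page = osidb_api_v1_trackers_list.sync(
        client=client,
        ps_update_stream="example-stream",                        # hypothetical filter value
        include_fields=["uuid", "external_system_id", "status"],  # hypothetical field selection
        limit=10,
        # type_=OsidbApiV1TrackersListType.<member>  # renamed from `type`; member names are not shown in this diff
    )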
+ + Returns: + OsidbApiV1TrackersListResponse200 + """ return sync_detailed( client=client, @@ -1168,7 +1243,7 @@ def sync( ps_update_stream=ps_update_stream, resolution=resolution, status=status, - type=type, + type_=type_, updated_dt=updated_dt, updated_dt_date=updated_dt_date, updated_dt_date_gte=updated_dt_date_gte, @@ -1181,102 +1256,189 @@ def sync( ).parsed -async def async_detailed( +async def asyncio_detailed( *, client: AuthenticatedClient, - affects_affectedness: Union[ - Unset, None, OsidbApiV1TrackersListAffectsAffectedness - ] = UNSET, - affects_created_dt: Union[Unset, None, datetime.datetime] = UNSET, - affects_created_dt_date: Union[Unset, None, datetime.date] = UNSET, - affects_created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - affects_created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - affects_created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - affects_created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - affects_created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - affects_created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - affects_embargoed: Union[Unset, None, bool] = UNSET, - affects_flaw_components: Union[Unset, None, List[str]] = UNSET, - affects_flaw_created_dt: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_created_dt_date: Union[Unset, None, datetime.date] = UNSET, - affects_flaw_created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - affects_flaw_created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - affects_flaw_created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_cve_id: Union[Unset, None, str] = UNSET, - affects_flaw_cwe_id: Union[Unset, None, str] = UNSET, - affects_flaw_embargoed: Union[Unset, None, bool] = UNSET, - affects_flaw_impact: Union[ - Unset, None, OsidbApiV1TrackersListAffectsFlawImpact - ] = UNSET, - affects_flaw_reported_dt: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_reported_dt_date: Union[Unset, None, datetime.date] = UNSET, - affects_flaw_reported_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - affects_flaw_reported_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - affects_flaw_reported_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_reported_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_reported_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_reported_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_source: Union[ - Unset, None, OsidbApiV1TrackersListAffectsFlawSource - ] = UNSET, - affects_flaw_unembargo_dt: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - affects_flaw_updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - affects_flaw_updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - affects_flaw_updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_updated_dt_lte: Union[Unset, None, datetime.datetime] = 
UNSET, - affects_flaw_uuid: Union[Unset, None, str] = UNSET, - affects_impact: Union[Unset, None, OsidbApiV1TrackersListAffectsImpact] = UNSET, - affects_ps_component: Union[Unset, None, str] = UNSET, - affects_ps_module: Union[Unset, None, str] = UNSET, - affects_resolution: Union[ - Unset, None, OsidbApiV1TrackersListAffectsResolution - ] = UNSET, - affects_updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - affects_updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - affects_updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - affects_updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - affects_updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - affects_updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - affects_updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - affects_updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - affects_uuid: Union[Unset, None, str] = UNSET, - created_dt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_date: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - embargoed: Union[Unset, None, bool] = UNSET, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - external_system_id: Union[Unset, None, str] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, - include_meta_attr: Union[Unset, None, List[str]] = UNSET, - limit: Union[Unset, None, int] = UNSET, - offset: Union[Unset, None, int] = UNSET, - order: Union[Unset, None, List[OsidbApiV1TrackersListOrderItem]] = UNSET, - ps_update_stream: Union[Unset, None, str] = UNSET, - resolution: Union[Unset, None, str] = UNSET, - status: Union[Unset, None, str] = UNSET, - type: Union[Unset, None, OsidbApiV1TrackersListType] = UNSET, - updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - uuid: Union[Unset, None, str] = UNSET, + affects_affectedness: Union[Unset, OsidbApiV1TrackersListAffectsAffectedness] = UNSET, + affects_created_dt: Union[Unset, datetime.datetime] = UNSET, + affects_created_dt_date: Union[Unset, datetime.date] = UNSET, + affects_created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + affects_created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + affects_created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + affects_created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + affects_created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + affects_created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + affects_embargoed: Union[Unset, bool] = UNSET, + affects_flaw_components: Union[Unset, list[str]] = UNSET, + affects_flaw_created_dt: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_created_dt_date: Union[Unset, datetime.date] = UNSET, + 
affects_flaw_created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + affects_flaw_created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + affects_flaw_created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_cve_id: Union[Unset, str] = UNSET, + affects_flaw_cwe_id: Union[Unset, str] = UNSET, + affects_flaw_embargoed: Union[Unset, bool] = UNSET, + affects_flaw_impact: Union[Unset, OsidbApiV1TrackersListAffectsFlawImpact] = UNSET, + affects_flaw_reported_dt: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_reported_dt_date: Union[Unset, datetime.date] = UNSET, + affects_flaw_reported_dt_date_gte: Union[Unset, datetime.date] = UNSET, + affects_flaw_reported_dt_date_lte: Union[Unset, datetime.date] = UNSET, + affects_flaw_reported_dt_gt: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_reported_dt_gte: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_reported_dt_lt: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_reported_dt_lte: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_source: Union[Unset, OsidbApiV1TrackersListAffectsFlawSource] = UNSET, + affects_flaw_unembargo_dt: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_updated_dt: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_updated_dt_date: Union[Unset, datetime.date] = UNSET, + affects_flaw_updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + affects_flaw_updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + affects_flaw_updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_uuid: Union[Unset, UUID] = UNSET, + affects_impact: Union[Unset, OsidbApiV1TrackersListAffectsImpact] = UNSET, + affects_ps_component: Union[Unset, str] = UNSET, + affects_ps_module: Union[Unset, str] = UNSET, + affects_resolution: Union[Unset, OsidbApiV1TrackersListAffectsResolution] = UNSET, + affects_updated_dt: Union[Unset, datetime.datetime] = UNSET, + affects_updated_dt_date: Union[Unset, datetime.date] = UNSET, + affects_updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + affects_updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + affects_updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + affects_updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + affects_updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + affects_updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + affects_uuid: Union[Unset, UUID] = UNSET, + created_dt: Union[Unset, datetime.datetime] = UNSET, + created_dt_date: Union[Unset, datetime.date] = UNSET, + created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + embargoed: Union[Unset, bool] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + external_system_id: Union[Unset, str] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, + include_meta_attr: Union[Unset, 
list[str]] = UNSET, + limit: Union[Unset, int] = UNSET, + offset: Union[Unset, int] = UNSET, + order: Union[Unset, list[OsidbApiV1TrackersListOrderItem]] = UNSET, + ps_update_stream: Union[Unset, str] = UNSET, + resolution: Union[Unset, str] = UNSET, + status: Union[Unset, str] = UNSET, + type_: Union[Unset, OsidbApiV1TrackersListType] = UNSET, + updated_dt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_date: Union[Unset, datetime.date] = UNSET, + updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + uuid: Union[Unset, UUID] = UNSET, ) -> Response[OsidbApiV1TrackersListResponse200]: + """ + Args: + affects_affectedness (Union[Unset, OsidbApiV1TrackersListAffectsAffectedness]): + affects_created_dt (Union[Unset, datetime.datetime]): + affects_created_dt_date (Union[Unset, datetime.date]): + affects_created_dt_date_gte (Union[Unset, datetime.date]): + affects_created_dt_date_lte (Union[Unset, datetime.date]): + affects_created_dt_gt (Union[Unset, datetime.datetime]): + affects_created_dt_gte (Union[Unset, datetime.datetime]): + affects_created_dt_lt (Union[Unset, datetime.datetime]): + affects_created_dt_lte (Union[Unset, datetime.datetime]): + affects_embargoed (Union[Unset, bool]): + affects_flaw_components (Union[Unset, list[str]]): + affects_flaw_created_dt (Union[Unset, datetime.datetime]): + affects_flaw_created_dt_date (Union[Unset, datetime.date]): + affects_flaw_created_dt_date_gte (Union[Unset, datetime.date]): + affects_flaw_created_dt_date_lte (Union[Unset, datetime.date]): + affects_flaw_created_dt_gt (Union[Unset, datetime.datetime]): + affects_flaw_created_dt_gte (Union[Unset, datetime.datetime]): + affects_flaw_created_dt_lt (Union[Unset, datetime.datetime]): + affects_flaw_created_dt_lte (Union[Unset, datetime.datetime]): + affects_flaw_cve_id (Union[Unset, str]): + affects_flaw_cwe_id (Union[Unset, str]): + affects_flaw_embargoed (Union[Unset, bool]): + affects_flaw_impact (Union[Unset, OsidbApiV1TrackersListAffectsFlawImpact]): + affects_flaw_reported_dt (Union[Unset, datetime.datetime]): + affects_flaw_reported_dt_date (Union[Unset, datetime.date]): + affects_flaw_reported_dt_date_gte (Union[Unset, datetime.date]): + affects_flaw_reported_dt_date_lte (Union[Unset, datetime.date]): + affects_flaw_reported_dt_gt (Union[Unset, datetime.datetime]): + affects_flaw_reported_dt_gte (Union[Unset, datetime.datetime]): + affects_flaw_reported_dt_lt (Union[Unset, datetime.datetime]): + affects_flaw_reported_dt_lte (Union[Unset, datetime.datetime]): + affects_flaw_source (Union[Unset, OsidbApiV1TrackersListAffectsFlawSource]): + affects_flaw_unembargo_dt (Union[Unset, datetime.datetime]): + affects_flaw_updated_dt (Union[Unset, datetime.datetime]): + affects_flaw_updated_dt_date (Union[Unset, datetime.date]): + affects_flaw_updated_dt_date_gte (Union[Unset, datetime.date]): + affects_flaw_updated_dt_date_lte (Union[Unset, datetime.date]): + affects_flaw_updated_dt_gt (Union[Unset, datetime.datetime]): + affects_flaw_updated_dt_gte (Union[Unset, datetime.datetime]): + affects_flaw_updated_dt_lt (Union[Unset, datetime.datetime]): + affects_flaw_updated_dt_lte (Union[Unset, datetime.datetime]): + affects_flaw_uuid (Union[Unset, UUID]): + affects_impact (Union[Unset, 
OsidbApiV1TrackersListAffectsImpact]): + affects_ps_component (Union[Unset, str]): + affects_ps_module (Union[Unset, str]): + affects_resolution (Union[Unset, OsidbApiV1TrackersListAffectsResolution]): + affects_updated_dt (Union[Unset, datetime.datetime]): + affects_updated_dt_date (Union[Unset, datetime.date]): + affects_updated_dt_date_gte (Union[Unset, datetime.date]): + affects_updated_dt_date_lte (Union[Unset, datetime.date]): + affects_updated_dt_gt (Union[Unset, datetime.datetime]): + affects_updated_dt_gte (Union[Unset, datetime.datetime]): + affects_updated_dt_lt (Union[Unset, datetime.datetime]): + affects_updated_dt_lte (Union[Unset, datetime.datetime]): + affects_uuid (Union[Unset, UUID]): + created_dt (Union[Unset, datetime.datetime]): + created_dt_date (Union[Unset, datetime.date]): + created_dt_date_gte (Union[Unset, datetime.date]): + created_dt_date_lte (Union[Unset, datetime.date]): + created_dt_gt (Union[Unset, datetime.datetime]): + created_dt_gte (Union[Unset, datetime.datetime]): + created_dt_lt (Union[Unset, datetime.datetime]): + created_dt_lte (Union[Unset, datetime.datetime]): + embargoed (Union[Unset, bool]): + exclude_fields (Union[Unset, list[str]]): + external_system_id (Union[Unset, str]): + include_fields (Union[Unset, list[str]]): + include_meta_attr (Union[Unset, list[str]]): + limit (Union[Unset, int]): + offset (Union[Unset, int]): + order (Union[Unset, list[OsidbApiV1TrackersListOrderItem]]): + ps_update_stream (Union[Unset, str]): + resolution (Union[Unset, str]): + status (Union[Unset, str]): + type_ (Union[Unset, OsidbApiV1TrackersListType]): + updated_dt (Union[Unset, datetime.datetime]): + updated_dt_date (Union[Unset, datetime.date]): + updated_dt_date_gte (Union[Unset, datetime.date]): + updated_dt_date_lte (Union[Unset, datetime.date]): + updated_dt_gt (Union[Unset, datetime.datetime]): + updated_dt_gte (Union[Unset, datetime.datetime]): + updated_dt_lt (Union[Unset, datetime.datetime]): + updated_dt_lte (Union[Unset, datetime.datetime]): + uuid (Union[Unset, UUID]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[OsidbApiV1TrackersListResponse200] + """ + kwargs = _get_kwargs( client=client, affects_affectedness=affects_affectedness, @@ -1353,7 +1515,7 @@ async def async_detailed( ps_update_stream=ps_update_stream, resolution=resolution, status=status, - type=type, + type_=type_, updated_dt=updated_dt, updated_dt_date=updated_dt_date, updated_dt_date_gte=updated_dt_date_gte, @@ -1373,109 +1535,194 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( +async def asyncio( *, client: AuthenticatedClient, - affects_affectedness: Union[ - Unset, None, OsidbApiV1TrackersListAffectsAffectedness - ] = UNSET, - affects_created_dt: Union[Unset, None, datetime.datetime] = UNSET, - affects_created_dt_date: Union[Unset, None, datetime.date] = UNSET, - affects_created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - affects_created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - affects_created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - affects_created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - affects_created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - affects_created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - affects_embargoed: Union[Unset, None, bool] = UNSET, - affects_flaw_components: Union[Unset, None, List[str]] = UNSET, - affects_flaw_created_dt: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_created_dt_date: Union[Unset, None, datetime.date] = UNSET, - affects_flaw_created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - affects_flaw_created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - affects_flaw_created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_cve_id: Union[Unset, None, str] = UNSET, - affects_flaw_cwe_id: Union[Unset, None, str] = UNSET, - affects_flaw_embargoed: Union[Unset, None, bool] = UNSET, - affects_flaw_impact: Union[ - Unset, None, OsidbApiV1TrackersListAffectsFlawImpact - ] = UNSET, - affects_flaw_reported_dt: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_reported_dt_date: Union[Unset, None, datetime.date] = UNSET, - affects_flaw_reported_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - affects_flaw_reported_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - affects_flaw_reported_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_reported_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_reported_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_reported_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_source: Union[ - Unset, None, OsidbApiV1TrackersListAffectsFlawSource - ] = UNSET, - affects_flaw_unembargo_dt: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - affects_flaw_updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - affects_flaw_updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - affects_flaw_updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - 
affects_flaw_updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - affects_flaw_uuid: Union[Unset, None, str] = UNSET, - affects_impact: Union[Unset, None, OsidbApiV1TrackersListAffectsImpact] = UNSET, - affects_ps_component: Union[Unset, None, str] = UNSET, - affects_ps_module: Union[Unset, None, str] = UNSET, - affects_resolution: Union[ - Unset, None, OsidbApiV1TrackersListAffectsResolution - ] = UNSET, - affects_updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - affects_updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - affects_updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - affects_updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - affects_updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - affects_updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - affects_updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - affects_updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - affects_uuid: Union[Unset, None, str] = UNSET, - created_dt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_date: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - created_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - created_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - created_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - embargoed: Union[Unset, None, bool] = UNSET, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - external_system_id: Union[Unset, None, str] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, - include_meta_attr: Union[Unset, None, List[str]] = UNSET, - limit: Union[Unset, None, int] = UNSET, - offset: Union[Unset, None, int] = UNSET, - order: Union[Unset, None, List[OsidbApiV1TrackersListOrderItem]] = UNSET, - ps_update_stream: Union[Unset, None, str] = UNSET, - resolution: Union[Unset, None, str] = UNSET, - status: Union[Unset, None, str] = UNSET, - type: Union[Unset, None, OsidbApiV1TrackersListType] = UNSET, - updated_dt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_date: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_gte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_date_lte: Union[Unset, None, datetime.date] = UNSET, - updated_dt_gt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_gte: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lt: Union[Unset, None, datetime.datetime] = UNSET, - updated_dt_lte: Union[Unset, None, datetime.datetime] = UNSET, - uuid: Union[Unset, None, str] = UNSET, + affects_affectedness: Union[Unset, OsidbApiV1TrackersListAffectsAffectedness] = UNSET, + affects_created_dt: Union[Unset, datetime.datetime] = UNSET, + affects_created_dt_date: Union[Unset, datetime.date] = UNSET, + affects_created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + affects_created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + affects_created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + affects_created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + affects_created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + affects_created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + affects_embargoed: 
Union[Unset, bool] = UNSET, + affects_flaw_components: Union[Unset, list[str]] = UNSET, + affects_flaw_created_dt: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_created_dt_date: Union[Unset, datetime.date] = UNSET, + affects_flaw_created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + affects_flaw_created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + affects_flaw_created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_cve_id: Union[Unset, str] = UNSET, + affects_flaw_cwe_id: Union[Unset, str] = UNSET, + affects_flaw_embargoed: Union[Unset, bool] = UNSET, + affects_flaw_impact: Union[Unset, OsidbApiV1TrackersListAffectsFlawImpact] = UNSET, + affects_flaw_reported_dt: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_reported_dt_date: Union[Unset, datetime.date] = UNSET, + affects_flaw_reported_dt_date_gte: Union[Unset, datetime.date] = UNSET, + affects_flaw_reported_dt_date_lte: Union[Unset, datetime.date] = UNSET, + affects_flaw_reported_dt_gt: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_reported_dt_gte: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_reported_dt_lt: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_reported_dt_lte: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_source: Union[Unset, OsidbApiV1TrackersListAffectsFlawSource] = UNSET, + affects_flaw_unembargo_dt: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_updated_dt: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_updated_dt_date: Union[Unset, datetime.date] = UNSET, + affects_flaw_updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + affects_flaw_updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + affects_flaw_updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + affects_flaw_uuid: Union[Unset, UUID] = UNSET, + affects_impact: Union[Unset, OsidbApiV1TrackersListAffectsImpact] = UNSET, + affects_ps_component: Union[Unset, str] = UNSET, + affects_ps_module: Union[Unset, str] = UNSET, + affects_resolution: Union[Unset, OsidbApiV1TrackersListAffectsResolution] = UNSET, + affects_updated_dt: Union[Unset, datetime.datetime] = UNSET, + affects_updated_dt_date: Union[Unset, datetime.date] = UNSET, + affects_updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + affects_updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + affects_updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + affects_updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + affects_updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + affects_updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + affects_uuid: Union[Unset, UUID] = UNSET, + created_dt: Union[Unset, datetime.datetime] = UNSET, + created_dt_date: Union[Unset, datetime.date] = UNSET, + created_dt_date_gte: Union[Unset, datetime.date] = UNSET, + created_dt_date_lte: Union[Unset, datetime.date] = UNSET, + created_dt_gt: Union[Unset, datetime.datetime] = UNSET, + created_dt_gte: Union[Unset, datetime.datetime] = UNSET, + created_dt_lt: Union[Unset, datetime.datetime] = UNSET, + created_dt_lte: Union[Unset, datetime.datetime] = UNSET, + 
embargoed: Union[Unset, bool] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + external_system_id: Union[Unset, str] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, + include_meta_attr: Union[Unset, list[str]] = UNSET, + limit: Union[Unset, int] = UNSET, + offset: Union[Unset, int] = UNSET, + order: Union[Unset, list[OsidbApiV1TrackersListOrderItem]] = UNSET, + ps_update_stream: Union[Unset, str] = UNSET, + resolution: Union[Unset, str] = UNSET, + status: Union[Unset, str] = UNSET, + type_: Union[Unset, OsidbApiV1TrackersListType] = UNSET, + updated_dt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_date: Union[Unset, datetime.date] = UNSET, + updated_dt_date_gte: Union[Unset, datetime.date] = UNSET, + updated_dt_date_lte: Union[Unset, datetime.date] = UNSET, + updated_dt_gt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_gte: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lt: Union[Unset, datetime.datetime] = UNSET, + updated_dt_lte: Union[Unset, datetime.datetime] = UNSET, + uuid: Union[Unset, UUID] = UNSET, ) -> Optional[OsidbApiV1TrackersListResponse200]: - """ """ + """ + Args: + affects_affectedness (Union[Unset, OsidbApiV1TrackersListAffectsAffectedness]): + affects_created_dt (Union[Unset, datetime.datetime]): + affects_created_dt_date (Union[Unset, datetime.date]): + affects_created_dt_date_gte (Union[Unset, datetime.date]): + affects_created_dt_date_lte (Union[Unset, datetime.date]): + affects_created_dt_gt (Union[Unset, datetime.datetime]): + affects_created_dt_gte (Union[Unset, datetime.datetime]): + affects_created_dt_lt (Union[Unset, datetime.datetime]): + affects_created_dt_lte (Union[Unset, datetime.datetime]): + affects_embargoed (Union[Unset, bool]): + affects_flaw_components (Union[Unset, list[str]]): + affects_flaw_created_dt (Union[Unset, datetime.datetime]): + affects_flaw_created_dt_date (Union[Unset, datetime.date]): + affects_flaw_created_dt_date_gte (Union[Unset, datetime.date]): + affects_flaw_created_dt_date_lte (Union[Unset, datetime.date]): + affects_flaw_created_dt_gt (Union[Unset, datetime.datetime]): + affects_flaw_created_dt_gte (Union[Unset, datetime.datetime]): + affects_flaw_created_dt_lt (Union[Unset, datetime.datetime]): + affects_flaw_created_dt_lte (Union[Unset, datetime.datetime]): + affects_flaw_cve_id (Union[Unset, str]): + affects_flaw_cwe_id (Union[Unset, str]): + affects_flaw_embargoed (Union[Unset, bool]): + affects_flaw_impact (Union[Unset, OsidbApiV1TrackersListAffectsFlawImpact]): + affects_flaw_reported_dt (Union[Unset, datetime.datetime]): + affects_flaw_reported_dt_date (Union[Unset, datetime.date]): + affects_flaw_reported_dt_date_gte (Union[Unset, datetime.date]): + affects_flaw_reported_dt_date_lte (Union[Unset, datetime.date]): + affects_flaw_reported_dt_gt (Union[Unset, datetime.datetime]): + affects_flaw_reported_dt_gte (Union[Unset, datetime.datetime]): + affects_flaw_reported_dt_lt (Union[Unset, datetime.datetime]): + affects_flaw_reported_dt_lte (Union[Unset, datetime.datetime]): + affects_flaw_source (Union[Unset, OsidbApiV1TrackersListAffectsFlawSource]): + affects_flaw_unembargo_dt (Union[Unset, datetime.datetime]): + affects_flaw_updated_dt (Union[Unset, datetime.datetime]): + affects_flaw_updated_dt_date (Union[Unset, datetime.date]): + affects_flaw_updated_dt_date_gte (Union[Unset, datetime.date]): + affects_flaw_updated_dt_date_lte (Union[Unset, datetime.date]): + affects_flaw_updated_dt_gt (Union[Unset, datetime.datetime]): + affects_flaw_updated_dt_gte (Union[Unset, 
datetime.datetime]): + affects_flaw_updated_dt_lt (Union[Unset, datetime.datetime]): + affects_flaw_updated_dt_lte (Union[Unset, datetime.datetime]): + affects_flaw_uuid (Union[Unset, UUID]): + affects_impact (Union[Unset, OsidbApiV1TrackersListAffectsImpact]): + affects_ps_component (Union[Unset, str]): + affects_ps_module (Union[Unset, str]): + affects_resolution (Union[Unset, OsidbApiV1TrackersListAffectsResolution]): + affects_updated_dt (Union[Unset, datetime.datetime]): + affects_updated_dt_date (Union[Unset, datetime.date]): + affects_updated_dt_date_gte (Union[Unset, datetime.date]): + affects_updated_dt_date_lte (Union[Unset, datetime.date]): + affects_updated_dt_gt (Union[Unset, datetime.datetime]): + affects_updated_dt_gte (Union[Unset, datetime.datetime]): + affects_updated_dt_lt (Union[Unset, datetime.datetime]): + affects_updated_dt_lte (Union[Unset, datetime.datetime]): + affects_uuid (Union[Unset, UUID]): + created_dt (Union[Unset, datetime.datetime]): + created_dt_date (Union[Unset, datetime.date]): + created_dt_date_gte (Union[Unset, datetime.date]): + created_dt_date_lte (Union[Unset, datetime.date]): + created_dt_gt (Union[Unset, datetime.datetime]): + created_dt_gte (Union[Unset, datetime.datetime]): + created_dt_lt (Union[Unset, datetime.datetime]): + created_dt_lte (Union[Unset, datetime.datetime]): + embargoed (Union[Unset, bool]): + exclude_fields (Union[Unset, list[str]]): + external_system_id (Union[Unset, str]): + include_fields (Union[Unset, list[str]]): + include_meta_attr (Union[Unset, list[str]]): + limit (Union[Unset, int]): + offset (Union[Unset, int]): + order (Union[Unset, list[OsidbApiV1TrackersListOrderItem]]): + ps_update_stream (Union[Unset, str]): + resolution (Union[Unset, str]): + status (Union[Unset, str]): + type_ (Union[Unset, OsidbApiV1TrackersListType]): + updated_dt (Union[Unset, datetime.datetime]): + updated_dt_date (Union[Unset, datetime.date]): + updated_dt_date_gte (Union[Unset, datetime.date]): + updated_dt_date_lte (Union[Unset, datetime.date]): + updated_dt_gt (Union[Unset, datetime.datetime]): + updated_dt_gte (Union[Unset, datetime.datetime]): + updated_dt_lt (Union[Unset, datetime.datetime]): + updated_dt_lte (Union[Unset, datetime.datetime]): + uuid (Union[Unset, UUID]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + OsidbApiV1TrackersListResponse200 + """ return ( - await async_detailed( + await asyncio_detailed( client=client, affects_affectedness=affects_affectedness, affects_created_dt=affects_created_dt, @@ -1551,7 +1798,7 @@ async def async_( ps_update_stream=ps_update_stream, resolution=resolution, status=status, - type=type, + type_=type_, updated_dt=updated_dt, updated_dt_date=updated_dt_date, updated_dt_date_gte=updated_dt_date_gte, diff --git a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_trackers_retrieve.py b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_trackers_retrieve.py index 0edec26..8cef1fa 100644 --- a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_trackers_retrieve.py +++ b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_trackers_retrieve.py @@ -1,106 +1,111 @@ -from typing import Any, Dict, List, Optional, Union +from http import HTTPStatus +from typing import Any, Optional, Union +from uuid import UUID import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.osidb_api_v1_trackers_retrieve_response_200 import ( OsidbApiV1TrackersRetrieveResponse200, ) from ...types import UNSET, Response, Unset QUERY_PARAMS = { - "exclude_fields": List[str], - "include_fields": List[str], - "include_meta_attr": List[str], + "exclude_fields": list[str], + "include_fields": list[str], + "include_meta_attr": list[str], } def _get_kwargs( - uuid: str, + uuid: UUID, *, client: AuthenticatedClient, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, - include_meta_attr: Union[Unset, None, List[str]] = UNSET, -) -> Dict[str, Any]: - url = "{}/osidb/api/v1/trackers/{uuid}".format( - client.base_url, - uuid=uuid, - ) - - headers: Dict[str, Any] = client.get_headers() + exclude_fields: Union[Unset, list[str]] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, + include_meta_attr: Union[Unset, list[str]] = UNSET, +) -> dict[str, Any]: + params: dict[str, Any] = {} - json_exclude_fields: Union[Unset, None, List[str]] = UNSET + json_exclude_fields: Union[Unset, list[str]] = UNSET if not isinstance(exclude_fields, Unset): - if exclude_fields is None: - json_exclude_fields = None - else: - json_exclude_fields = exclude_fields + json_exclude_fields = exclude_fields + + params["exclude_fields"] = json_exclude_fields - json_include_fields: Union[Unset, None, List[str]] = UNSET + json_include_fields: Union[Unset, list[str]] = UNSET if not isinstance(include_fields, Unset): - if include_fields is None: - json_include_fields = None - else: - json_include_fields = include_fields + json_include_fields = include_fields - json_include_meta_attr: Union[Unset, None, List[str]] = UNSET + params["include_fields"] = json_include_fields + + json_include_meta_attr: Union[Unset, list[str]] = UNSET if not isinstance(include_meta_attr, Unset): - if include_meta_attr is None: - json_include_meta_attr = None - else: - json_include_meta_attr = include_meta_attr + json_include_meta_attr = include_meta_attr + + params["include_meta_attr"] = json_include_meta_attr - params: Dict[str, Any] = { - "exclude_fields": json_exclude_fields, - "include_fields": json_include_fields, - "include_meta_attr": json_include_meta_attr, - } params = {k: v for k, v in params.items() if v is not UNSET and v is not None} - return { - "url": url, - "headers": headers, + _kwargs: dict[str, Any] = { + "url": 
f"{client.base_url}//osidb/api/v1/trackers/{uuid}".format( + uuid=uuid, + ), "params": params, } + return _kwargs + def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[OsidbApiV1TrackersRetrieveResponse200]: if response.status_code == 200: + # } _response_200 = response.json() response_200: OsidbApiV1TrackersRetrieveResponse200 if isinstance(_response_200, Unset): response_200 = UNSET else: - response_200 = OsidbApiV1TrackersRetrieveResponse200.from_dict( - _response_200 - ) + response_200 = OsidbApiV1TrackersRetrieveResponse200.from_dict(_response_200) return response_200 - return None def _build_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[OsidbApiV1TrackersRetrieveResponse200]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) def sync_detailed( - uuid: str, + uuid: UUID, *, client: AuthenticatedClient, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, - include_meta_attr: Union[Unset, None, List[str]] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, + include_meta_attr: Union[Unset, list[str]] = UNSET, ) -> Response[OsidbApiV1TrackersRetrieveResponse200]: + """ + Args: + uuid (UUID): + exclude_fields (Union[Unset, list[str]]): + include_fields (Union[Unset, list[str]]): + include_meta_attr (Union[Unset, list[str]]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1TrackersRetrieveResponse200] + """ + kwargs = _get_kwargs( uuid=uuid, client=client, @@ -117,18 +122,31 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( - uuid: str, + uuid: UUID, *, client: AuthenticatedClient, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, - include_meta_attr: Union[Unset, None, List[str]] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, + include_meta_attr: Union[Unset, list[str]] = UNSET, ) -> Optional[OsidbApiV1TrackersRetrieveResponse200]: - """ """ + """ + Args: + uuid (UUID): + exclude_fields (Union[Unset, list[str]]): + include_fields (Union[Unset, list[str]]): + include_meta_attr (Union[Unset, list[str]]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + OsidbApiV1TrackersRetrieveResponse200 + """ return sync_detailed( uuid=uuid, @@ -139,14 +157,29 @@ def sync( ).parsed -async def async_detailed( - uuid: str, +async def asyncio_detailed( + uuid: UUID, *, client: AuthenticatedClient, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, - include_meta_attr: Union[Unset, None, List[str]] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, + include_meta_attr: Union[Unset, list[str]] = UNSET, ) -> Response[OsidbApiV1TrackersRetrieveResponse200]: + """ + Args: + uuid (UUID): + exclude_fields (Union[Unset, list[str]]): + include_fields (Union[Unset, list[str]]): + include_meta_attr (Union[Unset, list[str]]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1TrackersRetrieveResponse200] + """ + kwargs = _get_kwargs( uuid=uuid, client=client, @@ -163,21 +196,34 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( - uuid: str, +async def asyncio( + uuid: UUID, *, client: AuthenticatedClient, - exclude_fields: Union[Unset, None, List[str]] = UNSET, - include_fields: Union[Unset, None, List[str]] = UNSET, - include_meta_attr: Union[Unset, None, List[str]] = UNSET, + exclude_fields: Union[Unset, list[str]] = UNSET, + include_fields: Union[Unset, list[str]] = UNSET, + include_meta_attr: Union[Unset, list[str]] = UNSET, ) -> Optional[OsidbApiV1TrackersRetrieveResponse200]: - """ """ + """ + Args: + uuid (UUID): + exclude_fields (Union[Unset, list[str]]): + include_fields (Union[Unset, list[str]]): + include_meta_attr (Union[Unset, list[str]]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
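Similarly, the retrieve module's coroutine helpers are renamed from `async_detailed`/`async_` to `asyncio_detailed`/`asyncio`, and the path parameter is now typed as `uuid.UUID` rather than `str`. A hedged sketch (editorial, not part of the patch), assuming an already-configured `client` and placeholder field names:

    from uuid import UUID
    from osidb_bindings.bindings.python_client.api.osidb import osidb_api_v1_trackers_retrieve

    async def fetch_tracker(client, tracker_id: str):
        # asyncio_detailed returns the full Response wrapper; .parsed carries the model
        response = await osidb_api_v1_trackers_retrieve.asyncio_detailed(
            uuid=UUID(tracker_id),
            client=client,
            include_fields=["uuid", "status", "resolution"],  # hypothetical field selection
        )
        return response.parsed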
+ + Returns: + OsidbApiV1TrackersRetrieveResponse200 + """ return ( - await async_detailed( + await asyncio_detailed( uuid=uuid, client=client, exclude_fields=exclude_fields, diff --git a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_trackers_update.py b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_trackers_update.py index b27805f..eacdbf4 100644 --- a/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_trackers_update.py +++ b/osidb_bindings/bindings/python_client/api/osidb/osidb_api_v1_trackers_update.py @@ -1,8 +1,10 @@ -from typing import Any, Dict, Optional +from http import HTTPStatus +from typing import Any, Optional, Union +from uuid import UUID import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.osidb_api_v1_trackers_update_response_200 import ( OsidbApiV1TrackersUpdateResponse200, ) @@ -10,43 +12,45 @@ from ...types import UNSET, Response, Unset QUERY_PARAMS = {} + REQUEST_BODY_TYPE = Tracker def _get_kwargs( - uuid: str, + uuid: UUID, *, client: AuthenticatedClient, - form_data: Tracker, - multipart_data: Tracker, - json_body: Tracker, -) -> Dict[str, Any]: - url = "{}/osidb/api/v1/trackers/{uuid}".format( - client.base_url, - uuid=uuid, - ) - - headers: Dict[str, Any] = client.get_headers() + body: Union[ + Tracker, + Tracker, + Tracker, + ], +) -> dict[str, Any]: + headers: dict[str, Any] = client.get_headers() + + _kwargs: dict[str, Any] = { + "url": f"{client.base_url}//osidb/api/v1/trackers/{uuid}".format( + uuid=uuid, + ), + } - json_json_body: Dict[str, Any] = UNSET - if not isinstance(json_body, Unset): - json_body.to_dict() + if isinstance(body, Tracker): + _json_body: dict[str, Any] = UNSET + if not isinstance(body, Unset): + _json_body = body.to_dict() - multipart_multipart_data: Dict[str, Any] = UNSET - if not isinstance(multipart_data, Unset): - multipart_data.to_multipart() + _kwargs["json"] = _json_body + headers["Content-Type"] = "application/json" - return { - "url": url, - "headers": headers, - "json": form_data.to_dict(), - } + _kwargs["headers"] = headers + return _kwargs def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[OsidbApiV1TrackersUpdateResponse200]: if response.status_code == 200: + # } _response_200 = response.json() response_200: OsidbApiV1TrackersUpdateResponse200 if isinstance(_response_200, Unset): @@ -55,34 +59,50 @@ def _parse_response( response_200 = OsidbApiV1TrackersUpdateResponse200.from_dict(_response_200) return response_200 - return None def _build_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[OsidbApiV1TrackersUpdateResponse200]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) def sync_detailed( - uuid: str, + uuid: UUID, *, client: AuthenticatedClient, - form_data: Tracker, - multipart_data: Tracker, - json_body: Tracker, + body: Union[ + Tracker, + Tracker, + Tracker, + ], ) -> Response[OsidbApiV1TrackersUpdateResponse200]: + """ + Args: + uuid (UUID): + bugzilla_api_key (str): + jira_api_key (str): + body (Tracker): Tracker serializer + body (Tracker): Tracker serializer + body (Tracker): Tracker serializer + + Raises: + 
errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1TrackersUpdateResponse200] + """ + kwargs = _get_kwargs( uuid=uuid, client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) response = requests.put( @@ -93,42 +113,74 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( - uuid: str, + uuid: UUID, *, client: AuthenticatedClient, - form_data: Tracker, - multipart_data: Tracker, - json_body: Tracker, + body: Union[ + Tracker, + Tracker, + Tracker, + ], ) -> Optional[OsidbApiV1TrackersUpdateResponse200]: - """ """ + """ + Args: + uuid (UUID): + bugzilla_api_key (str): + jira_api_key (str): + body (Tracker): Tracker serializer + body (Tracker): Tracker serializer + body (Tracker): Tracker serializer + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + OsidbApiV1TrackersUpdateResponse200 + """ return sync_detailed( uuid=uuid, client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ).parsed -async def async_detailed( - uuid: str, +async def asyncio_detailed( + uuid: UUID, *, client: AuthenticatedClient, - form_data: Tracker, - multipart_data: Tracker, - json_body: Tracker, + body: Union[ + Tracker, + Tracker, + Tracker, + ], ) -> Response[OsidbApiV1TrackersUpdateResponse200]: + """ + Args: + uuid (UUID): + bugzilla_api_key (str): + jira_api_key (str): + body (Tracker): Tracker serializer + body (Tracker): Tracker serializer + body (Tracker): Tracker serializer + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbApiV1TrackersUpdateResponse200] + """ + kwargs = _get_kwargs( uuid=uuid, client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) async with client.get_async_session().put( @@ -139,25 +191,40 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( - uuid: str, +async def asyncio( + uuid: UUID, *, client: AuthenticatedClient, - form_data: Tracker, - multipart_data: Tracker, - json_body: Tracker, + body: Union[ + Tracker, + Tracker, + Tracker, + ], ) -> Optional[OsidbApiV1TrackersUpdateResponse200]: - """ """ + """ + Args: + uuid (UUID): + bugzilla_api_key (str): + jira_api_key (str): + body (Tracker): Tracker serializer + body (Tracker): Tracker serializer + body (Tracker): Tracker serializer + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + OsidbApiV1TrackersUpdateResponse200 + """ return ( - await async_detailed( + await asyncio_detailed( uuid=uuid, client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) ).parsed diff --git a/osidb_bindings/bindings/python_client/api/osidb/osidb_healthy_retrieve.py b/osidb_bindings/bindings/python_client/api/osidb/osidb_healthy_retrieve.py index 8368471..6092e3c 100644 --- a/osidb_bindings/bindings/python_client/api/osidb/osidb_healthy_retrieve.py +++ b/osidb_bindings/bindings/python_client/api/osidb/osidb_healthy_retrieve.py @@ -1,8 +1,9 @@ -from typing import Any, Dict, Optional +from http import HTTPStatus +from typing import Any, Optional, Union import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.osidb_healthy_retrieve_response_200 import ( OsidbHealthyRetrieveResponse200, ) @@ -14,23 +15,19 @@ def _get_kwargs( *, client: AuthenticatedClient, -) -> Dict[str, Any]: - url = "{}/osidb/healthy".format( - client.base_url, - ) - - headers: Dict[str, Any] = client.get_headers() - - return { - "url": url, - "headers": headers, +) -> dict[str, Any]: + _kwargs: dict[str, Any] = { + "url": f"{client.base_url}/osidb/healthy", } + return _kwargs + def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[OsidbHealthyRetrieveResponse200]: if response.status_code == 200: + # } _response_200 = response.json() response_200: OsidbHealthyRetrieveResponse200 if isinstance(_response_200, Unset): @@ -39,17 +36,16 @@ def _parse_response( response_200 = OsidbHealthyRetrieveResponse200.from_dict(_response_200) return response_200 - return None def _build_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[OsidbHealthyRetrieveResponse200]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) @@ -57,6 +53,16 @@ def sync_detailed( *, client: AuthenticatedClient, ) -> Response[OsidbHealthyRetrieveResponse200]: + """unauthenticated view providing healthcheck on osidb service + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbHealthyRetrieveResponse200] + """ + kwargs = _get_kwargs( client=client, ) @@ -69,24 +75,42 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( *, client: AuthenticatedClient, ) -> Optional[OsidbHealthyRetrieveResponse200]: - """unauthenticated view providing healthcheck on osidb service""" + """unauthenticated view providing healthcheck on osidb service + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
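The regenerated update helpers collapse the former form_data, multipart_data and json_body arguments into a single body parameter, take the resource identifier as a uuid.UUID instead of a str, and rename the coroutine entry points from async_detailed/async_ to asyncio_detailed/asyncio. A minimal call-site sketch for the tracker update wrapper, assuming the modules are importable under the paths shown in the diff headers and that client (an AuthenticatedClient) and tracker (a Tracker instance with updated_dt set) already exist; those two names are placeholders, not part of this patch:

    from uuid import UUID

    from osidb_bindings.bindings.python_client.api.osidb import osidb_api_v1_trackers_update

    updated = osidb_api_v1_trackers_update.sync(
        uuid=UUID("11111111-2222-3333-4444-555555555555"),  # placeholder tracker UUID
        client=client,   # existing AuthenticatedClient (assumption)
        body=tracker,    # existing Tracker instance; replaces form_data/multipart_data/json_body
    )
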
+ + Returns: + OsidbHealthyRetrieveResponse200 + """ return sync_detailed( client=client, ).parsed -async def async_detailed( +async def asyncio_detailed( *, client: AuthenticatedClient, ) -> Response[OsidbHealthyRetrieveResponse200]: + """unauthenticated view providing healthcheck on osidb service + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbHealthyRetrieveResponse200] + """ + kwargs = _get_kwargs( client=client, ) @@ -99,17 +123,25 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( +async def asyncio( *, client: AuthenticatedClient, ) -> Optional[OsidbHealthyRetrieveResponse200]: - """unauthenticated view providing healthcheck on osidb service""" + """unauthenticated view providing healthcheck on osidb service + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + OsidbHealthyRetrieveResponse200 + """ return ( - await async_detailed( + await asyncio_detailed( client=client, ) ).parsed diff --git a/osidb_bindings/bindings/python_client/api/osidb/osidb_whoami_retrieve.py b/osidb_bindings/bindings/python_client/api/osidb/osidb_whoami_retrieve.py index 92aa8c2..8773022 100644 --- a/osidb_bindings/bindings/python_client/api/osidb/osidb_whoami_retrieve.py +++ b/osidb_bindings/bindings/python_client/api/osidb/osidb_whoami_retrieve.py @@ -1,8 +1,9 @@ -from typing import Any, Dict, Optional +from http import HTTPStatus +from typing import Any, Optional, Union import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.osidb_whoami_retrieve_response_200 import OsidbWhoamiRetrieveResponse200 from ...types import UNSET, Response, Unset @@ -12,23 +13,19 @@ def _get_kwargs( *, client: AuthenticatedClient, -) -> Dict[str, Any]: - url = "{}/osidb/whoami".format( - client.base_url, - ) - - headers: Dict[str, Any] = client.get_headers() - - return { - "url": url, - "headers": headers, +) -> dict[str, Any]: + _kwargs: dict[str, Any] = { + "url": f"{client.base_url}/osidb/whoami", } + return _kwargs + def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[OsidbWhoamiRetrieveResponse200]: if response.status_code == 200: + # } _response_200 = response.json() response_200: OsidbWhoamiRetrieveResponse200 if isinstance(_response_200, Unset): @@ -37,17 +34,16 @@ def _parse_response( response_200 = OsidbWhoamiRetrieveResponse200.from_dict(_response_200) return response_200 - return None def _build_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[OsidbWhoamiRetrieveResponse200]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) @@ -55,6 +51,16 @@ def sync_detailed( *, client: AuthenticatedClient, ) -> Response[OsidbWhoamiRetrieveResponse200]: + """View that provides 
information about the currently logged-in user + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbWhoamiRetrieveResponse200] + """ + kwargs = _get_kwargs( client=client, ) @@ -67,24 +73,42 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( *, client: AuthenticatedClient, ) -> Optional[OsidbWhoamiRetrieveResponse200]: - """View that provides information about the currently logged-in user""" + """View that provides information about the currently logged-in user + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + OsidbWhoamiRetrieveResponse200 + """ return sync_detailed( client=client, ).parsed -async def async_detailed( +async def asyncio_detailed( *, client: AuthenticatedClient, ) -> Response[OsidbWhoamiRetrieveResponse200]: + """View that provides information about the currently logged-in user + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[OsidbWhoamiRetrieveResponse200] + """ + kwargs = _get_kwargs( client=client, ) @@ -97,17 +121,25 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( +async def asyncio( *, client: AuthenticatedClient, ) -> Optional[OsidbWhoamiRetrieveResponse200]: - """View that provides information about the currently logged-in user""" + """View that provides information about the currently logged-in user + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + OsidbWhoamiRetrieveResponse200 + """ return ( - await async_detailed( + await asyncio_detailed( client=client, ) ).parsed diff --git a/osidb_bindings/bindings/python_client/api/trackers/trackers_api_v1_file_create.py b/osidb_bindings/bindings/python_client/api/trackers/trackers_api_v1_file_create.py index 2e13e54..0482614 100644 --- a/osidb_bindings/bindings/python_client/api/trackers/trackers_api_v1_file_create.py +++ b/osidb_bindings/bindings/python_client/api/trackers/trackers_api_v1_file_create.py @@ -1,8 +1,9 @@ -from typing import Any, Dict, Optional +from http import HTTPStatus +from typing import Any, Optional, Union import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.flaw_uuid_list import FlawUUIDList from ...models.trackers_api_v1_file_create_response_200 import ( TrackersApiV1FileCreateResponse200, @@ -10,41 +11,42 @@ from ...types import UNSET, Response, Unset QUERY_PARAMS = {} + REQUEST_BODY_TYPE = FlawUUIDList def _get_kwargs( *, client: AuthenticatedClient, - form_data: FlawUUIDList, - multipart_data: FlawUUIDList, - json_body: FlawUUIDList, -) -> Dict[str, Any]: - url = "{}/trackers/api/v1/file".format( - client.base_url, - ) - - headers: Dict[str, Any] = client.get_headers() + body: Union[ + FlawUUIDList, + FlawUUIDList, + FlawUUIDList, + ], +) -> dict[str, Any]: + headers: dict[str, Any] = client.get_headers() + + _kwargs: dict[str, Any] = { + "url": f"{client.base_url}/trackers/api/v1/file", + } - json_json_body: Dict[str, Any] = UNSET - if not isinstance(json_body, Unset): - json_body.to_dict() + if isinstance(body, FlawUUIDList): + _json_body: dict[str, Any] = UNSET + if not isinstance(body, Unset): + _json_body = body.to_dict() - multipart_multipart_data: Dict[str, Any] = UNSET - if not isinstance(multipart_data, Unset): - multipart_data.to_multipart() + _kwargs["json"] = _json_body + headers["Content-Type"] = "application/json" - return { - "url": url, - "headers": headers, - "json": form_data.to_dict(), - } + _kwargs["headers"] = headers + return _kwargs def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[TrackersApiV1FileCreateResponse200]: if response.status_code == 200: + # } _response_200 = response.json() response_200: TrackersApiV1FileCreateResponse200 if isinstance(_response_200, Unset): @@ -53,32 +55,46 @@ def _parse_response( response_200 = TrackersApiV1FileCreateResponse200.from_dict(_response_200) return response_200 - return None def _build_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[TrackersApiV1FileCreateResponse200]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) def sync_detailed( *, client: AuthenticatedClient, - form_data: FlawUUIDList, - multipart_data: FlawUUIDList, - json_body: FlawUUIDList, + body: Union[ + FlawUUIDList, + FlawUUIDList, + FlawUUIDList, + ], ) -> Response[TrackersApiV1FileCreateResponse200]: + """Given a list of flaws, generates a list of suggested trackers to file. 
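The same renaming applies to the read-only endpoints; a short sketch of the whoami lookup under the new coroutine names, assuming client is an AuthenticatedClient that already has an aiohttp session attached (a placeholder, not defined in this patch):

    from osidb_bindings.bindings.python_client.api.osidb import osidb_whoami_retrieve

    async def show_current_user(client):
        # previously async_detailed()/async_(); now asyncio_detailed()/asyncio()
        detailed = await osidb_whoami_retrieve.asyncio_detailed(client=client)
        print(detailed.status_code)  # now an http.HTTPStatus member
        return detailed.parsed       # OsidbWhoamiRetrieveResponse200, or None
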
+ + Args: + body (FlawUUIDList): + body (FlawUUIDList): + body (FlawUUIDList): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[TrackersApiV1FileCreateResponse200] + """ + kwargs = _get_kwargs( client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) response = requests.post( @@ -89,38 +105,66 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( *, client: AuthenticatedClient, - form_data: FlawUUIDList, - multipart_data: FlawUUIDList, - json_body: FlawUUIDList, + body: Union[ + FlawUUIDList, + FlawUUIDList, + FlawUUIDList, + ], ) -> Optional[TrackersApiV1FileCreateResponse200]: - """Given a list of flaws, generates a list of suggested trackers to file.""" + """Given a list of flaws, generates a list of suggested trackers to file. + + Args: + body (FlawUUIDList): + body (FlawUUIDList): + body (FlawUUIDList): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + TrackersApiV1FileCreateResponse200 + """ return sync_detailed( client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ).parsed -async def async_detailed( +async def asyncio_detailed( *, client: AuthenticatedClient, - form_data: FlawUUIDList, - multipart_data: FlawUUIDList, - json_body: FlawUUIDList, + body: Union[ + FlawUUIDList, + FlawUUIDList, + FlawUUIDList, + ], ) -> Response[TrackersApiV1FileCreateResponse200]: + """Given a list of flaws, generates a list of suggested trackers to file. + + Args: + body (FlawUUIDList): + body (FlawUUIDList): + body (FlawUUIDList): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[TrackersApiV1FileCreateResponse200] + """ + kwargs = _get_kwargs( client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) async with client.get_async_session().post( @@ -131,23 +175,36 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( +async def asyncio( *, client: AuthenticatedClient, - form_data: FlawUUIDList, - multipart_data: FlawUUIDList, - json_body: FlawUUIDList, + body: Union[ + FlawUUIDList, + FlawUUIDList, + FlawUUIDList, + ], ) -> Optional[TrackersApiV1FileCreateResponse200]: - """Given a list of flaws, generates a list of suggested trackers to file.""" + """Given a list of flaws, generates a list of suggested trackers to file. + + Args: + body (FlawUUIDList): + body (FlawUUIDList): + body (FlawUUIDList): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + TrackersApiV1FileCreateResponse200 + """ return ( - await async_detailed( + await asyncio_detailed( client=client, - form_data=form_data, - multipart_data=multipart_data, - json_body=json_body, + body=body, ) ).parsed diff --git a/osidb_bindings/bindings/python_client/api/workflows/workflows_api_v1_workflows_adjust_create.py b/osidb_bindings/bindings/python_client/api/workflows/workflows_api_v1_workflows_adjust_create.py index 3759077..adab9a3 100644 --- a/osidb_bindings/bindings/python_client/api/workflows/workflows_api_v1_workflows_adjust_create.py +++ b/osidb_bindings/bindings/python_client/api/workflows/workflows_api_v1_workflows_adjust_create.py @@ -1,8 +1,9 @@ -from typing import Any, Dict, Optional +from http import HTTPStatus +from typing import Any, Optional, Union import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.workflows_api_v1_workflows_adjust_create_response_200 import ( WorkflowsApiV1WorkflowsAdjustCreateResponse200, ) @@ -15,45 +16,39 @@ def _get_kwargs( id: str, *, client: AuthenticatedClient, -) -> Dict[str, Any]: - url = "{}/workflows/api/v1/workflows/{id}/adjust".format( - client.base_url, - id=id, - ) - - headers: Dict[str, Any] = client.get_headers() - - return { - "url": url, - "headers": headers, +) -> dict[str, Any]: + _kwargs: dict[str, Any] = { + "url": f"{client.base_url}//workflows/api/v1/workflows/{id}/adjust".format( + id=id, + ), } + return _kwargs + def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[WorkflowsApiV1WorkflowsAdjustCreateResponse200]: if response.status_code == 200: + # } _response_200 = response.json() response_200: WorkflowsApiV1WorkflowsAdjustCreateResponse200 if isinstance(_response_200, Unset): response_200 = UNSET else: - response_200 = WorkflowsApiV1WorkflowsAdjustCreateResponse200.from_dict( - _response_200 - ) + response_200 = WorkflowsApiV1WorkflowsAdjustCreateResponse200.from_dict(_response_200) return response_200 - return None def _build_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[WorkflowsApiV1WorkflowsAdjustCreateResponse200]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) @@ -62,6 +57,25 @@ def sync_detailed( *, client: AuthenticatedClient, ) -> Response[WorkflowsApiV1WorkflowsAdjustCreateResponse200]: + """workflow adjustion API endpoint + + adjust workflow classification of flaw identified by UUID or CVE + and return its workflow:state classification (new if changed and old otherwise) + + adjust operation is idempotent so when the classification + is already adjusted running it results in no operation + + Args: + id (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[WorkflowsApiV1WorkflowsAdjustCreateResponse200] + """ + kwargs = _get_kwargs( id=id, client=client, @@ -75,7 +89,7 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( @@ -89,7 +103,18 @@ def sync( and return its workflow:state classification (new if changed and old otherwise) adjust operation is idempotent so when the classification - is already adjusted running it results in no operation""" + is already adjusted running it results in no operation + + Args: + id (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + WorkflowsApiV1WorkflowsAdjustCreateResponse200 + """ return sync_detailed( id=id, @@ -97,11 +122,30 @@ def sync( ).parsed -async def async_detailed( +async def asyncio_detailed( id: str, *, client: AuthenticatedClient, ) -> Response[WorkflowsApiV1WorkflowsAdjustCreateResponse200]: + """workflow adjustion API endpoint + + adjust workflow classification of flaw identified by UUID or CVE + and return its workflow:state classification (new if changed and old otherwise) + + adjust operation is idempotent so when the classification + is already adjusted running it results in no operation + + Args: + id (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[WorkflowsApiV1WorkflowsAdjustCreateResponse200] + """ + kwargs = _get_kwargs( id=id, client=client, @@ -115,10 +159,10 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( +async def asyncio( id: str, *, client: AuthenticatedClient, @@ -129,10 +173,21 @@ async def async_( and return its workflow:state classification (new if changed and old otherwise) adjust operation is idempotent so when the classification - is already adjusted running it results in no operation""" + is already adjusted running it results in no operation + + Args: + id (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + WorkflowsApiV1WorkflowsAdjustCreateResponse200 + """ return ( - await async_detailed( + await asyncio_detailed( id=id, client=client, ) diff --git a/osidb_bindings/bindings/python_client/api/workflows/workflows_api_v1_workflows_retrieve.py b/osidb_bindings/bindings/python_client/api/workflows/workflows_api_v1_workflows_retrieve.py index 5622123..894ed97 100644 --- a/osidb_bindings/bindings/python_client/api/workflows/workflows_api_v1_workflows_retrieve.py +++ b/osidb_bindings/bindings/python_client/api/workflows/workflows_api_v1_workflows_retrieve.py @@ -1,8 +1,9 @@ -from typing import Any, Dict, Optional +from http import HTTPStatus +from typing import Any, Optional, Union import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.workflows_api_v1_workflows_retrieve_response_200 import ( WorkflowsApiV1WorkflowsRetrieveResponse200, ) @@ -14,44 +15,37 @@ def _get_kwargs( *, client: AuthenticatedClient, -) -> Dict[str, Any]: - url = "{}/workflows/api/v1/workflows".format( - client.base_url, - ) - - headers: Dict[str, Any] = client.get_headers() - - return { - "url": url, - "headers": headers, +) -> dict[str, Any]: + _kwargs: dict[str, Any] = { + "url": f"{client.base_url}/workflows/api/v1/workflows", } + return _kwargs + def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[WorkflowsApiV1WorkflowsRetrieveResponse200]: if response.status_code == 200: + # } _response_200 = response.json() response_200: WorkflowsApiV1WorkflowsRetrieveResponse200 if isinstance(_response_200, Unset): response_200 = UNSET else: - response_200 = WorkflowsApiV1WorkflowsRetrieveResponse200.from_dict( - _response_200 - ) + response_200 = WorkflowsApiV1WorkflowsRetrieveResponse200.from_dict(_response_200) return response_200 - return None def _build_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[WorkflowsApiV1WorkflowsRetrieveResponse200]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) @@ -59,6 +53,16 @@ def sync_detailed( *, client: AuthenticatedClient, ) -> Response[WorkflowsApiV1WorkflowsRetrieveResponse200]: + """workflow info API endpoint + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[WorkflowsApiV1WorkflowsRetrieveResponse200] + """ + kwargs = _get_kwargs( client=client, ) @@ -71,24 +75,42 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( *, client: AuthenticatedClient, ) -> Optional[WorkflowsApiV1WorkflowsRetrieveResponse200]: - """workflow info API endpoint""" + """workflow info API endpoint + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + WorkflowsApiV1WorkflowsRetrieveResponse200 + """ return sync_detailed( client=client, ).parsed -async def async_detailed( +async def asyncio_detailed( *, client: AuthenticatedClient, ) -> Response[WorkflowsApiV1WorkflowsRetrieveResponse200]: + """workflow info API endpoint + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[WorkflowsApiV1WorkflowsRetrieveResponse200] + """ + kwargs = _get_kwargs( client=client, ) @@ -101,17 +123,25 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( +async def asyncio( *, client: AuthenticatedClient, ) -> Optional[WorkflowsApiV1WorkflowsRetrieveResponse200]: - """workflow info API endpoint""" + """workflow info API endpoint + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + WorkflowsApiV1WorkflowsRetrieveResponse200 + """ return ( - await async_detailed( + await asyncio_detailed( client=client, ) ).parsed diff --git a/osidb_bindings/bindings/python_client/api/workflows/workflows_api_v1_workflows_retrieve_2.py b/osidb_bindings/bindings/python_client/api/workflows/workflows_api_v1_workflows_retrieve_2.py index e29fd1f..89e107e 100644 --- a/osidb_bindings/bindings/python_client/api/workflows/workflows_api_v1_workflows_retrieve_2.py +++ b/osidb_bindings/bindings/python_client/api/workflows/workflows_api_v1_workflows_retrieve_2.py @@ -1,8 +1,9 @@ -from typing import Any, Dict, Optional, Union +from http import HTTPStatus +from typing import Any, Optional, Union import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.workflows_api_v1_workflows_retrieve_2_response_200 import ( WorkflowsApiV1WorkflowsRetrieve2Response200, ) @@ -17,52 +18,47 @@ def _get_kwargs( id: str, *, client: AuthenticatedClient, - verbose: Union[Unset, None, bool] = UNSET, -) -> Dict[str, Any]: - url = "{}/workflows/api/v1/workflows/{id}".format( - client.base_url, - id=id, - ) + verbose: Union[Unset, bool] = UNSET, +) -> dict[str, Any]: + params: dict[str, Any] = {} - headers: Dict[str, Any] = client.get_headers() + params["verbose"] = verbose - params: Dict[str, Any] = { - "verbose": verbose, - } params = {k: v for k, v in params.items() if v is not UNSET and v is not None} - return { - "url": url, - "headers": headers, + _kwargs: dict[str, Any] = { + "url": f"{client.base_url}//workflows/api/v1/workflows/{id}".format( + id=id, + ), "params": params, } + return _kwargs + def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[WorkflowsApiV1WorkflowsRetrieve2Response200]: if response.status_code == 200: + # } _response_200 = response.json() response_200: WorkflowsApiV1WorkflowsRetrieve2Response200 if isinstance(_response_200, Unset): response_200 = UNSET else: - response_200 = WorkflowsApiV1WorkflowsRetrieve2Response200.from_dict( - _response_200 - ) + response_200 = WorkflowsApiV1WorkflowsRetrieve2Response200.from_dict(_response_200) return response_200 - return None def _build_response( - *, response: requests.Response + 
*, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[WorkflowsApiV1WorkflowsRetrieve2Response200]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) @@ -70,8 +66,29 @@ def sync_detailed( id: str, *, client: AuthenticatedClient, - verbose: Union[Unset, None, bool] = UNSET, + verbose: Union[Unset, bool] = UNSET, ) -> Response[WorkflowsApiV1WorkflowsRetrieve2Response200]: + """workflow classification API endpoint + + for flaw identified by UUID or CVE returns its workflow:state classification + + params: + + verbose - return also workflows with flaw classification + which represents the reasoning of the result + + Args: + id (str): + verbose (Union[Unset, bool]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[WorkflowsApiV1WorkflowsRetrieve2Response200] + """ + kwargs = _get_kwargs( id=id, client=client, @@ -86,14 +103,14 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( id: str, *, client: AuthenticatedClient, - verbose: Union[Unset, None, bool] = UNSET, + verbose: Union[Unset, bool] = UNSET, ) -> Optional[WorkflowsApiV1WorkflowsRetrieve2Response200]: """workflow classification API endpoint @@ -102,7 +119,19 @@ def sync( params: verbose - return also workflows with flaw classification - which represents the reasoning of the result""" + which represents the reasoning of the result + + Args: + id (str): + verbose (Union[Unset, bool]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + WorkflowsApiV1WorkflowsRetrieve2Response200 + """ return sync_detailed( id=id, @@ -111,12 +140,33 @@ def sync( ).parsed -async def async_detailed( +async def asyncio_detailed( id: str, *, client: AuthenticatedClient, - verbose: Union[Unset, None, bool] = UNSET, + verbose: Union[Unset, bool] = UNSET, ) -> Response[WorkflowsApiV1WorkflowsRetrieve2Response200]: + """workflow classification API endpoint + + for flaw identified by UUID or CVE returns its workflow:state classification + + params: + + verbose - return also workflows with flaw classification + which represents the reasoning of the result + + Args: + id (str): + verbose (Union[Unset, bool]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[WorkflowsApiV1WorkflowsRetrieve2Response200] + """ + kwargs = _get_kwargs( id=id, client=client, @@ -131,14 +181,14 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( +async def asyncio( id: str, *, client: AuthenticatedClient, - verbose: Union[Unset, None, bool] = UNSET, + verbose: Union[Unset, bool] = UNSET, ) -> Optional[WorkflowsApiV1WorkflowsRetrieve2Response200]: """workflow classification API endpoint @@ -147,10 +197,22 @@ async def async_( params: verbose - return also workflows with flaw classification - which represents the reasoning of the result""" + which represents the reasoning of the result + + Args: + id (str): + verbose (Union[Unset, bool]): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + WorkflowsApiV1WorkflowsRetrieve2Response200 + """ return ( - await async_detailed( + await asyncio_detailed( id=id, client=client, verbose=verbose, diff --git a/osidb_bindings/bindings/python_client/api/workflows/workflows_healthy_retrieve.py b/osidb_bindings/bindings/python_client/api/workflows/workflows_healthy_retrieve.py index bc1d14a..3872fe5 100644 --- a/osidb_bindings/bindings/python_client/api/workflows/workflows_healthy_retrieve.py +++ b/osidb_bindings/bindings/python_client/api/workflows/workflows_healthy_retrieve.py @@ -1,8 +1,9 @@ -from typing import Any, Dict, Optional +from http import HTTPStatus +from typing import Any, Optional, Union import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.workflows_healthy_retrieve_response_200 import ( WorkflowsHealthyRetrieveResponse200, ) @@ -14,23 +15,19 @@ def _get_kwargs( *, client: AuthenticatedClient, -) -> Dict[str, Any]: - url = "{}/workflows/healthy".format( - client.base_url, - ) - - headers: Dict[str, Any] = client.get_headers() - - return { - "url": url, - "headers": headers, +) -> dict[str, Any]: + _kwargs: dict[str, Any] = { + "url": f"{client.base_url}/workflows/healthy", } + return _kwargs + def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[WorkflowsHealthyRetrieveResponse200]: if response.status_code == 200: + # } _response_200 = response.json() response_200: WorkflowsHealthyRetrieveResponse200 if isinstance(_response_200, Unset): @@ -39,17 +36,16 @@ def _parse_response( response_200 = WorkflowsHealthyRetrieveResponse200.from_dict(_response_200) return response_200 - return None def _build_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[WorkflowsHealthyRetrieveResponse200]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) @@ -57,6 +53,16 @@ def sync_detailed( *, client: AuthenticatedClient, ) -> Response[WorkflowsHealthyRetrieveResponse200]: + """unauthenticated health check API endpoint + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. 
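Query parameters also lose the explicit None member from their types (Union[Unset, None, bool] becomes Union[Unset, bool]); unset and None values are filtered out of params before the request is sent. A sketch of the classification lookup with the optional verbose flag, where client is an assumed AuthenticatedClient and the id value is a placeholder:

    from osidb_bindings.bindings.python_client.api.workflows import (
        workflows_api_v1_workflows_retrieve_2,
    )

    classification = workflows_api_v1_workflows_retrieve_2.sync(
        id="CVE-2024-0001",  # placeholder flaw identifier (UUID or CVE)
        client=client,       # existing AuthenticatedClient (assumption)
        verbose=True,        # optional; omitted entirely when left UNSET
    )
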
+ httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[WorkflowsHealthyRetrieveResponse200] + """ + kwargs = _get_kwargs( client=client, ) @@ -69,24 +75,42 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( *, client: AuthenticatedClient, ) -> Optional[WorkflowsHealthyRetrieveResponse200]: - """unauthenticated health check API endpoint""" + """unauthenticated health check API endpoint + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + WorkflowsHealthyRetrieveResponse200 + """ return sync_detailed( client=client, ).parsed -async def async_detailed( +async def asyncio_detailed( *, client: AuthenticatedClient, ) -> Response[WorkflowsHealthyRetrieveResponse200]: + """unauthenticated health check API endpoint + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[WorkflowsHealthyRetrieveResponse200] + """ + kwargs = _get_kwargs( client=client, ) @@ -99,17 +123,25 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( +async def asyncio( *, client: AuthenticatedClient, ) -> Optional[WorkflowsHealthyRetrieveResponse200]: - """unauthenticated health check API endpoint""" + """unauthenticated health check API endpoint + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + WorkflowsHealthyRetrieveResponse200 + """ return ( - await async_detailed( + await asyncio_detailed( client=client, ) ).parsed diff --git a/osidb_bindings/bindings/python_client/api/workflows/workflows_retrieve.py b/osidb_bindings/bindings/python_client/api/workflows/workflows_retrieve.py index 89cc139..468f04f 100644 --- a/osidb_bindings/bindings/python_client/api/workflows/workflows_retrieve.py +++ b/osidb_bindings/bindings/python_client/api/workflows/workflows_retrieve.py @@ -1,8 +1,9 @@ -from typing import Any, Dict, Optional +from http import HTTPStatus +from typing import Any, Optional, Union import requests -from ...client import AuthenticatedClient +from ...client import AuthenticatedClient, Client from ...models.workflows_retrieve_response_200 import WorkflowsRetrieveResponse200 from ...types import UNSET, Response, Unset @@ -12,23 +13,19 @@ def _get_kwargs( *, client: AuthenticatedClient, -) -> Dict[str, Any]: - url = "{}/workflows/".format( - client.base_url, - ) - - headers: Dict[str, Any] = client.get_headers() - - return { - "url": url, - "headers": headers, +) -> dict[str, Any]: + _kwargs: dict[str, Any] = { + "url": f"{client.base_url}/workflows/", } + return _kwargs + def _parse_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Optional[WorkflowsRetrieveResponse200]: if response.status_code == 200: + # } _response_200 = response.json() response_200: WorkflowsRetrieveResponse200 if isinstance(_response_200, Unset): @@ -37,17 +34,16 @@ def _parse_response( response_200 = WorkflowsRetrieveResponse200.from_dict(_response_200) return response_200 - return None def _build_response( - *, response: requests.Response + *, client: Union[AuthenticatedClient, Client], response: requests.Response ) -> Response[WorkflowsRetrieveResponse200]: return Response( - status_code=response.status_code, + status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, - parsed=_parse_response(response=response), + parsed=_parse_response(client=client, response=response), ) @@ -55,6 +51,16 @@ def sync_detailed( *, client: AuthenticatedClient, ) -> Response[WorkflowsRetrieveResponse200]: + """index API endpoint listing available API endpoints + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[WorkflowsRetrieveResponse200] + """ + kwargs = _get_kwargs( client=client, ) @@ -67,24 +73,42 @@ def sync_detailed( ) response.raise_for_status() - return _build_response(response=response) + return _build_response(client=client, response=response) def sync( *, client: AuthenticatedClient, ) -> Optional[WorkflowsRetrieveResponse200]: - """index API endpoint listing available API endpoints""" + """index API endpoint listing available API endpoints + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + WorkflowsRetrieveResponse200 + """ return sync_detailed( client=client, ).parsed -async def async_detailed( +async def asyncio_detailed( *, client: AuthenticatedClient, ) -> Response[WorkflowsRetrieveResponse200]: + """index API endpoint listing available API endpoints + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[WorkflowsRetrieveResponse200] + """ + kwargs = _get_kwargs( client=client, ) @@ -97,17 +121,25 @@ async def async_detailed( resp.status_code = response.status resp._content = content - return _build_response(response=resp) + return _build_response(client=client, response=resp) -async def async_( +async def asyncio( *, client: AuthenticatedClient, ) -> Optional[WorkflowsRetrieveResponse200]: - """index API endpoint listing available API endpoints""" + """index API endpoint listing available API endpoints + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + WorkflowsRetrieveResponse200 + """ return ( - await async_detailed( + await asyncio_detailed( client=client, ) ).parsed diff --git a/osidb_bindings/bindings/python_client/client.py b/osidb_bindings/bindings/python_client/client.py index e4bc772..43c4e73 100644 --- a/osidb_bindings/bindings/python_client/client.py +++ b/osidb_bindings/bindings/python_client/client.py @@ -43,25 +43,19 @@ def with_timeout(self, timeout: float) -> "Client": class AuthenticatedClient(Client): """A Client which has been authenticated for use on secured endpoints""" - auth: Union[None, Tuple[str, str], Type[requests.auth.AuthBase]] = attr.ib( - None, kw_only=True - ) + auth: Union[None, Tuple[str, str], Type[requests.auth.AuthBase]] = attr.ib(None, kw_only=True) async_session: Union[None, aiohttp.ClientSession] = attr.ib(None, kw_only=True) def get_auth(self) -> Union[None, Tuple[str, str], Type[requests.auth.AuthBase]]: return self.auth - def with_auth( - self, auth: Union[None, Tuple[str, str], Type[requests.auth.AuthBase]] - ) -> "Client": + def with_auth(self, auth: Union[None, Tuple[str, str], Type[requests.auth.AuthBase]]) -> "Client": """Get a new client matching this one with a new auth method""" return attr.evolve(self, auth=auth) def get_async_session(self) -> Union[None, aiohttp.ClientSession]: return self.async_session - def with_async_session( - self, async_session: Union[None, aiohttp.ClientSession] - ) -> "Client": + def with_async_session(self, async_session: Union[None, aiohttp.ClientSession]) -> "Client": """Get a new client matching this one with a new async session""" return attr.evolve(self, async_session=async_session) diff --git a/osidb_bindings/bindings/python_client/errors.py b/osidb_bindings/bindings/python_client/errors.py new file mode 100644 index 0000000..5f92e76 --- /dev/null +++ b/osidb_bindings/bindings/python_client/errors.py @@ -0,0 +1,16 @@ +"""Contains shared errors types that can be raised from API functions""" + + +class UnexpectedStatus(Exception): + """Raised by api functions when the response status an undocumented status and Client.raise_on_unexpected_status is True""" + + def __init__(self, status_code: int, content: bytes): + self.status_code = status_code + self.content = content + + super().__init__( + f"Unexpected status code: 
{status_code}\n\nResponse content:\n{content.decode(errors='ignore')}" + ) + + +__all__ = ["UnexpectedStatus"] diff --git a/osidb_bindings/bindings/python_client/models/__init__.py b/osidb_bindings/bindings/python_client/models/__init__.py index 502a24e..8fbbb9e 100644 --- a/osidb_bindings/bindings/python_client/models/__init__.py +++ b/osidb_bindings/bindings/python_client/models/__init__.py @@ -1,4 +1,4 @@ -""" Contains all the data models used in inputs/outputs """ +"""Contains all the data models used in inputs/outputs""" from .affect import Affect from .affect_bulk_post_put_response import AffectBulkPostPutResponse @@ -26,8 +26,8 @@ from .collectors_api_v1_status_retrieve_response_200_collectors_item_data import ( CollectorsApiV1StatusRetrieveResponse200CollectorsItemData, ) -from .collectors_api_v1_status_retrieve_response_200_collectors_item_error import ( - CollectorsApiV1StatusRetrieveResponse200CollectorsItemError, +from .collectors_api_v1_status_retrieve_response_200_collectors_item_error_type_0 import ( + CollectorsApiV1StatusRetrieveResponse200CollectorsItemErrorType0, ) from .collectors_api_v1_status_retrieve_response_200_collectors_item_state import ( CollectorsApiV1StatusRetrieveResponse200CollectorsItemState, @@ -377,3 +377,213 @@ ) from .workflows_healthy_retrieve_response_200 import WorkflowsHealthyRetrieveResponse200 from .workflows_retrieve_response_200 import WorkflowsRetrieveResponse200 + +__all__ = ( + "Affect", + "AffectBulkPostPutResponse", + "AffectBulkPut", + "AffectCVSS", + "AffectCVSSPost", + "AffectCVSSPut", + "AffectednessEnum", + "AffectPost", + "AffectReportData", + "Alert", + "AlertTypeEnum", + "Audit", + "AuthTokenCreateResponse200", + "AuthTokenRefreshCreateResponse200", + "AuthTokenRetrieveResponse200", + "AuthTokenVerifyCreateResponse200", + "BlankEnum", + "CollectorsApiV1StatusRetrieveResponse200", + "CollectorsApiV1StatusRetrieveResponse200CollectorsItem", + "CollectorsApiV1StatusRetrieveResponse200CollectorsItemData", + "CollectorsApiV1StatusRetrieveResponse200CollectorsItemErrorType0", + "CollectorsApiV1StatusRetrieveResponse200CollectorsItemState", + "CollectorsHealthyRetrieveResponse200", + "CollectorsRetrieveResponse200", + "Comment", + "CvssVersionEnum", + "EPSS", + "Erratum", + "ExploitOnlyReportData", + "ExploitOnlyReportDataSourceEnum", + "ExploitsApiV1CollectUpdateResponse200", + "ExploitsApiV1CveMapRetrieveResponse200", + "ExploitsApiV1CveMapRetrieveResponse200Cves", + "ExploitsApiV1EpssListResponse200", + "ExploitsApiV1FlawDataListResponse200", + "ExploitsApiV1ReportDataListResponse200", + "ExploitsApiV1ReportDateRetrieveResponse200", + "ExploitsApiV1ReportDateRetrieveResponse200ActionRequiredItem", + "ExploitsApiV1ReportDateRetrieveResponse200NoActionItem", + "ExploitsApiV1ReportDateRetrieveResponse200NotRelevantItem", + "ExploitsApiV1ReportExplanationsRetrieveResponse200", + "ExploitsApiV1ReportExplanationsRetrieveResponse200ExplanationsItem", + "ExploitsApiV1ReportPendingRetrieveResponse200", + "ExploitsApiV1ReportPendingRetrieveResponse200PendingActionsItem", + "ExploitsApiV1StatusRetrieveResponse200", + "ExploitsApiV1SupportedProductsListResponse200", + "Flaw", + "FlawAcknowledgment", + "FlawAcknowledgmentPost", + "FlawAcknowledgmentPut", + "FlawClassification", + "FlawClassificationState", + "FlawComment", + "FlawCommentPost", + "FlawCVSS", + "FlawCVSSPost", + "FlawCVSSPut", + "FlawPackageVersion", + "FlawPackageVersionPost", + "FlawPackageVersionPut", + "FlawPost", + "FlawPostClassification", + "FlawPostClassificationState", + 
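The new errors module defines only UnexpectedStatus. The Raises sections added to the docstrings advertise errors.UnexpectedStatus together with Client.raise_on_unexpected_status, while the requests-based sync helpers in this patch still call response.raise_for_status(), so a defensive caller may want to handle both. A sketch, with client and tracker_uuid as placeholders that are not part of this patch:

    import requests

    from osidb_bindings.bindings.python_client import errors
    from osidb_bindings.bindings.python_client.api.osidb import osidb_api_v1_trackers_retrieve

    try:
        tracker = osidb_api_v1_trackers_retrieve.sync(uuid=tracker_uuid, client=client)
    except errors.UnexpectedStatus as exc:
        # attributes exposed by the new exception: status_code (int) and content (bytes)
        print(exc.status_code, exc.content[:200])
    except requests.HTTPError as exc:
        # raise_for_status() is still what the generated sync helpers call here
        print("HTTP error:", exc)
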
"FlawReference", + "FlawReferencePost", + "FlawReferencePut", + "FlawReferenceType", + "FlawReportData", + "FlawUUIDList", + "FlawVersion", + "ImpactEnum", + "IssuerEnum", + "MajorIncidentStateEnum", + "MaturityPreliminaryEnum", + "ModuleComponent", + "NistCvssValidationEnum", + "OsidbApiV1AffectsBulkCreateResponse200", + "OsidbApiV1AffectsBulkDestroyResponse200", + "OsidbApiV1AffectsBulkUpdateResponse200", + "OsidbApiV1AffectsCreateResponse201", + "OsidbApiV1AffectsCvssScoresCreateResponse201", + "OsidbApiV1AffectsCvssScoresDestroyResponse200", + "OsidbApiV1AffectsCvssScoresListIssuer", + "OsidbApiV1AffectsCvssScoresListResponse200", + "OsidbApiV1AffectsCvssScoresRetrieveResponse200", + "OsidbApiV1AffectsCvssScoresUpdateResponse200", + "OsidbApiV1AffectsDestroyResponse200", + "OsidbApiV1AffectsListAffectedness", + "OsidbApiV1AffectsListCvssScoresIssuer", + "OsidbApiV1AffectsListFlawImpact", + "OsidbApiV1AffectsListFlawSource", + "OsidbApiV1AffectsListImpact", + "OsidbApiV1AffectsListOrderItem", + "OsidbApiV1AffectsListResolution", + "OsidbApiV1AffectsListResponse200", + "OsidbApiV1AffectsListTrackersType", + "OsidbApiV1AffectsRetrieveResponse200", + "OsidbApiV1AffectsUpdateResponse200", + "OsidbApiV1AlertsListAlertType", + "OsidbApiV1AlertsListParentModel", + "OsidbApiV1AlertsListResponse200", + "OsidbApiV1AlertsRetrieveResponse200", + "OsidbApiV1AuditListResponse200", + "OsidbApiV1AuditRetrieveResponse200", + "OsidbApiV1AuditUpdateResponse200", + "OsidbApiV1FlawsAcknowledgmentsCreateResponse201", + "OsidbApiV1FlawsAcknowledgmentsDestroyResponse200", + "OsidbApiV1FlawsAcknowledgmentsListResponse200", + "OsidbApiV1FlawsAcknowledgmentsRetrieveResponse200", + "OsidbApiV1FlawsAcknowledgmentsUpdateResponse200", + "OsidbApiV1FlawsCommentsCreateResponse201", + "OsidbApiV1FlawsCommentsListResponse200", + "OsidbApiV1FlawsCommentsRetrieveResponse200", + "OsidbApiV1FlawsCreateResponse201", + "OsidbApiV1FlawsCvssScoresCreateResponse201", + "OsidbApiV1FlawsCvssScoresDestroyResponse200", + "OsidbApiV1FlawsCvssScoresListIssuer", + "OsidbApiV1FlawsCvssScoresListResponse200", + "OsidbApiV1FlawsCvssScoresRetrieveResponse200", + "OsidbApiV1FlawsCvssScoresUpdateResponse200", + "OsidbApiV1FlawsListAffectsAffectedness", + "OsidbApiV1FlawsListAffectsImpact", + "OsidbApiV1FlawsListAffectsResolution", + "OsidbApiV1FlawsListAffectsTrackersType", + "OsidbApiV1FlawsListCvssScoresIssuer", + "OsidbApiV1FlawsListImpact", + "OsidbApiV1FlawsListMajorIncidentState", + "OsidbApiV1FlawsListNistCvssValidation", + "OsidbApiV1FlawsListOrderItem", + "OsidbApiV1FlawsListReferencesType", + "OsidbApiV1FlawsListRequiresCveDescription", + "OsidbApiV1FlawsListResponse200", + "OsidbApiV1FlawsListSource", + "OsidbApiV1FlawsListWorkflowStateItem", + "OsidbApiV1FlawsPackageVersionsCreateResponse201", + "OsidbApiV1FlawsPackageVersionsDestroyResponse200", + "OsidbApiV1FlawsPackageVersionsListResponse200", + "OsidbApiV1FlawsPackageVersionsRetrieveResponse200", + "OsidbApiV1FlawsPackageVersionsUpdateResponse200", + "OsidbApiV1FlawsPromoteCreateResponse200", + "OsidbApiV1FlawsReferencesCreateResponse201", + "OsidbApiV1FlawsReferencesDestroyResponse200", + "OsidbApiV1FlawsReferencesListResponse200", + "OsidbApiV1FlawsReferencesListType", + "OsidbApiV1FlawsReferencesRetrieveResponse200", + "OsidbApiV1FlawsReferencesUpdateResponse200", + "OsidbApiV1FlawsRejectCreateResponse200", + "OsidbApiV1FlawsRetrieveResponse200", + "OsidbApiV1FlawsUpdateResponse200", + "OsidbApiV1ManifestRetrieveResponse200", + "OsidbApiV1SchemaRetrieveFormat", + 
"OsidbApiV1SchemaRetrieveLang", + "OsidbApiV1SchemaRetrieveResponse200", + "OsidbApiV1StatusRetrieveResponse200", + "OsidbApiV1StatusRetrieveResponse200OsidbData", + "OsidbApiV1StatusRetrieveResponse200OsidbService", + "OsidbApiV1TrackersCreateResponse201", + "OsidbApiV1TrackersListAffectsAffectedness", + "OsidbApiV1TrackersListAffectsFlawImpact", + "OsidbApiV1TrackersListAffectsFlawSource", + "OsidbApiV1TrackersListAffectsImpact", + "OsidbApiV1TrackersListAffectsResolution", + "OsidbApiV1TrackersListOrderItem", + "OsidbApiV1TrackersListResponse200", + "OsidbApiV1TrackersListType", + "OsidbApiV1TrackersRetrieveResponse200", + "OsidbApiV1TrackersUpdateResponse200", + "OsidbHealthyRetrieveResponse200", + "OsidbWhoamiRetrieveResponse200", + "OsidbWhoamiRetrieveResponse200Profile", + "Package", + "PackageVer", + "PaginatedAffectCVSSList", + "PaginatedAffectList", + "PaginatedAlertList", + "PaginatedAuditList", + "PaginatedEPSSList", + "PaginatedExploitOnlyReportDataList", + "PaginatedFlawAcknowledgmentList", + "PaginatedFlawCommentList", + "PaginatedFlawCVSSList", + "PaginatedFlawList", + "PaginatedFlawPackageVersionList", + "PaginatedFlawReferenceList", + "PaginatedFlawReportDataList", + "PaginatedSupportedProductsList", + "PaginatedTrackerList", + "PsStreamSelection", + "Reject", + "RequiresCveDescriptionEnum", + "ResolutionEnum", + "SourceBe0Enum", + "SupportedProducts", + "TokenObtainPair", + "TokenRefresh", + "TokenVerify", + "Tracker", + "TrackerPost", + "TrackerReportData", + "TrackersApiV1FileCreateResponse200", + "TrackerSuggestion", + "TrackerType", + "WorkflowsApiV1WorkflowsAdjustCreateResponse200", + "WorkflowsApiV1WorkflowsRetrieve2Response200", + "WorkflowsApiV1WorkflowsRetrieveResponse200", + "WorkflowsHealthyRetrieveResponse200", + "WorkflowsRetrieveResponse200", +) diff --git a/osidb_bindings/bindings/python_client/models/affect.py b/osidb_bindings/bindings/python_client/models/affect.py index f65522d..fe3580c 100644 --- a/osidb_bindings/bindings/python_client/models/affect.py +++ b/osidb_bindings/bindings/python_client/models/affect.py @@ -1,75 +1,117 @@ import datetime import json -from typing import Any, Dict, List, Optional, Tuple, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union, cast +from uuid import UUID -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.affect_cvss import AffectCVSS from ..models.affectedness_enum import AffectednessEnum -from ..models.alert import Alert from ..models.blank_enum import BlankEnum from ..models.impact_enum import ImpactEnum from ..models.resolution_enum import ResolutionEnum -from ..models.tracker import Tracker from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.affect_cvss import AffectCVSS + from ..models.alert import Alert + from ..models.tracker import Tracker + + T = TypeVar("T", bound="Affect") -@attr.s(auto_attribs=True) +@_attrs_define class Affect(OSIDBModel): - """Affect serializer""" - - uuid: str + """Affect serializer + + Attributes: + uuid (UUID): + flaw (Union[None, UUID]): + ps_module (str): + ps_product (str): + trackers (list['Tracker']): + delegated_resolution (str): + cvss_scores (list['AffectCVSS']): + embargoed (bool): The embargoed boolean attribute is technically read-only as it just indirectly modifies the + ACLs but is mandatory as it controls the access to the resource. 
+ alerts (list['Alert']): + created_dt (datetime.datetime): + updated_dt (datetime.datetime): The updated_dt timestamp attribute is mandatory on update as it is used to + detect mit-air collisions. + affectedness (Union[AffectednessEnum, BlankEnum, Unset]): + resolution (Union[BlankEnum, ResolutionEnum, Unset]): + ps_component (Union[None, Unset, str]): + impact (Union[BlankEnum, ImpactEnum, Unset]): + purl (Union[None, Unset, str]): + """ + + uuid: UUID + flaw: Union[None, UUID] ps_module: str ps_product: str - trackers: List[Tracker] + trackers: list["Tracker"] delegated_resolution: str - cvss_scores: List[AffectCVSS] + cvss_scores: list["AffectCVSS"] embargoed: bool - alerts: List[Alert] + alerts: list["Alert"] created_dt: datetime.datetime updated_dt: datetime.datetime - flaw: Optional[str] affectedness: Union[AffectednessEnum, BlankEnum, Unset] = UNSET resolution: Union[BlankEnum, ResolutionEnum, Unset] = UNSET - ps_component: Union[Unset, None, str] = UNSET + ps_component: Union[None, Unset, str] = UNSET impact: Union[BlankEnum, ImpactEnum, Unset] = UNSET - purl: Union[Unset, None, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + purl: Union[None, Unset, str] = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + uuid: str = UNSET + if not isinstance(self.uuid, Unset): + uuid = str(self.uuid) + + flaw: Union[None, str] + if isinstance(self.flaw, UUID): + flaw = UNSET + if not isinstance(self.flaw, Unset): + flaw = str(self.flaw) + + else: + flaw = self.flaw - def to_dict(self) -> Dict[str, Any]: - uuid = self.uuid ps_module = self.ps_module + ps_product = self.ps_product - trackers: List[Dict[str, Any]] = UNSET + + trackers: list[dict[str, Any]] = UNSET if not isinstance(self.trackers, Unset): trackers = [] for trackers_item_data in self.trackers: - trackers_item: Dict[str, Any] = UNSET + trackers_item: dict[str, Any] = UNSET if not isinstance(trackers_item_data, Unset): trackers_item = trackers_item_data.to_dict() trackers.append(trackers_item) delegated_resolution = self.delegated_resolution - cvss_scores: List[Dict[str, Any]] = UNSET + + cvss_scores: list[dict[str, Any]] = UNSET if not isinstance(self.cvss_scores, Unset): cvss_scores = [] for cvss_scores_item_data in self.cvss_scores: - cvss_scores_item: Dict[str, Any] = UNSET + cvss_scores_item: dict[str, Any] = UNSET if not isinstance(cvss_scores_item_data, Unset): cvss_scores_item = cvss_scores_item_data.to_dict() cvss_scores.append(cvss_scores_item) embargoed = self.embargoed - alerts: List[Dict[str, Any]] = UNSET + + alerts: list[dict[str, Any]] = UNSET if not isinstance(self.alerts, Unset): alerts = [] for alerts_item_data in self.alerts: - alerts_item: Dict[str, Any] = UNSET + alerts_item: dict[str, Any] = UNSET if not isinstance(alerts_item_data, Unset): alerts_item = alerts_item_data.to_dict() @@ -83,20 +125,17 @@ def to_dict(self) -> Dict[str, Any]: if not isinstance(self.updated_dt, Unset): updated_dt = self.updated_dt.isoformat() - flaw = self.flaw affectedness: Union[Unset, str] if isinstance(self.affectedness, Unset): affectedness = UNSET elif isinstance(self.affectedness, AffectednessEnum): affectedness = UNSET if not isinstance(self.affectedness, Unset): - affectedness = AffectednessEnum(self.affectedness).value else: affectedness = UNSET if not isinstance(self.affectedness, Unset): - affectedness = BlankEnum(self.affectedness).value resolution: Union[Unset, str] @@ -105,37 +144,44 @@ def 
to_dict(self) -> Dict[str, Any]: elif isinstance(self.resolution, ResolutionEnum): resolution = UNSET if not isinstance(self.resolution, Unset): - resolution = ResolutionEnum(self.resolution).value else: resolution = UNSET if not isinstance(self.resolution, Unset): - resolution = BlankEnum(self.resolution).value - ps_component = self.ps_component + ps_component: Union[None, Unset, str] + if isinstance(self.ps_component, Unset): + ps_component = UNSET + else: + ps_component = self.ps_component + impact: Union[Unset, str] if isinstance(self.impact, Unset): impact = UNSET elif isinstance(self.impact, ImpactEnum): impact = UNSET if not isinstance(self.impact, Unset): - impact = ImpactEnum(self.impact).value else: impact = UNSET if not isinstance(self.impact, Unset): - impact = BlankEnum(self.impact).value - purl = self.purl + purl: Union[None, Unset, str] + if isinstance(self.purl, Unset): + purl = UNSET + else: + purl = self.purl - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(uuid, Unset): field_dict["uuid"] = uuid + if not isinstance(flaw, Unset): + field_dict["flaw"] = flaw if not isinstance(ps_module, Unset): field_dict["ps_module"] = ps_module if not isinstance(ps_product, Unset): @@ -154,8 +200,6 @@ def to_dict(self) -> Dict[str, Any]: field_dict["created_dt"] = created_dt if not isinstance(updated_dt, Unset): field_dict["updated_dt"] = updated_dt - if not isinstance(flaw, Unset): - field_dict["flaw"] = flaw if not isinstance(affectedness, Unset): field_dict["affectedness"] = affectedness if not isinstance(resolution, Unset): @@ -169,131 +213,140 @@ def to_dict(self) -> Dict[str, Any]: return field_dict - def to_multipart(self) -> Dict[str, Any]: - uuid = self.uuid if self.uuid is UNSET else (None, str(self.uuid), "text/plain") - ps_module = ( - self.ps_module - if self.ps_module is UNSET - else (None, str(self.ps_module), "text/plain") - ) - ps_product = ( - self.ps_product - if self.ps_product is UNSET - else (None, str(self.ps_product), "text/plain") - ) - trackers: Union[Unset, Tuple[None, str, str]] = UNSET + def to_multipart(self) -> dict[str, Any]: + uuid: bytes = UNSET + if not isinstance(self.uuid, Unset): + uuid = str(self.uuid) + + flaw: tuple[None, bytes, str] + + if isinstance(self.flaw, UUID): + flaw: bytes = UNSET + if not isinstance(self.flaw, Unset): + flaw = str(self.flaw) + else: + flaw = (None, str(self.flaw).encode(), "text/plain") + + ps_module = (None, str(self.ps_module).encode(), "text/plain") + + ps_product = (None, str(self.ps_product).encode(), "text/plain") + + trackers: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.trackers, Unset): _temp_trackers = [] for trackers_item_data in self.trackers: - trackers_item: Dict[str, Any] = UNSET + trackers_item: dict[str, Any] = UNSET if not isinstance(trackers_item_data, Unset): trackers_item = trackers_item_data.to_dict() _temp_trackers.append(trackers_item) - trackers = (None, json.dumps(_temp_trackers), "application/json") + trackers = (None, json.dumps(_temp_trackers).encode(), "application/json") - delegated_resolution = ( - self.delegated_resolution - if self.delegated_resolution is UNSET - else (None, str(self.delegated_resolution), "text/plain") - ) - cvss_scores: Union[Unset, Tuple[None, str, str]] = UNSET + delegated_resolution = (None, str(self.delegated_resolution).encode(), "text/plain") + + cvss_scores: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.cvss_scores, Unset): 
_temp_cvss_scores = [] for cvss_scores_item_data in self.cvss_scores: - cvss_scores_item: Dict[str, Any] = UNSET + cvss_scores_item: dict[str, Any] = UNSET if not isinstance(cvss_scores_item_data, Unset): cvss_scores_item = cvss_scores_item_data.to_dict() _temp_cvss_scores.append(cvss_scores_item) - cvss_scores = (None, json.dumps(_temp_cvss_scores), "application/json") + cvss_scores = (None, json.dumps(_temp_cvss_scores).encode(), "application/json") - embargoed = ( - self.embargoed - if self.embargoed is UNSET - else (None, str(self.embargoed), "text/plain") - ) - alerts: Union[Unset, Tuple[None, str, str]] = UNSET + embargoed = (None, str(self.embargoed).encode(), "text/plain") + + alerts: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.alerts, Unset): _temp_alerts = [] for alerts_item_data in self.alerts: - alerts_item: Dict[str, Any] = UNSET + alerts_item: dict[str, Any] = UNSET if not isinstance(alerts_item_data, Unset): alerts_item = alerts_item_data.to_dict() _temp_alerts.append(alerts_item) - alerts = (None, json.dumps(_temp_alerts), "application/json") + alerts = (None, json.dumps(_temp_alerts).encode(), "application/json") - created_dt: str = UNSET + created_dt: bytes = UNSET if not isinstance(self.created_dt, Unset): - created_dt = self.created_dt.isoformat() + created_dt = self.created_dt.isoformat().encode() - updated_dt: str = UNSET + updated_dt: bytes = UNSET if not isinstance(self.updated_dt, Unset): - updated_dt = self.updated_dt.isoformat() + updated_dt = self.updated_dt.isoformat().encode() + + affectedness: Union[Unset, tuple[None, bytes, str]] - flaw = self.flaw if self.flaw is UNSET else (None, str(self.flaw), "text/plain") - affectedness: Union[Unset, str] if isinstance(self.affectedness, Unset): affectedness = UNSET elif isinstance(self.affectedness, AffectednessEnum): - affectedness = UNSET + affectedness: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.affectedness, Unset): - - affectedness = AffectednessEnum(self.affectedness).value - + affectedness = (None, str(self.affectedness.value).encode(), "text/plain") + # CHANGE END (3) #} else: - affectedness = UNSET + affectedness: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.affectedness, Unset): + affectedness = (None, str(self.affectedness.value).encode(), "text/plain") + # CHANGE END (3) #} - affectedness = BlankEnum(self.affectedness).value + resolution: Union[Unset, tuple[None, bytes, str]] - resolution: Union[Unset, str] if isinstance(self.resolution, Unset): resolution = UNSET elif isinstance(self.resolution, ResolutionEnum): - resolution = UNSET + resolution: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.resolution, Unset): + resolution = (None, str(self.resolution.value).encode(), "text/plain") + # CHANGE END (3) #} + else: + resolution: Union[Unset, tuple[None, bytes, str]] = UNSET + if not isinstance(self.resolution, Unset): + resolution = (None, str(self.resolution.value).encode(), "text/plain") + # CHANGE END (3) #} - resolution = ResolutionEnum(self.resolution).value + ps_component: Union[Unset, tuple[None, bytes, str]] + if isinstance(self.ps_component, Unset): + ps_component = UNSET + elif isinstance(self.ps_component, str): + ps_component = (None, str(self.ps_component).encode(), "text/plain") else: - resolution = UNSET - if not isinstance(self.resolution, Unset): + ps_component = (None, str(self.ps_component).encode(), "text/plain") - resolution = BlankEnum(self.resolution).value + impact: Union[Unset, 
tuple[None, bytes, str]] - ps_component = ( - self.ps_component - if self.ps_component is UNSET - else (None, str(self.ps_component), "text/plain") - ) - impact: Union[Unset, str] if isinstance(self.impact, Unset): impact = UNSET elif isinstance(self.impact, ImpactEnum): - impact = UNSET + impact: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.impact, Unset): - - impact = ImpactEnum(self.impact).value - + impact = (None, str(self.impact.value).encode(), "text/plain") + # CHANGE END (3) #} else: - impact = UNSET + impact: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.impact, Unset): + impact = (None, str(self.impact.value).encode(), "text/plain") + # CHANGE END (3) #} - impact = BlankEnum(self.impact).value + purl: Union[Unset, tuple[None, bytes, str]] - purl = self.purl if self.purl is UNSET else (None, str(self.purl), "text/plain") + if isinstance(self.purl, Unset): + purl = UNSET + elif isinstance(self.purl, str): + purl = (None, str(self.purl).encode(), "text/plain") + else: + purl = (None, str(self.purl).encode(), "text/plain") + + field_dict: dict[str, Any] = {} + for prop_name, prop in self.additional_properties.items(): + field_dict[prop_name] = (None, str(prop).encode(), "text/plain") - field_dict: Dict[str, Any] = {} - field_dict.update( - { - key: (None, str(value), "text/plain") - for key, value in self.additional_properties.items() - } - ) if not isinstance(uuid, Unset): field_dict["uuid"] = uuid + if not isinstance(flaw, Unset): + field_dict["flaw"] = flaw if not isinstance(ps_module, Unset): field_dict["ps_module"] = ps_module if not isinstance(ps_product, Unset): @@ -312,8 +365,6 @@ def to_multipart(self) -> Dict[str, Any]: field_dict["created_dt"] = created_dt if not isinstance(updated_dt, Unset): field_dict["updated_dt"] = updated_dt - if not isinstance(flaw, Unset): - field_dict["flaw"] = flaw if not isinstance(affectedness, Unset): field_dict["affectedness"] = affectedness if not isinstance(resolution, Unset): @@ -328,9 +379,40 @@ def to_multipart(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.affect_cvss import AffectCVSS + from ..models.alert import Alert + from ..models.tracker import Tracker + d = src_dict.copy() - uuid = d.pop("uuid", UNSET) + # } + _uuid = d.pop("uuid", UNSET) + uuid: UUID + if isinstance(_uuid, Unset): + uuid = UNSET + else: + uuid = UUID(_uuid) + + def _parse_flaw(data: object) -> Union[None, UUID]: + if data is None: + return data + try: + if not isinstance(data, str): + raise TypeError() + # } + _flaw_type_0 = data + flaw_type_0: UUID + if isinstance(_flaw_type_0, Unset): + flaw_type_0 = UNSET + else: + flaw_type_0 = UUID(_flaw_type_0) + + return flaw_type_0 + except: # noqa: E722 + pass + return cast(Union[None, UUID], data) + + flaw = _parse_flaw(d.pop("flaw", UNSET)) ps_module = d.pop("ps_module", UNSET) @@ -338,53 +420,48 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: trackers = [] _trackers = d.pop("trackers", UNSET) - if _trackers is UNSET: - trackers = UNSET - else: - for trackers_item_data in _trackers or []: - _trackers_item = trackers_item_data - trackers_item: Tracker - if isinstance(_trackers_item, Unset): - trackers_item = UNSET - else: - trackers_item = Tracker.from_dict(_trackers_item) + for trackers_item_data in _trackers or []: + # } + _trackers_item = trackers_item_data + trackers_item: Tracker + if 
isinstance(_trackers_item, Unset): + trackers_item = UNSET + else: + trackers_item = Tracker.from_dict(_trackers_item) - trackers.append(trackers_item) + trackers.append(trackers_item) delegated_resolution = d.pop("delegated_resolution", UNSET) cvss_scores = [] _cvss_scores = d.pop("cvss_scores", UNSET) - if _cvss_scores is UNSET: - cvss_scores = UNSET - else: - for cvss_scores_item_data in _cvss_scores or []: - _cvss_scores_item = cvss_scores_item_data - cvss_scores_item: AffectCVSS - if isinstance(_cvss_scores_item, Unset): - cvss_scores_item = UNSET - else: - cvss_scores_item = AffectCVSS.from_dict(_cvss_scores_item) + for cvss_scores_item_data in _cvss_scores or []: + # } + _cvss_scores_item = cvss_scores_item_data + cvss_scores_item: AffectCVSS + if isinstance(_cvss_scores_item, Unset): + cvss_scores_item = UNSET + else: + cvss_scores_item = AffectCVSS.from_dict(_cvss_scores_item) - cvss_scores.append(cvss_scores_item) + cvss_scores.append(cvss_scores_item) embargoed = d.pop("embargoed", UNSET) alerts = [] _alerts = d.pop("alerts", UNSET) - if _alerts is UNSET: - alerts = UNSET - else: - for alerts_item_data in _alerts or []: - _alerts_item = alerts_item_data - alerts_item: Alert - if isinstance(_alerts_item, Unset): - alerts_item = UNSET - else: - alerts_item = Alert.from_dict(_alerts_item) + for alerts_item_data in _alerts or []: + # } + _alerts_item = alerts_item_data + alerts_item: Alert + if isinstance(_alerts_item, Unset): + alerts_item = UNSET + else: + alerts_item = Alert.from_dict(_alerts_item) - alerts.append(alerts_item) + alerts.append(alerts_item) + # } _created_dt = d.pop("created_dt", UNSET) created_dt: datetime.datetime if isinstance(_created_dt, Unset): @@ -392,6 +469,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: created_dt = isoparse(_created_dt) + # } _updated_dt = d.pop("updated_dt", UNSET) updated_dt: datetime.datetime if isinstance(_updated_dt, Unset): @@ -399,18 +477,15 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: updated_dt = isoparse(_updated_dt) - flaw = d.pop("flaw", UNSET) - - def _parse_affectedness( - data: object, - ) -> Union[AffectednessEnum, BlankEnum, Unset]: + def _parse_affectedness(data: object) -> Union[AffectednessEnum, BlankEnum, Unset]: if isinstance(data, Unset): return data try: if not isinstance(data, str): raise TypeError() + # } _affectedness_type_0 = data - affectedness_type_0: Union[Unset, AffectednessEnum] + affectedness_type_0: AffectednessEnum if isinstance(_affectedness_type_0, Unset): affectedness_type_0 = UNSET else: @@ -421,8 +496,9 @@ def _parse_affectedness( pass if not isinstance(data, str): raise TypeError() + # } _affectedness_type_1 = data - affectedness_type_1: Union[Unset, BlankEnum] + affectedness_type_1: BlankEnum if isinstance(_affectedness_type_1, Unset): affectedness_type_1 = UNSET else: @@ -438,8 +514,9 @@ def _parse_resolution(data: object) -> Union[BlankEnum, ResolutionEnum, Unset]: try: if not isinstance(data, str): raise TypeError() + # } _resolution_type_0 = data - resolution_type_0: Union[Unset, ResolutionEnum] + resolution_type_0: ResolutionEnum if isinstance(_resolution_type_0, Unset): resolution_type_0 = UNSET else: @@ -450,8 +527,9 @@ def _parse_resolution(data: object) -> Union[BlankEnum, ResolutionEnum, Unset]: pass if not isinstance(data, str): raise TypeError() + # } _resolution_type_1 = data - resolution_type_1: Union[Unset, BlankEnum] + resolution_type_1: BlankEnum if isinstance(_resolution_type_1, Unset): resolution_type_1 = UNSET else: @@ -461,7 
+539,14 @@ def _parse_resolution(data: object) -> Union[BlankEnum, ResolutionEnum, Unset]: resolution = _parse_resolution(d.pop("resolution", UNSET)) - ps_component = d.pop("ps_component", UNSET) + def _parse_ps_component(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + ps_component = _parse_ps_component(d.pop("ps_component", UNSET)) def _parse_impact(data: object) -> Union[BlankEnum, ImpactEnum, Unset]: if isinstance(data, Unset): @@ -469,8 +554,9 @@ def _parse_impact(data: object) -> Union[BlankEnum, ImpactEnum, Unset]: try: if not isinstance(data, str): raise TypeError() + # } _impact_type_0 = data - impact_type_0: Union[Unset, ImpactEnum] + impact_type_0: ImpactEnum if isinstance(_impact_type_0, Unset): impact_type_0 = UNSET else: @@ -481,8 +567,9 @@ def _parse_impact(data: object) -> Union[BlankEnum, ImpactEnum, Unset]: pass if not isinstance(data, str): raise TypeError() + # } _impact_type_1 = data - impact_type_1: Union[Unset, BlankEnum] + impact_type_1: BlankEnum if isinstance(_impact_type_1, Unset): impact_type_1 = UNSET else: @@ -492,10 +579,18 @@ def _parse_impact(data: object) -> Union[BlankEnum, ImpactEnum, Unset]: impact = _parse_impact(d.pop("impact", UNSET)) - purl = d.pop("purl", UNSET) + def _parse_purl(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + purl = _parse_purl(d.pop("purl", UNSET)) affect = cls( uuid=uuid, + flaw=flaw, ps_module=ps_module, ps_product=ps_product, trackers=trackers, @@ -505,7 +600,6 @@ def _parse_impact(data: object) -> Union[BlankEnum, ImpactEnum, Unset]: alerts=alerts, created_dt=created_dt, updated_dt=updated_dt, - flaw=flaw, affectedness=affectedness, resolution=resolution, ps_component=ps_component, @@ -519,26 +613,26 @@ def _parse_impact(data: object) -> Union[BlankEnum, ImpactEnum, Unset]: @staticmethod def get_fields(): return { - "uuid": str, + "uuid": UUID, + "flaw": Union[None, UUID], "ps_module": str, "ps_product": str, - "trackers": List[Tracker], + "trackers": list["Tracker"], "delegated_resolution": str, - "cvss_scores": List[AffectCVSS], + "cvss_scores": list["AffectCVSS"], "embargoed": bool, - "alerts": List[Alert], + "alerts": list["Alert"], "created_dt": datetime.datetime, "updated_dt": datetime.datetime, - "flaw": str, "affectedness": Union[AffectednessEnum, BlankEnum], "resolution": Union[BlankEnum, ResolutionEnum], - "ps_component": str, + "ps_component": Union[None, str], "impact": Union[BlankEnum, ImpactEnum], - "purl": str, + "purl": Union[None, str], } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/affect_bulk_post_put_response.py b/osidb_bindings/bindings/python_client/models/affect_bulk_post_put_response.py index cf75138..e37a310 100644 --- a/osidb_bindings/bindings/python_client/models/affect_bulk_post_put_response.py +++ b/osidb_bindings/bindings/python_client/models/affect_bulk_post_put_response.py @@ -1,32 +1,39 @@ -from typing import Any, Dict, List, Type, TypeVar +from typing import TYPE_CHECKING, Any, TypeVar -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field -from ..models.affect import Affect from ..types import UNSET, OSIDBModel, Unset +if 
TYPE_CHECKING: + from ..models.affect import Affect + + T = TypeVar("T", bound="AffectBulkPostPutResponse") -@attr.s(auto_attribs=True) +@_attrs_define class AffectBulkPostPutResponse(OSIDBModel): - """ """ + """ + Attributes: + results (list['Affect']): + """ - results: List[Affect] - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + results: list["Affect"] + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: - results: List[Dict[str, Any]] = UNSET + def to_dict(self) -> dict[str, Any]: + results: list[dict[str, Any]] = UNSET if not isinstance(self.results, Unset): results = [] for results_item_data in self.results: - results_item: Dict[str, Any] = UNSET + results_item: dict[str, Any] = UNSET if not isinstance(results_item_data, Unset): results_item = results_item_data.to_dict() results.append(results_item) - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(results, Unset): field_dict["results"] = results @@ -34,22 +41,22 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.affect import Affect + d = src_dict.copy() results = [] _results = d.pop("results", UNSET) - if _results is UNSET: - results = UNSET - else: - for results_item_data in _results or []: - _results_item = results_item_data - results_item: Affect - if isinstance(_results_item, Unset): - results_item = UNSET - else: - results_item = Affect.from_dict(_results_item) + for results_item_data in _results or []: + # } + _results_item = results_item_data + results_item: Affect + if isinstance(_results_item, Unset): + results_item = UNSET + else: + results_item = Affect.from_dict(_results_item) - results.append(results_item) + results.append(results_item) affect_bulk_post_put_response = cls( results=results, @@ -61,11 +68,11 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: @staticmethod def get_fields(): return { - "results": List[Affect], + "results": list["Affect"], } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/affect_bulk_put.py b/osidb_bindings/bindings/python_client/models/affect_bulk_put.py index e2040f2..98c94bc 100644 --- a/osidb_bindings/bindings/python_client/models/affect_bulk_put.py +++ b/osidb_bindings/bindings/python_client/models/affect_bulk_put.py @@ -1,74 +1,116 @@ import datetime -from typing import Any, Dict, List, Optional, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union, cast +from uuid import UUID -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.affect_cvss import AffectCVSS from ..models.affectedness_enum import AffectednessEnum -from ..models.alert import Alert from ..models.blank_enum import BlankEnum from ..models.impact_enum import ImpactEnum from ..models.resolution_enum import ResolutionEnum -from ..models.tracker import Tracker from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.affect_cvss import AffectCVSS + from ..models.alert import Alert + from ..models.tracker import Tracker + + T = TypeVar("T", 
bound="AffectBulkPut") -@attr.s(auto_attribs=True) +@_attrs_define class AffectBulkPut(OSIDBModel): - """Affect serializer""" - - uuid: str + """Affect serializer + + Attributes: + uuid (UUID): + flaw (Union[None, UUID]): + ps_module (str): + ps_product (str): + trackers (list['Tracker']): + delegated_resolution (str): + cvss_scores (list['AffectCVSS']): + embargoed (bool): The embargoed boolean attribute is technically read-only as it just indirectly modifies the + ACLs but is mandatory as it controls the access to the resource. + alerts (list['Alert']): + created_dt (datetime.datetime): + updated_dt (datetime.datetime): The updated_dt timestamp attribute is mandatory on update as it is used to + detect mit-air collisions. + affectedness (Union[AffectednessEnum, BlankEnum, Unset]): + resolution (Union[BlankEnum, ResolutionEnum, Unset]): + ps_component (Union[None, Unset, str]): + impact (Union[BlankEnum, ImpactEnum, Unset]): + purl (Union[None, Unset, str]): + """ + + uuid: UUID + flaw: Union[None, UUID] ps_module: str ps_product: str - trackers: List[Tracker] + trackers: list["Tracker"] delegated_resolution: str - cvss_scores: List[AffectCVSS] + cvss_scores: list["AffectCVSS"] embargoed: bool - alerts: List[Alert] + alerts: list["Alert"] created_dt: datetime.datetime updated_dt: datetime.datetime - flaw: Optional[str] affectedness: Union[AffectednessEnum, BlankEnum, Unset] = UNSET resolution: Union[BlankEnum, ResolutionEnum, Unset] = UNSET - ps_component: Union[Unset, None, str] = UNSET + ps_component: Union[None, Unset, str] = UNSET impact: Union[BlankEnum, ImpactEnum, Unset] = UNSET - purl: Union[Unset, None, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + purl: Union[None, Unset, str] = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + uuid: str = UNSET + if not isinstance(self.uuid, Unset): + uuid = str(self.uuid) + + flaw: Union[None, str] + if isinstance(self.flaw, UUID): + flaw = UNSET + if not isinstance(self.flaw, Unset): + flaw = str(self.flaw) + + else: + flaw = self.flaw - def to_dict(self) -> Dict[str, Any]: - uuid = self.uuid ps_module = self.ps_module + ps_product = self.ps_product - trackers: List[Dict[str, Any]] = UNSET + + trackers: list[dict[str, Any]] = UNSET if not isinstance(self.trackers, Unset): trackers = [] for trackers_item_data in self.trackers: - trackers_item: Dict[str, Any] = UNSET + trackers_item: dict[str, Any] = UNSET if not isinstance(trackers_item_data, Unset): trackers_item = trackers_item_data.to_dict() trackers.append(trackers_item) delegated_resolution = self.delegated_resolution - cvss_scores: List[Dict[str, Any]] = UNSET + + cvss_scores: list[dict[str, Any]] = UNSET if not isinstance(self.cvss_scores, Unset): cvss_scores = [] for cvss_scores_item_data in self.cvss_scores: - cvss_scores_item: Dict[str, Any] = UNSET + cvss_scores_item: dict[str, Any] = UNSET if not isinstance(cvss_scores_item_data, Unset): cvss_scores_item = cvss_scores_item_data.to_dict() cvss_scores.append(cvss_scores_item) embargoed = self.embargoed - alerts: List[Dict[str, Any]] = UNSET + + alerts: list[dict[str, Any]] = UNSET if not isinstance(self.alerts, Unset): alerts = [] for alerts_item_data in self.alerts: - alerts_item: Dict[str, Any] = UNSET + alerts_item: dict[str, Any] = UNSET if not isinstance(alerts_item_data, Unset): alerts_item = alerts_item_data.to_dict() @@ -82,20 +124,17 @@ def to_dict(self) -> Dict[str, Any]: if not 
isinstance(self.updated_dt, Unset): updated_dt = self.updated_dt.isoformat() - flaw = self.flaw affectedness: Union[Unset, str] if isinstance(self.affectedness, Unset): affectedness = UNSET elif isinstance(self.affectedness, AffectednessEnum): affectedness = UNSET if not isinstance(self.affectedness, Unset): - affectedness = AffectednessEnum(self.affectedness).value else: affectedness = UNSET if not isinstance(self.affectedness, Unset): - affectedness = BlankEnum(self.affectedness).value resolution: Union[Unset, str] @@ -104,37 +143,44 @@ def to_dict(self) -> Dict[str, Any]: elif isinstance(self.resolution, ResolutionEnum): resolution = UNSET if not isinstance(self.resolution, Unset): - resolution = ResolutionEnum(self.resolution).value else: resolution = UNSET if not isinstance(self.resolution, Unset): - resolution = BlankEnum(self.resolution).value - ps_component = self.ps_component + ps_component: Union[None, Unset, str] + if isinstance(self.ps_component, Unset): + ps_component = UNSET + else: + ps_component = self.ps_component + impact: Union[Unset, str] if isinstance(self.impact, Unset): impact = UNSET elif isinstance(self.impact, ImpactEnum): impact = UNSET if not isinstance(self.impact, Unset): - impact = ImpactEnum(self.impact).value else: impact = UNSET if not isinstance(self.impact, Unset): - impact = BlankEnum(self.impact).value - purl = self.purl + purl: Union[None, Unset, str] + if isinstance(self.purl, Unset): + purl = UNSET + else: + purl = self.purl - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(uuid, Unset): field_dict["uuid"] = uuid + if not isinstance(flaw, Unset): + field_dict["flaw"] = flaw if not isinstance(ps_module, Unset): field_dict["ps_module"] = ps_module if not isinstance(ps_product, Unset): @@ -153,8 +199,6 @@ def to_dict(self) -> Dict[str, Any]: field_dict["created_dt"] = created_dt if not isinstance(updated_dt, Unset): field_dict["updated_dt"] = updated_dt - if not isinstance(flaw, Unset): - field_dict["flaw"] = flaw if not isinstance(affectedness, Unset): field_dict["affectedness"] = affectedness if not isinstance(resolution, Unset): @@ -169,9 +213,40 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.affect_cvss import AffectCVSS + from ..models.alert import Alert + from ..models.tracker import Tracker + d = src_dict.copy() - uuid = d.pop("uuid", UNSET) + # } + _uuid = d.pop("uuid", UNSET) + uuid: UUID + if isinstance(_uuid, Unset): + uuid = UNSET + else: + uuid = UUID(_uuid) + + def _parse_flaw(data: object) -> Union[None, UUID]: + if data is None: + return data + try: + if not isinstance(data, str): + raise TypeError() + # } + _flaw_type_0 = data + flaw_type_0: UUID + if isinstance(_flaw_type_0, Unset): + flaw_type_0 = UNSET + else: + flaw_type_0 = UUID(_flaw_type_0) + + return flaw_type_0 + except: # noqa: E722 + pass + return cast(Union[None, UUID], data) + + flaw = _parse_flaw(d.pop("flaw", UNSET)) ps_module = d.pop("ps_module", UNSET) @@ -179,53 +254,48 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: trackers = [] _trackers = d.pop("trackers", UNSET) - if _trackers is UNSET: - trackers = UNSET - else: - for trackers_item_data in _trackers or []: - _trackers_item = trackers_item_data - trackers_item: Tracker - if isinstance(_trackers_item, Unset): - trackers_item = UNSET - else: - trackers_item = 
Tracker.from_dict(_trackers_item) + for trackers_item_data in _trackers or []: + # } + _trackers_item = trackers_item_data + trackers_item: Tracker + if isinstance(_trackers_item, Unset): + trackers_item = UNSET + else: + trackers_item = Tracker.from_dict(_trackers_item) - trackers.append(trackers_item) + trackers.append(trackers_item) delegated_resolution = d.pop("delegated_resolution", UNSET) cvss_scores = [] _cvss_scores = d.pop("cvss_scores", UNSET) - if _cvss_scores is UNSET: - cvss_scores = UNSET - else: - for cvss_scores_item_data in _cvss_scores or []: - _cvss_scores_item = cvss_scores_item_data - cvss_scores_item: AffectCVSS - if isinstance(_cvss_scores_item, Unset): - cvss_scores_item = UNSET - else: - cvss_scores_item = AffectCVSS.from_dict(_cvss_scores_item) + for cvss_scores_item_data in _cvss_scores or []: + # } + _cvss_scores_item = cvss_scores_item_data + cvss_scores_item: AffectCVSS + if isinstance(_cvss_scores_item, Unset): + cvss_scores_item = UNSET + else: + cvss_scores_item = AffectCVSS.from_dict(_cvss_scores_item) - cvss_scores.append(cvss_scores_item) + cvss_scores.append(cvss_scores_item) embargoed = d.pop("embargoed", UNSET) alerts = [] _alerts = d.pop("alerts", UNSET) - if _alerts is UNSET: - alerts = UNSET - else: - for alerts_item_data in _alerts or []: - _alerts_item = alerts_item_data - alerts_item: Alert - if isinstance(_alerts_item, Unset): - alerts_item = UNSET - else: - alerts_item = Alert.from_dict(_alerts_item) + for alerts_item_data in _alerts or []: + # } + _alerts_item = alerts_item_data + alerts_item: Alert + if isinstance(_alerts_item, Unset): + alerts_item = UNSET + else: + alerts_item = Alert.from_dict(_alerts_item) - alerts.append(alerts_item) + alerts.append(alerts_item) + # } _created_dt = d.pop("created_dt", UNSET) created_dt: datetime.datetime if isinstance(_created_dt, Unset): @@ -233,6 +303,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: created_dt = isoparse(_created_dt) + # } _updated_dt = d.pop("updated_dt", UNSET) updated_dt: datetime.datetime if isinstance(_updated_dt, Unset): @@ -240,18 +311,15 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: updated_dt = isoparse(_updated_dt) - flaw = d.pop("flaw", UNSET) - - def _parse_affectedness( - data: object, - ) -> Union[AffectednessEnum, BlankEnum, Unset]: + def _parse_affectedness(data: object) -> Union[AffectednessEnum, BlankEnum, Unset]: if isinstance(data, Unset): return data try: if not isinstance(data, str): raise TypeError() + # } _affectedness_type_0 = data - affectedness_type_0: Union[Unset, AffectednessEnum] + affectedness_type_0: AffectednessEnum if isinstance(_affectedness_type_0, Unset): affectedness_type_0 = UNSET else: @@ -262,8 +330,9 @@ def _parse_affectedness( pass if not isinstance(data, str): raise TypeError() + # } _affectedness_type_1 = data - affectedness_type_1: Union[Unset, BlankEnum] + affectedness_type_1: BlankEnum if isinstance(_affectedness_type_1, Unset): affectedness_type_1 = UNSET else: @@ -279,8 +348,9 @@ def _parse_resolution(data: object) -> Union[BlankEnum, ResolutionEnum, Unset]: try: if not isinstance(data, str): raise TypeError() + # } _resolution_type_0 = data - resolution_type_0: Union[Unset, ResolutionEnum] + resolution_type_0: ResolutionEnum if isinstance(_resolution_type_0, Unset): resolution_type_0 = UNSET else: @@ -291,8 +361,9 @@ def _parse_resolution(data: object) -> Union[BlankEnum, ResolutionEnum, Unset]: pass if not isinstance(data, str): raise TypeError() + # } _resolution_type_1 = data - 
resolution_type_1: Union[Unset, BlankEnum] + resolution_type_1: BlankEnum if isinstance(_resolution_type_1, Unset): resolution_type_1 = UNSET else: @@ -302,7 +373,14 @@ def _parse_resolution(data: object) -> Union[BlankEnum, ResolutionEnum, Unset]: resolution = _parse_resolution(d.pop("resolution", UNSET)) - ps_component = d.pop("ps_component", UNSET) + def _parse_ps_component(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + ps_component = _parse_ps_component(d.pop("ps_component", UNSET)) def _parse_impact(data: object) -> Union[BlankEnum, ImpactEnum, Unset]: if isinstance(data, Unset): @@ -310,8 +388,9 @@ def _parse_impact(data: object) -> Union[BlankEnum, ImpactEnum, Unset]: try: if not isinstance(data, str): raise TypeError() + # } _impact_type_0 = data - impact_type_0: Union[Unset, ImpactEnum] + impact_type_0: ImpactEnum if isinstance(_impact_type_0, Unset): impact_type_0 = UNSET else: @@ -322,8 +401,9 @@ def _parse_impact(data: object) -> Union[BlankEnum, ImpactEnum, Unset]: pass if not isinstance(data, str): raise TypeError() + # } _impact_type_1 = data - impact_type_1: Union[Unset, BlankEnum] + impact_type_1: BlankEnum if isinstance(_impact_type_1, Unset): impact_type_1 = UNSET else: @@ -333,10 +413,18 @@ def _parse_impact(data: object) -> Union[BlankEnum, ImpactEnum, Unset]: impact = _parse_impact(d.pop("impact", UNSET)) - purl = d.pop("purl", UNSET) + def _parse_purl(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + purl = _parse_purl(d.pop("purl", UNSET)) affect_bulk_put = cls( uuid=uuid, + flaw=flaw, ps_module=ps_module, ps_product=ps_product, trackers=trackers, @@ -346,7 +434,6 @@ def _parse_impact(data: object) -> Union[BlankEnum, ImpactEnum, Unset]: alerts=alerts, created_dt=created_dt, updated_dt=updated_dt, - flaw=flaw, affectedness=affectedness, resolution=resolution, ps_component=ps_component, @@ -360,26 +447,26 @@ def _parse_impact(data: object) -> Union[BlankEnum, ImpactEnum, Unset]: @staticmethod def get_fields(): return { - "uuid": str, + "uuid": UUID, + "flaw": Union[None, UUID], "ps_module": str, "ps_product": str, - "trackers": List[Tracker], + "trackers": list["Tracker"], "delegated_resolution": str, - "cvss_scores": List[AffectCVSS], + "cvss_scores": list["AffectCVSS"], "embargoed": bool, - "alerts": List[Alert], + "alerts": list["Alert"], "created_dt": datetime.datetime, "updated_dt": datetime.datetime, - "flaw": str, "affectedness": Union[AffectednessEnum, BlankEnum], "resolution": Union[BlankEnum, ResolutionEnum], - "ps_component": str, + "ps_component": Union[None, str], "impact": Union[BlankEnum, ImpactEnum], - "purl": str, + "purl": Union[None, str], } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/affect_cvss.py b/osidb_bindings/bindings/python_client/models/affect_cvss.py index 9bfd471..bb066b0 100644 --- a/osidb_bindings/bindings/python_client/models/affect_cvss.py +++ b/osidb_bindings/bindings/python_client/models/affect_cvss.py @@ -1,54 +1,79 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union, cast +from uuid import UUID -import attr +from attrs import 
define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.alert import Alert from ..models.cvss_version_enum import CvssVersionEnum from ..models.issuer_enum import IssuerEnum from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.alert import Alert + + T = TypeVar("T", bound="AffectCVSS") -@attr.s(auto_attribs=True) +@_attrs_define class AffectCVSS(OSIDBModel): - """AffectCVSS serializer""" + """AffectCVSS serializer + + Attributes: + cvss_version (CvssVersionEnum): + issuer (IssuerEnum): + score (float): + uuid (UUID): + vector (str): + embargoed (bool): The embargoed boolean attribute is technically read-only as it just indirectly modifies the + ACLs but is mandatory as it controls the access to the resource. + alerts (list['Alert']): + created_dt (datetime.datetime): + updated_dt (datetime.datetime): The updated_dt timestamp attribute is mandatory on update as it is used to + detect mit-air collisions. + affect (Union[Unset, UUID]): + comment (Union[None, Unset, str]): + """ cvss_version: CvssVersionEnum issuer: IssuerEnum score: float - uuid: str + uuid: UUID vector: str embargoed: bool - alerts: List[Alert] + alerts: list["Alert"] created_dt: datetime.datetime updated_dt: datetime.datetime - affect: Union[Unset, str] = UNSET - comment: Union[Unset, None, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + affect: Union[Unset, UUID] = UNSET + comment: Union[None, Unset, str] = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: cvss_version: str = UNSET if not isinstance(self.cvss_version, Unset): - cvss_version = CvssVersionEnum(self.cvss_version).value issuer: str = UNSET if not isinstance(self.issuer, Unset): - issuer = IssuerEnum(self.issuer).value score = self.score - uuid = self.uuid + + uuid: str = UNSET + if not isinstance(self.uuid, Unset): + uuid = str(self.uuid) + vector = self.vector + embargoed = self.embargoed - alerts: List[Dict[str, Any]] = UNSET + + alerts: list[dict[str, Any]] = UNSET if not isinstance(self.alerts, Unset): alerts = [] for alerts_item_data in self.alerts: - alerts_item: Dict[str, Any] = UNSET + alerts_item: dict[str, Any] = UNSET if not isinstance(alerts_item_data, Unset): alerts_item = alerts_item_data.to_dict() @@ -62,10 +87,17 @@ def to_dict(self) -> Dict[str, Any]: if not isinstance(self.updated_dt, Unset): updated_dt = self.updated_dt.isoformat() - affect = self.affect - comment = self.comment + affect: Union[Unset, str] = UNSET + if not isinstance(self.affect, Unset): + affect = str(self.affect) + + comment: Union[None, Unset, str] + if isinstance(self.comment, Unset): + comment = UNSET + else: + comment = self.comment - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(cvss_version, Unset): field_dict["cvss_version"] = cvss_version @@ -93,8 +125,11 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.alert import Alert + d = src_dict.copy() + # } _cvss_version = d.pop("cvss_version", UNSET) cvss_version: CvssVersionEnum if isinstance(_cvss_version, Unset): @@ -102,6 +137,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: cvss_version = 
CvssVersionEnum(_cvss_version) + # } _issuer = d.pop("issuer", UNSET) issuer: IssuerEnum if isinstance(_issuer, Unset): @@ -111,7 +147,13 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: score = d.pop("score", UNSET) - uuid = d.pop("uuid", UNSET) + # } + _uuid = d.pop("uuid", UNSET) + uuid: UUID + if isinstance(_uuid, Unset): + uuid = UNSET + else: + uuid = UUID(_uuid) vector = d.pop("vector", UNSET) @@ -119,19 +161,18 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: alerts = [] _alerts = d.pop("alerts", UNSET) - if _alerts is UNSET: - alerts = UNSET - else: - for alerts_item_data in _alerts or []: - _alerts_item = alerts_item_data - alerts_item: Alert - if isinstance(_alerts_item, Unset): - alerts_item = UNSET - else: - alerts_item = Alert.from_dict(_alerts_item) - - alerts.append(alerts_item) - + for alerts_item_data in _alerts or []: + # } + _alerts_item = alerts_item_data + alerts_item: Alert + if isinstance(_alerts_item, Unset): + alerts_item = UNSET + else: + alerts_item = Alert.from_dict(_alerts_item) + + alerts.append(alerts_item) + + # } _created_dt = d.pop("created_dt", UNSET) created_dt: datetime.datetime if isinstance(_created_dt, Unset): @@ -139,6 +180,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: created_dt = isoparse(_created_dt) + # } _updated_dt = d.pop("updated_dt", UNSET) updated_dt: datetime.datetime if isinstance(_updated_dt, Unset): @@ -146,9 +188,22 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: updated_dt = isoparse(_updated_dt) - affect = d.pop("affect", UNSET) + # } + _affect = d.pop("affect", UNSET) + affect: Union[Unset, UUID] + if isinstance(_affect, Unset): + affect = UNSET + else: + affect = UUID(_affect) + + def _parse_comment(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) - comment = d.pop("comment", UNSET) + comment = _parse_comment(d.pop("comment", UNSET)) affect_cvss = cls( cvss_version=cvss_version, @@ -173,18 +228,18 @@ def get_fields(): "cvss_version": CvssVersionEnum, "issuer": IssuerEnum, "score": float, - "uuid": str, + "uuid": UUID, "vector": str, "embargoed": bool, - "alerts": List[Alert], + "alerts": list["Alert"], "created_dt": datetime.datetime, "updated_dt": datetime.datetime, - "affect": str, - "comment": str, + "affect": UUID, + "comment": Union[None, str], } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/affect_cvss_post.py b/osidb_bindings/bindings/python_client/models/affect_cvss_post.py index 9908b87..17d2fe0 100644 --- a/osidb_bindings/bindings/python_client/models/affect_cvss_post.py +++ b/osidb_bindings/bindings/python_client/models/affect_cvss_post.py @@ -1,53 +1,75 @@ import datetime import json -from typing import Any, Dict, List, Tuple, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union, cast +from uuid import UUID -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.alert import Alert from ..models.cvss_version_enum import CvssVersionEnum from ..models.issuer_enum import IssuerEnum from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.alert import Alert + + T = TypeVar("T", bound="AffectCVSSPost") 
-@attr.s(auto_attribs=True) +@_attrs_define class AffectCVSSPost(OSIDBModel): - """AffectCVSS serializer""" + """AffectCVSS serializer + + Attributes: + cvss_version (CvssVersionEnum): + issuer (IssuerEnum): + score (float): + uuid (UUID): + vector (str): + embargoed (bool): The embargoed boolean attribute is technically read-only as it just indirectly modifies the + ACLs but is mandatory as it controls the access to the resource. + alerts (list['Alert']): + created_dt (datetime.datetime): + comment (Union[None, Unset, str]): + """ cvss_version: CvssVersionEnum issuer: IssuerEnum score: float - uuid: str + uuid: UUID vector: str embargoed: bool - alerts: List[Alert] + alerts: list["Alert"] created_dt: datetime.datetime - comment: Union[Unset, None, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + comment: Union[None, Unset, str] = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: cvss_version: str = UNSET if not isinstance(self.cvss_version, Unset): - cvss_version = CvssVersionEnum(self.cvss_version).value issuer: str = UNSET if not isinstance(self.issuer, Unset): - issuer = IssuerEnum(self.issuer).value score = self.score - uuid = self.uuid + + uuid: str = UNSET + if not isinstance(self.uuid, Unset): + uuid = str(self.uuid) + vector = self.vector + embargoed = self.embargoed - alerts: List[Dict[str, Any]] = UNSET + + alerts: list[dict[str, Any]] = UNSET if not isinstance(self.alerts, Unset): alerts = [] for alerts_item_data in self.alerts: - alerts_item: Dict[str, Any] = UNSET + alerts_item: dict[str, Any] = UNSET if not isinstance(alerts_item_data, Unset): alerts_item = alerts_item_data.to_dict() @@ -57,9 +79,13 @@ def to_dict(self) -> Dict[str, Any]: if not isinstance(self.created_dt, Unset): created_dt = self.created_dt.isoformat() - comment = self.comment + comment: Union[None, Unset, str] + if isinstance(self.comment, Unset): + comment = UNSET + else: + comment = self.comment - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(cvss_version, Unset): field_dict["cvss_version"] = cvss_version @@ -82,59 +108,55 @@ def to_dict(self) -> Dict[str, Any]: return field_dict - def to_multipart(self) -> Dict[str, Any]: - cvss_version: Union[Unset, Tuple[None, str, str]] = UNSET + def to_multipart(self) -> dict[str, Any]: + cvss_version: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.cvss_version, Unset): + cvss_version = (None, str(self.cvss_version.value).encode(), "text/plain") + # CHANGE END (3) #} - cvss_version = CvssVersionEnum(self.cvss_version).value - - issuer: Union[Unset, Tuple[None, str, str]] = UNSET + issuer: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.issuer, Unset): + issuer = (None, str(self.issuer.value).encode(), "text/plain") + # CHANGE END (3) #} - issuer = IssuerEnum(self.issuer).value + score = (None, str(self.score).encode(), "text/plain") - score = ( - self.score if self.score is UNSET else (None, str(self.score), "text/plain") - ) - uuid = self.uuid if self.uuid is UNSET else (None, str(self.uuid), "text/plain") - vector = ( - self.vector - if self.vector is UNSET - else (None, str(self.vector), "text/plain") - ) - embargoed = ( - self.embargoed - if self.embargoed is UNSET - else (None, str(self.embargoed), "text/plain") - ) - alerts: Union[Unset, Tuple[None, str, str]] = UNSET + uuid: 
bytes = UNSET + if not isinstance(self.uuid, Unset): + uuid = str(self.uuid) + + vector = (None, str(self.vector).encode(), "text/plain") + + embargoed = (None, str(self.embargoed).encode(), "text/plain") + + alerts: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.alerts, Unset): _temp_alerts = [] for alerts_item_data in self.alerts: - alerts_item: Dict[str, Any] = UNSET + alerts_item: dict[str, Any] = UNSET if not isinstance(alerts_item_data, Unset): alerts_item = alerts_item_data.to_dict() _temp_alerts.append(alerts_item) - alerts = (None, json.dumps(_temp_alerts), "application/json") + alerts = (None, json.dumps(_temp_alerts).encode(), "application/json") - created_dt: str = UNSET + created_dt: bytes = UNSET if not isinstance(self.created_dt, Unset): - created_dt = self.created_dt.isoformat() + created_dt = self.created_dt.isoformat().encode() - comment = ( - self.comment - if self.comment is UNSET - else (None, str(self.comment), "text/plain") - ) + comment: Union[Unset, tuple[None, bytes, str]] + + if isinstance(self.comment, Unset): + comment = UNSET + elif isinstance(self.comment, str): + comment = (None, str(self.comment).encode(), "text/plain") + else: + comment = (None, str(self.comment).encode(), "text/plain") + + field_dict: dict[str, Any] = {} + for prop_name, prop in self.additional_properties.items(): + field_dict[prop_name] = (None, str(prop).encode(), "text/plain") - field_dict: Dict[str, Any] = {} - field_dict.update( - { - key: (None, str(value), "text/plain") - for key, value in self.additional_properties.items() - } - ) if not isinstance(cvss_version, Unset): field_dict["cvss_version"] = cvss_version if not isinstance(issuer, Unset): @@ -157,8 +179,11 @@ def to_multipart(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.alert import Alert + d = src_dict.copy() + # } _cvss_version = d.pop("cvss_version", UNSET) cvss_version: CvssVersionEnum if isinstance(_cvss_version, Unset): @@ -166,6 +191,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: cvss_version = CvssVersionEnum(_cvss_version) + # } _issuer = d.pop("issuer", UNSET) issuer: IssuerEnum if isinstance(_issuer, Unset): @@ -175,7 +201,13 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: score = d.pop("score", UNSET) - uuid = d.pop("uuid", UNSET) + # } + _uuid = d.pop("uuid", UNSET) + uuid: UUID + if isinstance(_uuid, Unset): + uuid = UNSET + else: + uuid = UUID(_uuid) vector = d.pop("vector", UNSET) @@ -183,19 +215,18 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: alerts = [] _alerts = d.pop("alerts", UNSET) - if _alerts is UNSET: - alerts = UNSET - else: - for alerts_item_data in _alerts or []: - _alerts_item = alerts_item_data - alerts_item: Alert - if isinstance(_alerts_item, Unset): - alerts_item = UNSET - else: - alerts_item = Alert.from_dict(_alerts_item) - - alerts.append(alerts_item) - + for alerts_item_data in _alerts or []: + # } + _alerts_item = alerts_item_data + alerts_item: Alert + if isinstance(_alerts_item, Unset): + alerts_item = UNSET + else: + alerts_item = Alert.from_dict(_alerts_item) + + alerts.append(alerts_item) + + # } _created_dt = d.pop("created_dt", UNSET) created_dt: datetime.datetime if isinstance(_created_dt, Unset): @@ -203,7 +234,14 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: created_dt = isoparse(_created_dt) - comment = d.pop("comment", UNSET) + 
def _parse_comment(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + comment = _parse_comment(d.pop("comment", UNSET)) affect_cvss_post = cls( cvss_version=cvss_version, @@ -226,16 +264,16 @@ def get_fields(): "cvss_version": CvssVersionEnum, "issuer": IssuerEnum, "score": float, - "uuid": str, + "uuid": UUID, "vector": str, "embargoed": bool, - "alerts": List[Alert], + "alerts": list["Alert"], "created_dt": datetime.datetime, - "comment": str, + "comment": Union[None, str], } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/affect_cvss_put.py b/osidb_bindings/bindings/python_client/models/affect_cvss_put.py index c17cf4f..6ea32a5 100644 --- a/osidb_bindings/bindings/python_client/models/affect_cvss_put.py +++ b/osidb_bindings/bindings/python_client/models/affect_cvss_put.py @@ -1,54 +1,78 @@ import datetime import json -from typing import Any, Dict, List, Tuple, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union, cast +from uuid import UUID -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.alert import Alert from ..models.cvss_version_enum import CvssVersionEnum from ..models.issuer_enum import IssuerEnum from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.alert import Alert + + T = TypeVar("T", bound="AffectCVSSPut") -@attr.s(auto_attribs=True) +@_attrs_define class AffectCVSSPut(OSIDBModel): - """AffectCVSS serializer""" + """AffectCVSS serializer + + Attributes: + cvss_version (CvssVersionEnum): + issuer (IssuerEnum): + score (float): + uuid (UUID): + vector (str): + embargoed (bool): The embargoed boolean attribute is technically read-only as it just indirectly modifies the + ACLs but is mandatory as it controls the access to the resource. + alerts (list['Alert']): + created_dt (datetime.datetime): + updated_dt (datetime.datetime): The updated_dt timestamp attribute is mandatory on update as it is used to + detect mit-air collisions. 
+ comment (Union[None, Unset, str]): + """ cvss_version: CvssVersionEnum issuer: IssuerEnum score: float - uuid: str + uuid: UUID vector: str embargoed: bool - alerts: List[Alert] + alerts: list["Alert"] created_dt: datetime.datetime updated_dt: datetime.datetime - comment: Union[Unset, None, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + comment: Union[None, Unset, str] = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: cvss_version: str = UNSET if not isinstance(self.cvss_version, Unset): - cvss_version = CvssVersionEnum(self.cvss_version).value issuer: str = UNSET if not isinstance(self.issuer, Unset): - issuer = IssuerEnum(self.issuer).value score = self.score - uuid = self.uuid + + uuid: str = UNSET + if not isinstance(self.uuid, Unset): + uuid = str(self.uuid) + vector = self.vector + embargoed = self.embargoed - alerts: List[Dict[str, Any]] = UNSET + + alerts: list[dict[str, Any]] = UNSET if not isinstance(self.alerts, Unset): alerts = [] for alerts_item_data in self.alerts: - alerts_item: Dict[str, Any] = UNSET + alerts_item: dict[str, Any] = UNSET if not isinstance(alerts_item_data, Unset): alerts_item = alerts_item_data.to_dict() @@ -62,9 +86,13 @@ def to_dict(self) -> Dict[str, Any]: if not isinstance(self.updated_dt, Unset): updated_dt = self.updated_dt.isoformat() - comment = self.comment + comment: Union[None, Unset, str] + if isinstance(self.comment, Unset): + comment = UNSET + else: + comment = self.comment - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(cvss_version, Unset): field_dict["cvss_version"] = cvss_version @@ -89,63 +117,59 @@ def to_dict(self) -> Dict[str, Any]: return field_dict - def to_multipart(self) -> Dict[str, Any]: - cvss_version: Union[Unset, Tuple[None, str, str]] = UNSET + def to_multipart(self) -> dict[str, Any]: + cvss_version: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.cvss_version, Unset): + cvss_version = (None, str(self.cvss_version.value).encode(), "text/plain") + # CHANGE END (3) #} - cvss_version = CvssVersionEnum(self.cvss_version).value - - issuer: Union[Unset, Tuple[None, str, str]] = UNSET + issuer: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.issuer, Unset): + issuer = (None, str(self.issuer.value).encode(), "text/plain") + # CHANGE END (3) #} - issuer = IssuerEnum(self.issuer).value + score = (None, str(self.score).encode(), "text/plain") - score = ( - self.score if self.score is UNSET else (None, str(self.score), "text/plain") - ) - uuid = self.uuid if self.uuid is UNSET else (None, str(self.uuid), "text/plain") - vector = ( - self.vector - if self.vector is UNSET - else (None, str(self.vector), "text/plain") - ) - embargoed = ( - self.embargoed - if self.embargoed is UNSET - else (None, str(self.embargoed), "text/plain") - ) - alerts: Union[Unset, Tuple[None, str, str]] = UNSET + uuid: bytes = UNSET + if not isinstance(self.uuid, Unset): + uuid = str(self.uuid) + + vector = (None, str(self.vector).encode(), "text/plain") + + embargoed = (None, str(self.embargoed).encode(), "text/plain") + + alerts: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.alerts, Unset): _temp_alerts = [] for alerts_item_data in self.alerts: - alerts_item: Dict[str, Any] = UNSET + alerts_item: dict[str, Any] = UNSET if not isinstance(alerts_item_data, 
Unset): alerts_item = alerts_item_data.to_dict() _temp_alerts.append(alerts_item) - alerts = (None, json.dumps(_temp_alerts), "application/json") + alerts = (None, json.dumps(_temp_alerts).encode(), "application/json") - created_dt: str = UNSET + created_dt: bytes = UNSET if not isinstance(self.created_dt, Unset): - created_dt = self.created_dt.isoformat() + created_dt = self.created_dt.isoformat().encode() - updated_dt: str = UNSET + updated_dt: bytes = UNSET if not isinstance(self.updated_dt, Unset): - updated_dt = self.updated_dt.isoformat() + updated_dt = self.updated_dt.isoformat().encode() - comment = ( - self.comment - if self.comment is UNSET - else (None, str(self.comment), "text/plain") - ) + comment: Union[Unset, tuple[None, bytes, str]] + + if isinstance(self.comment, Unset): + comment = UNSET + elif isinstance(self.comment, str): + comment = (None, str(self.comment).encode(), "text/plain") + else: + comment = (None, str(self.comment).encode(), "text/plain") + + field_dict: dict[str, Any] = {} + for prop_name, prop in self.additional_properties.items(): + field_dict[prop_name] = (None, str(prop).encode(), "text/plain") - field_dict: Dict[str, Any] = {} - field_dict.update( - { - key: (None, str(value), "text/plain") - for key, value in self.additional_properties.items() - } - ) if not isinstance(cvss_version, Unset): field_dict["cvss_version"] = cvss_version if not isinstance(issuer, Unset): @@ -170,8 +194,11 @@ def to_multipart(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.alert import Alert + d = src_dict.copy() + # } _cvss_version = d.pop("cvss_version", UNSET) cvss_version: CvssVersionEnum if isinstance(_cvss_version, Unset): @@ -179,6 +206,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: cvss_version = CvssVersionEnum(_cvss_version) + # } _issuer = d.pop("issuer", UNSET) issuer: IssuerEnum if isinstance(_issuer, Unset): @@ -188,7 +216,13 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: score = d.pop("score", UNSET) - uuid = d.pop("uuid", UNSET) + # } + _uuid = d.pop("uuid", UNSET) + uuid: UUID + if isinstance(_uuid, Unset): + uuid = UNSET + else: + uuid = UUID(_uuid) vector = d.pop("vector", UNSET) @@ -196,19 +230,18 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: alerts = [] _alerts = d.pop("alerts", UNSET) - if _alerts is UNSET: - alerts = UNSET - else: - for alerts_item_data in _alerts or []: - _alerts_item = alerts_item_data - alerts_item: Alert - if isinstance(_alerts_item, Unset): - alerts_item = UNSET - else: - alerts_item = Alert.from_dict(_alerts_item) - - alerts.append(alerts_item) - + for alerts_item_data in _alerts or []: + # } + _alerts_item = alerts_item_data + alerts_item: Alert + if isinstance(_alerts_item, Unset): + alerts_item = UNSET + else: + alerts_item = Alert.from_dict(_alerts_item) + + alerts.append(alerts_item) + + # } _created_dt = d.pop("created_dt", UNSET) created_dt: datetime.datetime if isinstance(_created_dt, Unset): @@ -216,6 +249,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: created_dt = isoparse(_created_dt) + # } _updated_dt = d.pop("updated_dt", UNSET) updated_dt: datetime.datetime if isinstance(_updated_dt, Unset): @@ -223,7 +257,14 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: updated_dt = isoparse(_updated_dt) - comment = d.pop("comment", UNSET) + def _parse_comment(data: object) -> 
Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + comment = _parse_comment(d.pop("comment", UNSET)) affect_cvss_put = cls( cvss_version=cvss_version, @@ -247,17 +288,17 @@ def get_fields(): "cvss_version": CvssVersionEnum, "issuer": IssuerEnum, "score": float, - "uuid": str, + "uuid": UUID, "vector": str, "embargoed": bool, - "alerts": List[Alert], + "alerts": list["Alert"], "created_dt": datetime.datetime, "updated_dt": datetime.datetime, - "comment": str, + "comment": Union[None, str], } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/affect_post.py b/osidb_bindings/bindings/python_client/models/affect_post.py index da5e945..0cd5180 100644 --- a/osidb_bindings/bindings/python_client/models/affect_post.py +++ b/osidb_bindings/bindings/python_client/models/affect_post.py @@ -1,74 +1,114 @@ import datetime import json -from typing import Any, Dict, List, Optional, Tuple, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union, cast +from uuid import UUID -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.affect_cvss import AffectCVSS from ..models.affectedness_enum import AffectednessEnum -from ..models.alert import Alert from ..models.blank_enum import BlankEnum from ..models.impact_enum import ImpactEnum from ..models.resolution_enum import ResolutionEnum -from ..models.tracker import Tracker from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.affect_cvss import AffectCVSS + from ..models.alert import Alert + from ..models.tracker import Tracker + + T = TypeVar("T", bound="AffectPost") -@attr.s(auto_attribs=True) +@_attrs_define class AffectPost(OSIDBModel): - """Affect serializer""" - - uuid: str + """Affect serializer + + Attributes: + uuid (UUID): + flaw (Union[None, UUID]): + ps_module (str): + ps_product (str): + trackers (list['Tracker']): + delegated_resolution (str): + cvss_scores (list['AffectCVSS']): + embargoed (bool): The embargoed boolean attribute is technically read-only as it just indirectly modifies the + ACLs but is mandatory as it controls the access to the resource. 
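
The AffectCVSSPut hunks around this point switch `uuid` from `str` to `uuid.UUID`, type `comment` as `Union[None, Unset, str]`, and make `to_multipart()` emit `(None, bytes, content-type)` tuples. A minimal sketch of round-tripping the regenerated model; the import path, the UUID, and the CVSS values below are assumptions for illustration, not taken from this patch:

    # Assumed import path; adjust to however osidb_bindings re-exports its models.
    from osidb_bindings.bindings.python_client.models.affect_cvss_put import AffectCVSSPut

    payload = {
        "uuid": "11111111-2222-3333-4444-555555555555",   # hypothetical UUID
        "vector": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H",  # invented vector
        "score": 9.8,
        "embargoed": False,
        "created_dt": "2024-12-18T00:00:00Z",
        "updated_dt": "2024-12-18T00:00:00Z",
        "comment": None,   # explicit null; handled by the new _parse_comment helper
    }

    cvss = AffectCVSSPut.from_dict(payload)
    print(type(cvss.uuid))     # uuid.UUID after this change (previously a plain str)
    body = cvss.to_multipart()
    print(body["vector"])      # (None, b'CVSS:3.1/...', 'text/plain') -- bytes now
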
+ alerts (list['Alert']): + created_dt (datetime.datetime): + affectedness (Union[AffectednessEnum, BlankEnum, Unset]): + resolution (Union[BlankEnum, ResolutionEnum, Unset]): + ps_component (Union[None, Unset, str]): + impact (Union[BlankEnum, ImpactEnum, Unset]): + purl (Union[None, Unset, str]): + """ + + uuid: UUID + flaw: Union[None, UUID] ps_module: str ps_product: str - trackers: List[Tracker] + trackers: list["Tracker"] delegated_resolution: str - cvss_scores: List[AffectCVSS] + cvss_scores: list["AffectCVSS"] embargoed: bool - alerts: List[Alert] + alerts: list["Alert"] created_dt: datetime.datetime - flaw: Optional[str] affectedness: Union[AffectednessEnum, BlankEnum, Unset] = UNSET resolution: Union[BlankEnum, ResolutionEnum, Unset] = UNSET - ps_component: Union[Unset, None, str] = UNSET + ps_component: Union[None, Unset, str] = UNSET impact: Union[BlankEnum, ImpactEnum, Unset] = UNSET - purl: Union[Unset, None, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + purl: Union[None, Unset, str] = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + uuid: str = UNSET + if not isinstance(self.uuid, Unset): + uuid = str(self.uuid) + + flaw: Union[None, str] + if isinstance(self.flaw, UUID): + flaw = UNSET + if not isinstance(self.flaw, Unset): + flaw = str(self.flaw) + + else: + flaw = self.flaw - def to_dict(self) -> Dict[str, Any]: - uuid = self.uuid ps_module = self.ps_module + ps_product = self.ps_product - trackers: List[Dict[str, Any]] = UNSET + + trackers: list[dict[str, Any]] = UNSET if not isinstance(self.trackers, Unset): trackers = [] for trackers_item_data in self.trackers: - trackers_item: Dict[str, Any] = UNSET + trackers_item: dict[str, Any] = UNSET if not isinstance(trackers_item_data, Unset): trackers_item = trackers_item_data.to_dict() trackers.append(trackers_item) delegated_resolution = self.delegated_resolution - cvss_scores: List[Dict[str, Any]] = UNSET + + cvss_scores: list[dict[str, Any]] = UNSET if not isinstance(self.cvss_scores, Unset): cvss_scores = [] for cvss_scores_item_data in self.cvss_scores: - cvss_scores_item: Dict[str, Any] = UNSET + cvss_scores_item: dict[str, Any] = UNSET if not isinstance(cvss_scores_item_data, Unset): cvss_scores_item = cvss_scores_item_data.to_dict() cvss_scores.append(cvss_scores_item) embargoed = self.embargoed - alerts: List[Dict[str, Any]] = UNSET + + alerts: list[dict[str, Any]] = UNSET if not isinstance(self.alerts, Unset): alerts = [] for alerts_item_data in self.alerts: - alerts_item: Dict[str, Any] = UNSET + alerts_item: dict[str, Any] = UNSET if not isinstance(alerts_item_data, Unset): alerts_item = alerts_item_data.to_dict() @@ -78,20 +118,17 @@ def to_dict(self) -> Dict[str, Any]: if not isinstance(self.created_dt, Unset): created_dt = self.created_dt.isoformat() - flaw = self.flaw affectedness: Union[Unset, str] if isinstance(self.affectedness, Unset): affectedness = UNSET elif isinstance(self.affectedness, AffectednessEnum): affectedness = UNSET if not isinstance(self.affectedness, Unset): - affectedness = AffectednessEnum(self.affectedness).value else: affectedness = UNSET if not isinstance(self.affectedness, Unset): - affectedness = BlankEnum(self.affectedness).value resolution: Union[Unset, str] @@ -100,37 +137,44 @@ def to_dict(self) -> Dict[str, Any]: elif isinstance(self.resolution, ResolutionEnum): resolution = UNSET if not isinstance(self.resolution, Unset): - resolution = 
ResolutionEnum(self.resolution).value else: resolution = UNSET if not isinstance(self.resolution, Unset): - resolution = BlankEnum(self.resolution).value - ps_component = self.ps_component + ps_component: Union[None, Unset, str] + if isinstance(self.ps_component, Unset): + ps_component = UNSET + else: + ps_component = self.ps_component + impact: Union[Unset, str] if isinstance(self.impact, Unset): impact = UNSET elif isinstance(self.impact, ImpactEnum): impact = UNSET if not isinstance(self.impact, Unset): - impact = ImpactEnum(self.impact).value else: impact = UNSET if not isinstance(self.impact, Unset): - impact = BlankEnum(self.impact).value - purl = self.purl + purl: Union[None, Unset, str] + if isinstance(self.purl, Unset): + purl = UNSET + else: + purl = self.purl - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(uuid, Unset): field_dict["uuid"] = uuid + if not isinstance(flaw, Unset): + field_dict["flaw"] = flaw if not isinstance(ps_module, Unset): field_dict["ps_module"] = ps_module if not isinstance(ps_product, Unset): @@ -147,8 +191,6 @@ def to_dict(self) -> Dict[str, Any]: field_dict["alerts"] = alerts if not isinstance(created_dt, Unset): field_dict["created_dt"] = created_dt - if not isinstance(flaw, Unset): - field_dict["flaw"] = flaw if not isinstance(affectedness, Unset): field_dict["affectedness"] = affectedness if not isinstance(resolution, Unset): @@ -162,127 +204,136 @@ def to_dict(self) -> Dict[str, Any]: return field_dict - def to_multipart(self) -> Dict[str, Any]: - uuid = self.uuid if self.uuid is UNSET else (None, str(self.uuid), "text/plain") - ps_module = ( - self.ps_module - if self.ps_module is UNSET - else (None, str(self.ps_module), "text/plain") - ) - ps_product = ( - self.ps_product - if self.ps_product is UNSET - else (None, str(self.ps_product), "text/plain") - ) - trackers: Union[Unset, Tuple[None, str, str]] = UNSET + def to_multipart(self) -> dict[str, Any]: + uuid: bytes = UNSET + if not isinstance(self.uuid, Unset): + uuid = str(self.uuid) + + flaw: tuple[None, bytes, str] + + if isinstance(self.flaw, UUID): + flaw: bytes = UNSET + if not isinstance(self.flaw, Unset): + flaw = str(self.flaw) + else: + flaw = (None, str(self.flaw).encode(), "text/plain") + + ps_module = (None, str(self.ps_module).encode(), "text/plain") + + ps_product = (None, str(self.ps_product).encode(), "text/plain") + + trackers: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.trackers, Unset): _temp_trackers = [] for trackers_item_data in self.trackers: - trackers_item: Dict[str, Any] = UNSET + trackers_item: dict[str, Any] = UNSET if not isinstance(trackers_item_data, Unset): trackers_item = trackers_item_data.to_dict() _temp_trackers.append(trackers_item) - trackers = (None, json.dumps(_temp_trackers), "application/json") + trackers = (None, json.dumps(_temp_trackers).encode(), "application/json") - delegated_resolution = ( - self.delegated_resolution - if self.delegated_resolution is UNSET - else (None, str(self.delegated_resolution), "text/plain") - ) - cvss_scores: Union[Unset, Tuple[None, str, str]] = UNSET + delegated_resolution = (None, str(self.delegated_resolution).encode(), "text/plain") + + cvss_scores: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.cvss_scores, Unset): _temp_cvss_scores = [] for cvss_scores_item_data in self.cvss_scores: - cvss_scores_item: Dict[str, Any] = UNSET + cvss_scores_item: dict[str, Any] = UNSET if not 
isinstance(cvss_scores_item_data, Unset): cvss_scores_item = cvss_scores_item_data.to_dict() _temp_cvss_scores.append(cvss_scores_item) - cvss_scores = (None, json.dumps(_temp_cvss_scores), "application/json") + cvss_scores = (None, json.dumps(_temp_cvss_scores).encode(), "application/json") - embargoed = ( - self.embargoed - if self.embargoed is UNSET - else (None, str(self.embargoed), "text/plain") - ) - alerts: Union[Unset, Tuple[None, str, str]] = UNSET + embargoed = (None, str(self.embargoed).encode(), "text/plain") + + alerts: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.alerts, Unset): _temp_alerts = [] for alerts_item_data in self.alerts: - alerts_item: Dict[str, Any] = UNSET + alerts_item: dict[str, Any] = UNSET if not isinstance(alerts_item_data, Unset): alerts_item = alerts_item_data.to_dict() _temp_alerts.append(alerts_item) - alerts = (None, json.dumps(_temp_alerts), "application/json") + alerts = (None, json.dumps(_temp_alerts).encode(), "application/json") - created_dt: str = UNSET + created_dt: bytes = UNSET if not isinstance(self.created_dt, Unset): - created_dt = self.created_dt.isoformat() + created_dt = self.created_dt.isoformat().encode() + + affectedness: Union[Unset, tuple[None, bytes, str]] - flaw = self.flaw if self.flaw is UNSET else (None, str(self.flaw), "text/plain") - affectedness: Union[Unset, str] if isinstance(self.affectedness, Unset): affectedness = UNSET elif isinstance(self.affectedness, AffectednessEnum): - affectedness = UNSET + affectedness: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.affectedness, Unset): - - affectedness = AffectednessEnum(self.affectedness).value - + affectedness = (None, str(self.affectedness.value).encode(), "text/plain") + # CHANGE END (3) #} else: - affectedness = UNSET + affectedness: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.affectedness, Unset): + affectedness = (None, str(self.affectedness.value).encode(), "text/plain") + # CHANGE END (3) #} - affectedness = BlankEnum(self.affectedness).value + resolution: Union[Unset, tuple[None, bytes, str]] - resolution: Union[Unset, str] if isinstance(self.resolution, Unset): resolution = UNSET elif isinstance(self.resolution, ResolutionEnum): - resolution = UNSET + resolution: Union[Unset, tuple[None, bytes, str]] = UNSET + if not isinstance(self.resolution, Unset): + resolution = (None, str(self.resolution.value).encode(), "text/plain") + # CHANGE END (3) #} + else: + resolution: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.resolution, Unset): + resolution = (None, str(self.resolution.value).encode(), "text/plain") + # CHANGE END (3) #} - resolution = ResolutionEnum(self.resolution).value + ps_component: Union[Unset, tuple[None, bytes, str]] + if isinstance(self.ps_component, Unset): + ps_component = UNSET + elif isinstance(self.ps_component, str): + ps_component = (None, str(self.ps_component).encode(), "text/plain") else: - resolution = UNSET - if not isinstance(self.resolution, Unset): + ps_component = (None, str(self.ps_component).encode(), "text/plain") - resolution = BlankEnum(self.resolution).value + impact: Union[Unset, tuple[None, bytes, str]] - ps_component = ( - self.ps_component - if self.ps_component is UNSET - else (None, str(self.ps_component), "text/plain") - ) - impact: Union[Unset, str] if isinstance(self.impact, Unset): impact = UNSET elif isinstance(self.impact, ImpactEnum): - impact = UNSET + impact: Union[Unset, tuple[None, bytes, str]] = UNSET if not 
isinstance(self.impact, Unset): - - impact = ImpactEnum(self.impact).value - + impact = (None, str(self.impact.value).encode(), "text/plain") + # CHANGE END (3) #} else: - impact = UNSET + impact: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.impact, Unset): + impact = (None, str(self.impact.value).encode(), "text/plain") + # CHANGE END (3) #} - impact = BlankEnum(self.impact).value + purl: Union[Unset, tuple[None, bytes, str]] - purl = self.purl if self.purl is UNSET else (None, str(self.purl), "text/plain") + if isinstance(self.purl, Unset): + purl = UNSET + elif isinstance(self.purl, str): + purl = (None, str(self.purl).encode(), "text/plain") + else: + purl = (None, str(self.purl).encode(), "text/plain") + + field_dict: dict[str, Any] = {} + for prop_name, prop in self.additional_properties.items(): + field_dict[prop_name] = (None, str(prop).encode(), "text/plain") - field_dict: Dict[str, Any] = {} - field_dict.update( - { - key: (None, str(value), "text/plain") - for key, value in self.additional_properties.items() - } - ) if not isinstance(uuid, Unset): field_dict["uuid"] = uuid + if not isinstance(flaw, Unset): + field_dict["flaw"] = flaw if not isinstance(ps_module, Unset): field_dict["ps_module"] = ps_module if not isinstance(ps_product, Unset): @@ -299,8 +350,6 @@ def to_multipart(self) -> Dict[str, Any]: field_dict["alerts"] = alerts if not isinstance(created_dt, Unset): field_dict["created_dt"] = created_dt - if not isinstance(flaw, Unset): - field_dict["flaw"] = flaw if not isinstance(affectedness, Unset): field_dict["affectedness"] = affectedness if not isinstance(resolution, Unset): @@ -315,9 +364,40 @@ def to_multipart(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.affect_cvss import AffectCVSS + from ..models.alert import Alert + from ..models.tracker import Tracker + d = src_dict.copy() - uuid = d.pop("uuid", UNSET) + # } + _uuid = d.pop("uuid", UNSET) + uuid: UUID + if isinstance(_uuid, Unset): + uuid = UNSET + else: + uuid = UUID(_uuid) + + def _parse_flaw(data: object) -> Union[None, UUID]: + if data is None: + return data + try: + if not isinstance(data, str): + raise TypeError() + # } + _flaw_type_0 = data + flaw_type_0: UUID + if isinstance(_flaw_type_0, Unset): + flaw_type_0 = UNSET + else: + flaw_type_0 = UUID(_flaw_type_0) + + return flaw_type_0 + except: # noqa: E722 + pass + return cast(Union[None, UUID], data) + + flaw = _parse_flaw(d.pop("flaw", UNSET)) ps_module = d.pop("ps_module", UNSET) @@ -325,53 +405,48 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: trackers = [] _trackers = d.pop("trackers", UNSET) - if _trackers is UNSET: - trackers = UNSET - else: - for trackers_item_data in _trackers or []: - _trackers_item = trackers_item_data - trackers_item: Tracker - if isinstance(_trackers_item, Unset): - trackers_item = UNSET - else: - trackers_item = Tracker.from_dict(_trackers_item) + for trackers_item_data in _trackers or []: + # } + _trackers_item = trackers_item_data + trackers_item: Tracker + if isinstance(_trackers_item, Unset): + trackers_item = UNSET + else: + trackers_item = Tracker.from_dict(_trackers_item) - trackers.append(trackers_item) + trackers.append(trackers_item) delegated_resolution = d.pop("delegated_resolution", UNSET) cvss_scores = [] _cvss_scores = d.pop("cvss_scores", UNSET) - if _cvss_scores is UNSET: - cvss_scores = UNSET - else: - for 
cvss_scores_item_data in _cvss_scores or []: - _cvss_scores_item = cvss_scores_item_data - cvss_scores_item: AffectCVSS - if isinstance(_cvss_scores_item, Unset): - cvss_scores_item = UNSET - else: - cvss_scores_item = AffectCVSS.from_dict(_cvss_scores_item) + for cvss_scores_item_data in _cvss_scores or []: + # } + _cvss_scores_item = cvss_scores_item_data + cvss_scores_item: AffectCVSS + if isinstance(_cvss_scores_item, Unset): + cvss_scores_item = UNSET + else: + cvss_scores_item = AffectCVSS.from_dict(_cvss_scores_item) - cvss_scores.append(cvss_scores_item) + cvss_scores.append(cvss_scores_item) embargoed = d.pop("embargoed", UNSET) alerts = [] _alerts = d.pop("alerts", UNSET) - if _alerts is UNSET: - alerts = UNSET - else: - for alerts_item_data in _alerts or []: - _alerts_item = alerts_item_data - alerts_item: Alert - if isinstance(_alerts_item, Unset): - alerts_item = UNSET - else: - alerts_item = Alert.from_dict(_alerts_item) + for alerts_item_data in _alerts or []: + # } + _alerts_item = alerts_item_data + alerts_item: Alert + if isinstance(_alerts_item, Unset): + alerts_item = UNSET + else: + alerts_item = Alert.from_dict(_alerts_item) - alerts.append(alerts_item) + alerts.append(alerts_item) + # } _created_dt = d.pop("created_dt", UNSET) created_dt: datetime.datetime if isinstance(_created_dt, Unset): @@ -379,18 +454,15 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: created_dt = isoparse(_created_dt) - flaw = d.pop("flaw", UNSET) - - def _parse_affectedness( - data: object, - ) -> Union[AffectednessEnum, BlankEnum, Unset]: + def _parse_affectedness(data: object) -> Union[AffectednessEnum, BlankEnum, Unset]: if isinstance(data, Unset): return data try: if not isinstance(data, str): raise TypeError() + # } _affectedness_type_0 = data - affectedness_type_0: Union[Unset, AffectednessEnum] + affectedness_type_0: AffectednessEnum if isinstance(_affectedness_type_0, Unset): affectedness_type_0 = UNSET else: @@ -401,8 +473,9 @@ def _parse_affectedness( pass if not isinstance(data, str): raise TypeError() + # } _affectedness_type_1 = data - affectedness_type_1: Union[Unset, BlankEnum] + affectedness_type_1: BlankEnum if isinstance(_affectedness_type_1, Unset): affectedness_type_1 = UNSET else: @@ -418,8 +491,9 @@ def _parse_resolution(data: object) -> Union[BlankEnum, ResolutionEnum, Unset]: try: if not isinstance(data, str): raise TypeError() + # } _resolution_type_0 = data - resolution_type_0: Union[Unset, ResolutionEnum] + resolution_type_0: ResolutionEnum if isinstance(_resolution_type_0, Unset): resolution_type_0 = UNSET else: @@ -430,8 +504,9 @@ def _parse_resolution(data: object) -> Union[BlankEnum, ResolutionEnum, Unset]: pass if not isinstance(data, str): raise TypeError() + # } _resolution_type_1 = data - resolution_type_1: Union[Unset, BlankEnum] + resolution_type_1: BlankEnum if isinstance(_resolution_type_1, Unset): resolution_type_1 = UNSET else: @@ -441,7 +516,14 @@ def _parse_resolution(data: object) -> Union[BlankEnum, ResolutionEnum, Unset]: resolution = _parse_resolution(d.pop("resolution", UNSET)) - ps_component = d.pop("ps_component", UNSET) + def _parse_ps_component(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + ps_component = _parse_ps_component(d.pop("ps_component", UNSET)) def _parse_impact(data: object) -> Union[BlankEnum, ImpactEnum, Unset]: if isinstance(data, Unset): @@ -449,8 +531,9 @@ def _parse_impact(data: object) 
-> Union[BlankEnum, ImpactEnum, Unset]: try: if not isinstance(data, str): raise TypeError() + # } _impact_type_0 = data - impact_type_0: Union[Unset, ImpactEnum] + impact_type_0: ImpactEnum if isinstance(_impact_type_0, Unset): impact_type_0 = UNSET else: @@ -461,8 +544,9 @@ def _parse_impact(data: object) -> Union[BlankEnum, ImpactEnum, Unset]: pass if not isinstance(data, str): raise TypeError() + # } _impact_type_1 = data - impact_type_1: Union[Unset, BlankEnum] + impact_type_1: BlankEnum if isinstance(_impact_type_1, Unset): impact_type_1 = UNSET else: @@ -472,10 +556,18 @@ def _parse_impact(data: object) -> Union[BlankEnum, ImpactEnum, Unset]: impact = _parse_impact(d.pop("impact", UNSET)) - purl = d.pop("purl", UNSET) + def _parse_purl(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + purl = _parse_purl(d.pop("purl", UNSET)) affect_post = cls( uuid=uuid, + flaw=flaw, ps_module=ps_module, ps_product=ps_product, trackers=trackers, @@ -484,7 +576,6 @@ def _parse_impact(data: object) -> Union[BlankEnum, ImpactEnum, Unset]: embargoed=embargoed, alerts=alerts, created_dt=created_dt, - flaw=flaw, affectedness=affectedness, resolution=resolution, ps_component=ps_component, @@ -498,25 +589,25 @@ def _parse_impact(data: object) -> Union[BlankEnum, ImpactEnum, Unset]: @staticmethod def get_fields(): return { - "uuid": str, + "uuid": UUID, + "flaw": Union[None, UUID], "ps_module": str, "ps_product": str, - "trackers": List[Tracker], + "trackers": list["Tracker"], "delegated_resolution": str, - "cvss_scores": List[AffectCVSS], + "cvss_scores": list["AffectCVSS"], "embargoed": bool, - "alerts": List[Alert], + "alerts": list["Alert"], "created_dt": datetime.datetime, - "flaw": str, "affectedness": Union[AffectednessEnum, BlankEnum], "resolution": Union[BlankEnum, ResolutionEnum], - "ps_component": str, + "ps_component": Union[None, str], "impact": Union[BlankEnum, ImpactEnum], - "purl": str, + "purl": Union[None, str], } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/affect_report_data.py b/osidb_bindings/bindings/python_client/models/affect_report_data.py index 2ed7d98..766e7b1 100644 --- a/osidb_bindings/bindings/python_client/models/affect_report_data.py +++ b/osidb_bindings/bindings/python_client/models/affect_report_data.py @@ -1,43 +1,54 @@ -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from ..models.affectedness_enum import AffectednessEnum from ..models.blank_enum import BlankEnum from ..models.resolution_enum import ResolutionEnum -from ..models.tracker_report_data import TrackerReportData from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.tracker_report_data import TrackerReportData + + T = TypeVar("T", bound="AffectReportData") -@attr.s(auto_attribs=True) +@_attrs_define class AffectReportData(OSIDBModel): - """ """ + """ + Attributes: + ps_module (str): + ps_component (str): + affectedness (Union[AffectednessEnum, BlankEnum, Unset]): + resolution (Union[BlankEnum, ResolutionEnum, Unset]): + trackers (Union[Unset, list['TrackerReportData']]): + """ ps_module: str ps_component: str affectedness: 
Union[AffectednessEnum, BlankEnum, Unset] = UNSET resolution: Union[BlankEnum, ResolutionEnum, Unset] = UNSET - trackers: Union[Unset, List[TrackerReportData]] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + trackers: Union[Unset, list["TrackerReportData"]] = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: ps_module = self.ps_module + ps_component = self.ps_component + affectedness: Union[Unset, str] if isinstance(self.affectedness, Unset): affectedness = UNSET elif isinstance(self.affectedness, AffectednessEnum): affectedness = UNSET if not isinstance(self.affectedness, Unset): - affectedness = AffectednessEnum(self.affectedness).value else: affectedness = UNSET if not isinstance(self.affectedness, Unset): - affectedness = BlankEnum(self.affectedness).value resolution: Union[Unset, str] @@ -46,26 +57,24 @@ def to_dict(self) -> Dict[str, Any]: elif isinstance(self.resolution, ResolutionEnum): resolution = UNSET if not isinstance(self.resolution, Unset): - resolution = ResolutionEnum(self.resolution).value else: resolution = UNSET if not isinstance(self.resolution, Unset): - resolution = BlankEnum(self.resolution).value - trackers: Union[Unset, List[Dict[str, Any]]] = UNSET + trackers: Union[Unset, list[dict[str, Any]]] = UNSET if not isinstance(self.trackers, Unset): trackers = [] for trackers_item_data in self.trackers: - trackers_item: Dict[str, Any] = UNSET + trackers_item: dict[str, Any] = UNSET if not isinstance(trackers_item_data, Unset): trackers_item = trackers_item_data.to_dict() trackers.append(trackers_item) - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(ps_module, Unset): field_dict["ps_module"] = ps_module @@ -81,22 +90,23 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.tracker_report_data import TrackerReportData + d = src_dict.copy() ps_module = d.pop("ps_module", UNSET) ps_component = d.pop("ps_component", UNSET) - def _parse_affectedness( - data: object, - ) -> Union[AffectednessEnum, BlankEnum, Unset]: + def _parse_affectedness(data: object) -> Union[AffectednessEnum, BlankEnum, Unset]: if isinstance(data, Unset): return data try: if not isinstance(data, str): raise TypeError() + # } _affectedness_type_0 = data - affectedness_type_0: Union[Unset, AffectednessEnum] + affectedness_type_0: AffectednessEnum if isinstance(_affectedness_type_0, Unset): affectedness_type_0 = UNSET else: @@ -107,8 +117,9 @@ def _parse_affectedness( pass if not isinstance(data, str): raise TypeError() + # } _affectedness_type_1 = data - affectedness_type_1: Union[Unset, BlankEnum] + affectedness_type_1: BlankEnum if isinstance(_affectedness_type_1, Unset): affectedness_type_1 = UNSET else: @@ -124,8 +135,9 @@ def _parse_resolution(data: object) -> Union[BlankEnum, ResolutionEnum, Unset]: try: if not isinstance(data, str): raise TypeError() + # } _resolution_type_0 = data - resolution_type_0: Union[Unset, ResolutionEnum] + resolution_type_0: ResolutionEnum if isinstance(_resolution_type_0, Unset): resolution_type_0 = UNSET else: @@ -136,8 +148,9 @@ def _parse_resolution(data: object) -> Union[BlankEnum, ResolutionEnum, Unset]: pass if not isinstance(data, str): raise TypeError() + # } _resolution_type_1 
= data - resolution_type_1: Union[Unset, BlankEnum] + resolution_type_1: BlankEnum if isinstance(_resolution_type_1, Unset): resolution_type_1 = UNSET else: @@ -149,18 +162,16 @@ def _parse_resolution(data: object) -> Union[BlankEnum, ResolutionEnum, Unset]: trackers = [] _trackers = d.pop("trackers", UNSET) - if _trackers is UNSET: - trackers = UNSET - else: - for trackers_item_data in _trackers or []: - _trackers_item = trackers_item_data - trackers_item: TrackerReportData - if isinstance(_trackers_item, Unset): - trackers_item = UNSET - else: - trackers_item = TrackerReportData.from_dict(_trackers_item) + for trackers_item_data in _trackers or []: + # } + _trackers_item = trackers_item_data + trackers_item: TrackerReportData + if isinstance(_trackers_item, Unset): + trackers_item = UNSET + else: + trackers_item = TrackerReportData.from_dict(_trackers_item) - trackers.append(trackers_item) + trackers.append(trackers_item) affect_report_data = cls( ps_module=ps_module, @@ -180,11 +191,11 @@ def get_fields(): "ps_component": str, "affectedness": Union[AffectednessEnum, BlankEnum], "resolution": Union[BlankEnum, ResolutionEnum], - "trackers": List[TrackerReportData], + "trackers": list["TrackerReportData"], } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/affectedness_enum.py b/osidb_bindings/bindings/python_client/models/affectedness_enum.py index 313f7d1..2fb6fe5 100644 --- a/osidb_bindings/bindings/python_client/models/affectedness_enum.py +++ b/osidb_bindings/bindings/python_client/models/affectedness_enum.py @@ -2,8 +2,8 @@ class AffectednessEnum(str, Enum): - NEW = "NEW" AFFECTED = "AFFECTED" + NEW = "NEW" NOTAFFECTED = "NOTAFFECTED" def __str__(self) -> str: diff --git a/osidb_bindings/bindings/python_client/models/alert.py b/osidb_bindings/bindings/python_client/models/alert.py index 725c569..fbc9bc9 100644 --- a/osidb_bindings/bindings/python_client/models/alert.py +++ b/osidb_bindings/bindings/python_client/models/alert.py @@ -1,6 +1,8 @@ -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import Any, TypeVar, Union +from uuid import UUID -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from ..models.alert_type_enum import AlertTypeEnum from ..types import UNSET, OSIDBModel, Unset @@ -8,33 +10,51 @@ T = TypeVar("T", bound="Alert") -@attr.s(auto_attribs=True) +@_attrs_define class Alert(OSIDBModel): - """Alerts indicate some inconsistency in a linked flaw, affect, tracker or other models.""" - - uuid: str + """Alerts indicate some inconsistency in a linked flaw, affect, tracker or other models. 
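
Across these regenerated models, optional nullable fields follow a tri-state convention: UNSET when the key is absent from the payload, None for an explicit JSON null, and the concrete value otherwise. A self-contained sketch of that convention; the Unset class below is a stand-in for the bindings' own sentinel, not imported from the package:

    from typing import Union, cast

    class Unset:
        """Stand-in sentinel (assumed), mirroring the bindings' Unset type."""
        def __bool__(self) -> bool:
            return False

    UNSET = Unset()

    def parse_nullable_str(data: object) -> Union[None, "Unset", str]:
        # Mirrors the generated _parse_* helpers: keep "explicit null" and
        # "field not sent" distinct from a real string value.
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        return cast(Union[None, "Unset", str], data)

    print(parse_nullable_str(None))     # None     -> serialize as null
    print(parse_nullable_str(UNSET))    # sentinel -> omit the field entirely
    print(parse_nullable_str("note"))   # 'note'   -> send the value as-is
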
+ + Attributes: + uuid (UUID): + name (str): + description (str): + parent_uuid (UUID): + parent_model (str): + alert_type (Union[Unset, AlertTypeEnum]): + resolution_steps (Union[Unset, str]): + """ + + uuid: UUID name: str description: str - parent_uuid: str + parent_uuid: UUID parent_model: str alert_type: Union[Unset, AlertTypeEnum] = UNSET resolution_steps: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + uuid: str = UNSET + if not isinstance(self.uuid, Unset): + uuid = str(self.uuid) - def to_dict(self) -> Dict[str, Any]: - uuid = self.uuid name = self.name + description = self.description - parent_uuid = self.parent_uuid + + parent_uuid: str = UNSET + if not isinstance(self.parent_uuid, Unset): + parent_uuid = str(self.parent_uuid) + parent_model = self.parent_model + alert_type: Union[Unset, str] = UNSET if not isinstance(self.alert_type, Unset): - alert_type = AlertTypeEnum(self.alert_type).value resolution_steps = self.resolution_steps - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(uuid, Unset): field_dict["uuid"] = uuid @@ -54,18 +74,31 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: d = src_dict.copy() - uuid = d.pop("uuid", UNSET) + # } + _uuid = d.pop("uuid", UNSET) + uuid: UUID + if isinstance(_uuid, Unset): + uuid = UNSET + else: + uuid = UUID(_uuid) name = d.pop("name", UNSET) description = d.pop("description", UNSET) - parent_uuid = d.pop("parent_uuid", UNSET) + # } + _parent_uuid = d.pop("parent_uuid", UNSET) + parent_uuid: UUID + if isinstance(_parent_uuid, Unset): + parent_uuid = UNSET + else: + parent_uuid = UUID(_parent_uuid) parent_model = d.pop("parent_model", UNSET) + # } _alert_type = d.pop("alert_type", UNSET) alert_type: Union[Unset, AlertTypeEnum] if isinstance(_alert_type, Unset): @@ -91,17 +124,17 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: @staticmethod def get_fields(): return { - "uuid": str, + "uuid": UUID, "name": str, "description": str, - "parent_uuid": str, + "parent_uuid": UUID, "parent_model": str, "alert_type": AlertTypeEnum, "resolution_steps": str, } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/alert_type_enum.py b/osidb_bindings/bindings/python_client/models/alert_type_enum.py index efdb766..35a0d4a 100644 --- a/osidb_bindings/bindings/python_client/models/alert_type_enum.py +++ b/osidb_bindings/bindings/python_client/models/alert_type_enum.py @@ -2,8 +2,8 @@ class AlertTypeEnum(str, Enum): - WARNING = "WARNING" ERROR = "ERROR" + WARNING = "WARNING" def __str__(self) -> str: return str(self.value) diff --git a/osidb_bindings/bindings/python_client/models/audit.py b/osidb_bindings/bindings/python_client/models/audit.py index 56bd0fb..8a9dd86 100644 --- a/osidb_bindings/bindings/python_client/models/audit.py +++ b/osidb_bindings/bindings/python_client/models/audit.py @@ -1,7 +1,8 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import Any, TypeVar, Union, cast -import attr +from attrs import 
define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse from ..types import UNSET, OSIDBModel, Unset @@ -9,9 +10,19 @@ T = TypeVar("T", bound="Audit") -@attr.s(auto_attribs=True) +@_attrs_define class Audit(OSIDBModel): - """ """ + """ + Attributes: + pgh_created_at (datetime.datetime): When the event was created. + pgh_slug (str): The unique identifier across all event tables. + pgh_obj_model (str): The object model. + pgh_label (str): The event label. + pgh_diff (Any): The diff between the previous event of the same label. + pgh_data (str): + pgh_obj_id (Union[None, Unset, str]): The primary key of the object. + pgh_context (Union[Unset, Any]): The context associated with the event. + """ pgh_created_at: datetime.datetime pgh_slug: str @@ -19,25 +30,34 @@ class Audit(OSIDBModel): pgh_label: str pgh_diff: Any pgh_data: str - pgh_obj_id: Union[Unset, None, str] = UNSET + pgh_obj_id: Union[None, Unset, str] = UNSET pgh_context: Union[Unset, Any] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: pgh_created_at: str = UNSET if not isinstance(self.pgh_created_at, Unset): pgh_created_at = self.pgh_created_at.isoformat() pgh_slug = self.pgh_slug + pgh_obj_model = self.pgh_obj_model + pgh_label = self.pgh_label + pgh_diff = self.pgh_diff pgh_data = self.pgh_data - pgh_obj_id = self.pgh_obj_id + + pgh_obj_id: Union[None, Unset, str] + if isinstance(self.pgh_obj_id, Unset): + pgh_obj_id = UNSET + else: + pgh_obj_id = self.pgh_obj_id + pgh_context = self.pgh_context - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(pgh_created_at, Unset): field_dict["pgh_created_at"] = pgh_created_at @@ -58,47 +78,40 @@ def to_dict(self) -> Dict[str, Any]: return field_dict - def to_multipart(self) -> Dict[str, Any]: - pgh_created_at: str = UNSET + def to_multipart(self) -> dict[str, Any]: + pgh_created_at: bytes = UNSET if not isinstance(self.pgh_created_at, Unset): - pgh_created_at = self.pgh_created_at.isoformat() + pgh_created_at = self.pgh_created_at.isoformat().encode() - pgh_slug = ( - self.pgh_slug - if self.pgh_slug is UNSET - else (None, str(self.pgh_slug), "text/plain") - ) - pgh_obj_model = ( - self.pgh_obj_model - if self.pgh_obj_model is UNSET - else (None, str(self.pgh_obj_model), "text/plain") - ) - pgh_label = ( - self.pgh_label - if self.pgh_label is UNSET - else (None, str(self.pgh_label), "text/plain") - ) - pgh_diff = self.pgh_diff + pgh_slug = (None, str(self.pgh_slug).encode(), "text/plain") - pgh_data = ( - self.pgh_data - if self.pgh_data is UNSET - else (None, str(self.pgh_data), "text/plain") - ) - pgh_obj_id = ( - self.pgh_obj_id - if self.pgh_obj_id is UNSET - else (None, str(self.pgh_obj_id), "text/plain") - ) - pgh_context = self.pgh_context + pgh_obj_model = (None, str(self.pgh_obj_model).encode(), "text/plain") + + pgh_label = (None, str(self.pgh_label).encode(), "text/plain") + + pgh_diff = (None, str(self.pgh_diff).encode(), "text/plain") + + pgh_data = (None, str(self.pgh_data).encode(), "text/plain") - field_dict: Dict[str, Any] = {} - field_dict.update( - { - key: (None, str(value), "text/plain") - for key, value in self.additional_properties.items() - } + pgh_obj_id: Union[Unset, tuple[None, bytes, str]] + + if isinstance(self.pgh_obj_id, Unset): + pgh_obj_id = 
UNSET + elif isinstance(self.pgh_obj_id, str): + pgh_obj_id = (None, str(self.pgh_obj_id).encode(), "text/plain") + else: + pgh_obj_id = (None, str(self.pgh_obj_id).encode(), "text/plain") + + pgh_context = ( + self.pgh_context + if isinstance(self.pgh_context, Unset) + else (None, str(self.pgh_context).encode(), "text/plain") ) + + field_dict: dict[str, Any] = {} + for prop_name, prop in self.additional_properties.items(): + field_dict[prop_name] = (None, str(prop).encode(), "text/plain") + if not isinstance(pgh_created_at, Unset): field_dict["pgh_created_at"] = pgh_created_at if not isinstance(pgh_slug, Unset): @@ -119,8 +132,9 @@ def to_multipart(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: d = src_dict.copy() + # } _pgh_created_at = d.pop("pgh_created_at", UNSET) pgh_created_at: datetime.datetime if isinstance(_pgh_created_at, Unset): @@ -138,7 +152,14 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: pgh_data = d.pop("pgh_data", UNSET) - pgh_obj_id = d.pop("pgh_obj_id", UNSET) + def _parse_pgh_obj_id(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + pgh_obj_id = _parse_pgh_obj_id(d.pop("pgh_obj_id", UNSET)) pgh_context = d.pop("pgh_context", UNSET) @@ -165,12 +186,12 @@ def get_fields(): "pgh_label": str, "pgh_diff": Any, "pgh_data": str, - "pgh_obj_id": str, + "pgh_obj_id": Union[None, str], "pgh_context": Any, } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/auth_token_create_response_200.py b/osidb_bindings/bindings/python_client/models/auth_token_create_response_200.py index 7c33c56..09ba8c7 100644 --- a/osidb_bindings/bindings/python_client/models/auth_token_create_response_200.py +++ b/osidb_bindings/bindings/python_client/models/auth_token_create_response_200.py @@ -1,7 +1,8 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import Any, TypeVar, Union -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse from ..types import UNSET, OSIDBModel, Unset @@ -9,9 +10,19 @@ T = TypeVar("T", bound="AuthTokenCreateResponse200") -@attr.s(auto_attribs=True) +@_attrs_define class AuthTokenCreateResponse200(OSIDBModel): - """ """ + """ + Attributes: + username (str): + password (str): + access (str): + refresh (str): + dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ username: str password: str @@ -21,22 +32,28 @@ class AuthTokenCreateResponse200(OSIDBModel): env: Union[Unset, str] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: username = self.username + password = self.password + access = self.access + refresh = self.refresh + dt: Union[Unset, str] = UNSET if not isinstance(self.dt, Unset): dt = self.dt.isoformat() env = self.env + revision = self.revision + version = 
self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(username, Unset): field_dict["username"] = username @@ -58,7 +75,7 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: d = src_dict.copy() username = d.pop("username", UNSET) @@ -68,6 +85,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: refresh = d.pop("refresh", UNSET) + # } _dt = d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -109,7 +127,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/auth_token_refresh_create_response_200.py b/osidb_bindings/bindings/python_client/models/auth_token_refresh_create_response_200.py index 2da53dc..d373ac3 100644 --- a/osidb_bindings/bindings/python_client/models/auth_token_refresh_create_response_200.py +++ b/osidb_bindings/bindings/python_client/models/auth_token_refresh_create_response_200.py @@ -1,7 +1,8 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import Any, TypeVar, Union -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse from ..types import UNSET, OSIDBModel, Unset @@ -9,9 +10,17 @@ T = TypeVar("T", bound="AuthTokenRefreshCreateResponse200") -@attr.s(auto_attribs=True) +@_attrs_define class AuthTokenRefreshCreateResponse200(OSIDBModel): - """ """ + """ + Attributes: + access (str): + refresh (str): + dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ access: str refresh: str @@ -19,20 +28,24 @@ class AuthTokenRefreshCreateResponse200(OSIDBModel): env: Union[Unset, str] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: access = self.access + refresh = self.refresh + dt: Union[Unset, str] = UNSET if not isinstance(self.dt, Unset): dt = self.dt.isoformat() env = self.env + revision = self.revision + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(access, Unset): field_dict["access"] = access @@ -50,12 +63,13 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: d = src_dict.copy() access = d.pop("access", UNSET) refresh = d.pop("refresh", UNSET) + # } _dt = d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -93,7 +107,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/auth_token_retrieve_response_200.py 
b/osidb_bindings/bindings/python_client/models/auth_token_retrieve_response_200.py index 39dd50b..21b1b4e 100644 --- a/osidb_bindings/bindings/python_client/models/auth_token_retrieve_response_200.py +++ b/osidb_bindings/bindings/python_client/models/auth_token_retrieve_response_200.py @@ -1,7 +1,8 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import Any, TypeVar, Union -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse from ..types import UNSET, OSIDBModel, Unset @@ -9,9 +10,17 @@ T = TypeVar("T", bound="AuthTokenRetrieveResponse200") -@attr.s(auto_attribs=True) +@_attrs_define class AuthTokenRetrieveResponse200(OSIDBModel): - """ """ + """ + Attributes: + access (Union[Unset, str]): + dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + refresh (Union[Unset, str]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ access: Union[Unset, str] = UNSET dt: Union[Unset, datetime.datetime] = UNSET @@ -19,20 +28,24 @@ class AuthTokenRetrieveResponse200(OSIDBModel): refresh: Union[Unset, str] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: access = self.access + dt: Union[Unset, str] = UNSET if not isinstance(self.dt, Unset): dt = self.dt.isoformat() env = self.env + refresh = self.refresh + revision = self.revision + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(access, Unset): field_dict["access"] = access @@ -50,10 +63,11 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: d = src_dict.copy() access = d.pop("access", UNSET) + # } _dt = d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -93,7 +107,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/auth_token_verify_create_response_200.py b/osidb_bindings/bindings/python_client/models/auth_token_verify_create_response_200.py index cc99a97..60522e2 100644 --- a/osidb_bindings/bindings/python_client/models/auth_token_verify_create_response_200.py +++ b/osidb_bindings/bindings/python_client/models/auth_token_verify_create_response_200.py @@ -1,7 +1,8 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import Any, TypeVar, Union -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse from ..types import UNSET, OSIDBModel, Unset @@ -9,28 +10,38 @@ T = TypeVar("T", bound="AuthTokenVerifyCreateResponse200") -@attr.s(auto_attribs=True) +@_attrs_define class AuthTokenVerifyCreateResponse200(OSIDBModel): - """ """ + """ + Attributes: + token (str): + dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ token: str dt: Union[Unset, datetime.datetime] = 
UNSET env: Union[Unset, str] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: token = self.token + dt: Union[Unset, str] = UNSET if not isinstance(self.dt, Unset): dt = self.dt.isoformat() env = self.env + revision = self.revision + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(token, Unset): field_dict["token"] = token @@ -46,10 +57,11 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: d = src_dict.copy() token = d.pop("token", UNSET) + # } _dt = d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -85,7 +97,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/collectors_api_v1_status_retrieve_response_200.py b/osidb_bindings/bindings/python_client/models/collectors_api_v1_status_retrieve_response_200.py index 5520599..50f22ad 100644 --- a/osidb_bindings/bindings/python_client/models/collectors_api_v1_status_retrieve_response_200.py +++ b/osidb_bindings/bindings/python_client/models/collectors_api_v1_status_retrieve_response_200.py @@ -1,36 +1,45 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.collectors_api_v1_status_retrieve_response_200_collectors_item import ( - CollectorsApiV1StatusRetrieveResponse200CollectorsItem, -) from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.collectors_api_v1_status_retrieve_response_200_collectors_item import ( + CollectorsApiV1StatusRetrieveResponse200CollectorsItem, + ) + + T = TypeVar("T", bound="CollectorsApiV1StatusRetrieveResponse200") -@attr.s(auto_attribs=True) +@_attrs_define class CollectorsApiV1StatusRetrieveResponse200(OSIDBModel): - """ """ - - collectors: Union[ - Unset, List[CollectorsApiV1StatusRetrieveResponse200CollectorsItem] - ] = UNSET + """ + Attributes: + collectors (Union[Unset, list['CollectorsApiV1StatusRetrieveResponse200CollectorsItem']]): + dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ + + collectors: Union[Unset, list["CollectorsApiV1StatusRetrieveResponse200CollectorsItem"]] = UNSET dt: Union[Unset, datetime.datetime] = UNSET env: Union[Unset, str] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: - collectors: Union[Unset, List[Dict[str, Any]]] = UNSET + def to_dict(self) -> dict[str, Any]: + collectors: Union[Unset, list[dict[str, Any]]] = UNSET if not isinstance(self.collectors, Unset): collectors = [] for 
collectors_item_data in self.collectors: - collectors_item: Dict[str, Any] = UNSET + collectors_item: dict[str, Any] = UNSET if not isinstance(collectors_item_data, Unset): collectors_item = collectors_item_data.to_dict() @@ -41,10 +50,12 @@ def to_dict(self) -> Dict[str, Any]: dt = self.dt.isoformat() env = self.env + revision = self.revision + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(collectors, Unset): field_dict["collectors"] = collectors @@ -60,25 +71,26 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.collectors_api_v1_status_retrieve_response_200_collectors_item import ( + CollectorsApiV1StatusRetrieveResponse200CollectorsItem, + ) + d = src_dict.copy() collectors = [] _collectors = d.pop("collectors", UNSET) - if _collectors is UNSET: - collectors = UNSET - else: - for collectors_item_data in _collectors or []: - _collectors_item = collectors_item_data - collectors_item: CollectorsApiV1StatusRetrieveResponse200CollectorsItem - if isinstance(_collectors_item, Unset): - collectors_item = UNSET - else: - collectors_item = CollectorsApiV1StatusRetrieveResponse200CollectorsItem.from_dict( - _collectors_item - ) - - collectors.append(collectors_item) - + for collectors_item_data in _collectors or []: + # } + _collectors_item = collectors_item_data + collectors_item: CollectorsApiV1StatusRetrieveResponse200CollectorsItem + if isinstance(_collectors_item, Unset): + collectors_item = UNSET + else: + collectors_item = CollectorsApiV1StatusRetrieveResponse200CollectorsItem.from_dict(_collectors_item) + + collectors.append(collectors_item) + + # } _dt = d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -106,7 +118,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: @staticmethod def get_fields(): return { - "collectors": List[CollectorsApiV1StatusRetrieveResponse200CollectorsItem], + "collectors": list["CollectorsApiV1StatusRetrieveResponse200CollectorsItem"], "dt": datetime.datetime, "env": str, "revision": str, @@ -114,7 +126,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/collectors_api_v1_status_retrieve_response_200_collectors_item.py b/osidb_bindings/bindings/python_client/models/collectors_api_v1_status_retrieve_response_200_collectors_item.py index d60fa3d..3a68dee 100644 --- a/osidb_bindings/bindings/python_client/models/collectors_api_v1_status_retrieve_response_200_collectors_item.py +++ b/osidb_bindings/bindings/python_client/models/collectors_api_v1_status_retrieve_response_200_collectors_item.py @@ -1,77 +1,92 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union, cast +from typing import TYPE_CHECKING, Any, TypeVar, Union, cast -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse from ..models.collectors_api_v1_status_retrieve_response_200_collectors_item_data import ( CollectorsApiV1StatusRetrieveResponse200CollectorsItemData, ) -from ..models.collectors_api_v1_status_retrieve_response_200_collectors_item_error import ( - 
CollectorsApiV1StatusRetrieveResponse200CollectorsItemError, -) from ..models.collectors_api_v1_status_retrieve_response_200_collectors_item_state import ( CollectorsApiV1StatusRetrieveResponse200CollectorsItemState, ) from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.collectors_api_v1_status_retrieve_response_200_collectors_item_error_type_0 import ( + CollectorsApiV1StatusRetrieveResponse200CollectorsItemErrorType0, + ) + + T = TypeVar("T", bound="CollectorsApiV1StatusRetrieveResponse200CollectorsItem") -@attr.s(auto_attribs=True) +@_attrs_define class CollectorsApiV1StatusRetrieveResponse200CollectorsItem(OSIDBModel): - """ """ - - data: Union[ - Unset, CollectorsApiV1StatusRetrieveResponse200CollectorsItemData - ] = UNSET - depends_on: Union[Unset, List[str]] = UNSET - error: Union[ - Unset, None, CollectorsApiV1StatusRetrieveResponse200CollectorsItemError - ] = UNSET + """ + Attributes: + data (Union[Unset, CollectorsApiV1StatusRetrieveResponse200CollectorsItemData]): + depends_on (Union[Unset, list[str]]): + error (Union['CollectorsApiV1StatusRetrieveResponse200CollectorsItemErrorType0', None, Unset]): + is_complete (Union[Unset, bool]): + is_up2date (Union[Unset, bool]): + data_models (Union[Unset, list[str]]): + state (Union[Unset, CollectorsApiV1StatusRetrieveResponse200CollectorsItemState]): + updated_until (Union[Unset, datetime.datetime]): + """ + + data: Union[Unset, CollectorsApiV1StatusRetrieveResponse200CollectorsItemData] = UNSET + depends_on: Union[Unset, list[str]] = UNSET + error: Union["CollectorsApiV1StatusRetrieveResponse200CollectorsItemErrorType0", None, Unset] = UNSET is_complete: Union[Unset, bool] = UNSET is_up2date: Union[Unset, bool] = UNSET - data_models: Union[Unset, List[str]] = UNSET - state: Union[ - Unset, CollectorsApiV1StatusRetrieveResponse200CollectorsItemState - ] = UNSET + data_models: Union[Unset, list[str]] = UNSET + state: Union[Unset, CollectorsApiV1StatusRetrieveResponse200CollectorsItemState] = UNSET updated_until: Union[Unset, datetime.datetime] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + from ..models.collectors_api_v1_status_retrieve_response_200_collectors_item_error_type_0 import ( + CollectorsApiV1StatusRetrieveResponse200CollectorsItemErrorType0, + ) - def to_dict(self) -> Dict[str, Any]: data: Union[Unset, str] = UNSET if not isinstance(self.data, Unset): + data = CollectorsApiV1StatusRetrieveResponse200CollectorsItemData(self.data).value - data = CollectorsApiV1StatusRetrieveResponse200CollectorsItemData( - self.data - ).value - - depends_on: Union[Unset, List[str]] = UNSET + depends_on: Union[Unset, list[str]] = UNSET if not isinstance(self.depends_on, Unset): depends_on = self.depends_on - error: Union[Unset, None, Dict[str, Any]] = UNSET - if not isinstance(self.error, Unset): - error = self.error.to_dict() if self.error else None + error: Union[None, Unset, dict[str, Any]] + if isinstance(self.error, Unset): + error = UNSET + elif isinstance(self.error, CollectorsApiV1StatusRetrieveResponse200CollectorsItemErrorType0): + error = UNSET + if not isinstance(self.error, Unset): + error = self.error.to_dict() + + else: + error = self.error is_complete = self.is_complete + is_up2date = self.is_up2date - data_models: Union[Unset, List[str]] = UNSET + + data_models: Union[Unset, list[str]] = UNSET if not isinstance(self.data_models, Unset): 
data_models = self.data_models state: Union[Unset, str] = UNSET if not isinstance(self.state, Unset): - - state = CollectorsApiV1StatusRetrieveResponse200CollectorsItemState( - self.state - ).value + state = CollectorsApiV1StatusRetrieveResponse200CollectorsItemState(self.state).value updated_until: Union[Unset, str] = UNSET if not isinstance(self.updated_until, Unset): updated_until = self.updated_until.isoformat() - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(data, Unset): field_dict["data"] = data @@ -93,8 +108,13 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.collectors_api_v1_status_retrieve_response_200_collectors_item_error_type_0 import ( + CollectorsApiV1StatusRetrieveResponse200CollectorsItemErrorType0, + ) + d = src_dict.copy() + # } _data = d.pop("data", UNSET) data: Union[Unset, CollectorsApiV1StatusRetrieveResponse200CollectorsItemData] if isinstance(_data, Unset): @@ -102,29 +122,42 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: data = CollectorsApiV1StatusRetrieveResponse200CollectorsItemData(_data) - depends_on = cast(List[str], d.pop("depends_on", UNSET)) - - _error = d.pop("error", UNSET) - error: Union[ - Unset, None, CollectorsApiV1StatusRetrieveResponse200CollectorsItemError - ] - if _error is None: - error = None - elif isinstance(_error, Unset): - error = UNSET - else: - error = ( - CollectorsApiV1StatusRetrieveResponse200CollectorsItemError.from_dict( - _error - ) - ) + depends_on = cast(list[str], d.pop("depends_on", UNSET)) + + def _parse_error( + data: object, + ) -> Union["CollectorsApiV1StatusRetrieveResponse200CollectorsItemErrorType0", None, Unset]: + if data is None: + return data + if isinstance(data, Unset): + return data + try: + if not isinstance(data, dict): + raise TypeError() + # } + _error_type_0 = data + error_type_0: CollectorsApiV1StatusRetrieveResponse200CollectorsItemErrorType0 + if isinstance(_error_type_0, Unset): + error_type_0 = UNSET + else: + error_type_0 = CollectorsApiV1StatusRetrieveResponse200CollectorsItemErrorType0.from_dict( + _error_type_0 + ) + + return error_type_0 + except: # noqa: E722 + pass + return cast(Union["CollectorsApiV1StatusRetrieveResponse200CollectorsItemErrorType0", None, Unset], data) + + error = _parse_error(d.pop("error", UNSET)) is_complete = d.pop("is_complete", UNSET) is_up2date = d.pop("is_up2date", UNSET) - data_models = cast(List[str], d.pop("data_models", UNSET)) + data_models = cast(list[str], d.pop("data_models", UNSET)) + # } _state = d.pop("state", UNSET) state: Union[Unset, CollectorsApiV1StatusRetrieveResponse200CollectorsItemState] if isinstance(_state, Unset): @@ -132,6 +165,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: state = CollectorsApiV1StatusRetrieveResponse200CollectorsItemState(_state) + # } _updated_until = d.pop("updated_until", UNSET) updated_until: Union[Unset, datetime.datetime] if isinstance(_updated_until, Unset): @@ -150,26 +184,24 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: updated_until=updated_until, ) - collectors_api_v1_status_retrieve_response_200_collectors_item.additional_properties = ( - d - ) + collectors_api_v1_status_retrieve_response_200_collectors_item.additional_properties = d return collectors_api_v1_status_retrieve_response_200_collectors_item 
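# A minimal sketch of the regeneration pattern the hunks above follow: attr.s(auto_attribs=True)
# and attr.ib become attrs.define and attrs.field, typing.Dict/List/Type become the builtin
# generics, model imports move behind TYPE_CHECKING with string annotations, and nullable
# fields (error, order, shipped_dt, next/previous, ...) become Union[X, None, Unset] parsed by
# a local _parse_* helper in from_dict. Everything below is illustrative only: the class, field,
# and helper names are hypothetical, and Unset/UNSET are simplified stand-ins for the bindings'
# own sentinel, not the real ..types module.
from typing import Any, TypeVar, Union, cast

from attrs import define as _attrs_define
from attrs import field as _attrs_field


class Unset:
    """Stand-in for the generated client's UNSET sentinel."""


UNSET = Unset()
T = TypeVar("T", bound="ExampleModel")


@_attrs_define
class ExampleModel:
    name: str
    note: Union[None, Unset, str] = UNSET
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        field_dict: dict[str, Any] = dict(self.additional_properties)
        field_dict["name"] = self.name
        if not isinstance(self.note, Unset):
            field_dict["note"] = self.note
        return field_dict

    @classmethod
    def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T:
        d = src_dict.copy()
        name = d.pop("name")

        def _parse_note(data: object) -> Union[None, Unset, str]:
            # JSON null stays None, a missing key stays UNSET, anything else is kept as str.
            if data is None or isinstance(data, Unset):
                return data
            return cast(Union[None, Unset, str], data)

        note = _parse_note(d.pop("note", UNSET))

        example = cls(name=name, note=note)
        example.additional_properties = d
        return example


# e.g. ExampleModel.from_dict({"name": "RHSA-2024:0001", "note": None}).to_dict()
#   -> {"name": "RHSA-2024:0001", "note": None}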
@staticmethod def get_fields(): return { "data": CollectorsApiV1StatusRetrieveResponse200CollectorsItemData, - "depends_on": List[str], - "error": CollectorsApiV1StatusRetrieveResponse200CollectorsItemError, + "depends_on": list[str], + "error": Union["CollectorsApiV1StatusRetrieveResponse200CollectorsItemErrorType0", None], "is_complete": bool, "is_up2date": bool, - "data_models": List[str], + "data_models": list[str], "state": CollectorsApiV1StatusRetrieveResponse200CollectorsItemState, "updated_until": datetime.datetime, } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/collectors_api_v1_status_retrieve_response_200_collectors_item_data.py b/osidb_bindings/bindings/python_client/models/collectors_api_v1_status_retrieve_response_200_collectors_item_data.py index 3bdeb3e..c029e84 100644 --- a/osidb_bindings/bindings/python_client/models/collectors_api_v1_status_retrieve_response_200_collectors_item_data.py +++ b/osidb_bindings/bindings/python_client/models/collectors_api_v1_status_retrieve_response_200_collectors_item_data.py @@ -2,9 +2,9 @@ class CollectorsApiV1StatusRetrieveResponse200CollectorsItemData(str, Enum): + COMPLETE = "COMPLETE" EMPTY = "EMPTY" PARTIAL = "PARTIAL" - COMPLETE = "COMPLETE" def __str__(self) -> str: return str(self.value) diff --git a/osidb_bindings/bindings/python_client/models/collectors_api_v1_status_retrieve_response_200_collectors_item_error.py b/osidb_bindings/bindings/python_client/models/collectors_api_v1_status_retrieve_response_200_collectors_item_error_type_0.py similarity index 64% rename from osidb_bindings/bindings/python_client/models/collectors_api_v1_status_retrieve_response_200_collectors_item_error.py rename to osidb_bindings/bindings/python_client/models/collectors_api_v1_status_retrieve_response_200_collectors_item_error_type_0.py index b46f572..205db7d 100644 --- a/osidb_bindings/bindings/python_client/models/collectors_api_v1_status_retrieve_response_200_collectors_item_error.py +++ b/osidb_bindings/bindings/python_client/models/collectors_api_v1_status_retrieve_response_200_collectors_item_error_type_0.py @@ -1,41 +1,39 @@ -from typing import Any, Dict, List, Type, TypeVar +from typing import Any, TypeVar -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from ..types import OSIDBModel -T = TypeVar("T", bound="CollectorsApiV1StatusRetrieveResponse200CollectorsItemError") +T = TypeVar("T", bound="CollectorsApiV1StatusRetrieveResponse200CollectorsItemErrorType0") -@attr.s(auto_attribs=True) -class CollectorsApiV1StatusRetrieveResponse200CollectorsItemError(OSIDBModel): +@_attrs_define +class CollectorsApiV1StatusRetrieveResponse200CollectorsItemErrorType0(OSIDBModel): """ """ - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: - - field_dict: Dict[str, Any] = {} + def to_dict(self) -> dict[str, Any]: + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: d = src_dict.copy() - collectors_api_v1_status_retrieve_response_200_collectors_item_error = cls() + 
collectors_api_v1_status_retrieve_response_200_collectors_item_error_type_0 = cls() - collectors_api_v1_status_retrieve_response_200_collectors_item_error.additional_properties = ( - d - ) - return collectors_api_v1_status_retrieve_response_200_collectors_item_error + collectors_api_v1_status_retrieve_response_200_collectors_item_error_type_0.additional_properties = d + return collectors_api_v1_status_retrieve_response_200_collectors_item_error_type_0 @staticmethod def get_fields(): return {} @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/collectors_api_v1_status_retrieve_response_200_collectors_item_state.py b/osidb_bindings/bindings/python_client/models/collectors_api_v1_status_retrieve_response_200_collectors_item_state.py index fcf8828..90341b5 100644 --- a/osidb_bindings/bindings/python_client/models/collectors_api_v1_status_retrieve_response_200_collectors_item_state.py +++ b/osidb_bindings/bindings/python_client/models/collectors_api_v1_status_retrieve_response_200_collectors_item_state.py @@ -2,8 +2,8 @@ class CollectorsApiV1StatusRetrieveResponse200CollectorsItemState(str, Enum): - PENDING = "PENDING" BLOCKED = "BLOCKED" + PENDING = "PENDING" READY = "READY" RUNNING = "RUNNING" diff --git a/osidb_bindings/bindings/python_client/models/collectors_healthy_retrieve_response_200.py b/osidb_bindings/bindings/python_client/models/collectors_healthy_retrieve_response_200.py index 8570429..99c8b17 100644 --- a/osidb_bindings/bindings/python_client/models/collectors_healthy_retrieve_response_200.py +++ b/osidb_bindings/bindings/python_client/models/collectors_healthy_retrieve_response_200.py @@ -1,7 +1,8 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import Any, TypeVar, Union -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse from ..types import UNSET, OSIDBModel, Unset @@ -9,26 +10,34 @@ T = TypeVar("T", bound="CollectorsHealthyRetrieveResponse200") -@attr.s(auto_attribs=True) +@_attrs_define class CollectorsHealthyRetrieveResponse200(OSIDBModel): - """ """ + """ + Attributes: + dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ dt: Union[Unset, datetime.datetime] = UNSET env: Union[Unset, str] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: dt: Union[Unset, str] = UNSET if not isinstance(self.dt, Unset): dt = self.dt.isoformat() env = self.env + revision = self.revision + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(dt, Unset): field_dict["dt"] = dt @@ -42,8 +51,9 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: d = src_dict.copy() + # } _dt = d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -77,7 +87,7 @@ def get_fields(): } @property - def 
additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/collectors_retrieve_response_200.py b/osidb_bindings/bindings/python_client/models/collectors_retrieve_response_200.py index a6f9dba..807de6a 100644 --- a/osidb_bindings/bindings/python_client/models/collectors_retrieve_response_200.py +++ b/osidb_bindings/bindings/python_client/models/collectors_retrieve_response_200.py @@ -1,7 +1,8 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union, cast +from typing import Any, TypeVar, Union, cast -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse from ..types import UNSET, OSIDBModel, Unset @@ -9,31 +10,40 @@ T = TypeVar("T", bound="CollectorsRetrieveResponse200") -@attr.s(auto_attribs=True) +@_attrs_define class CollectorsRetrieveResponse200(OSIDBModel): - """ """ + """ + Attributes: + dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + index (Union[Unset, list[str]]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ dt: Union[Unset, datetime.datetime] = UNSET env: Union[Unset, str] = UNSET - index: Union[Unset, List[str]] = UNSET + index: Union[Unset, list[str]] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: dt: Union[Unset, str] = UNSET if not isinstance(self.dt, Unset): dt = self.dt.isoformat() env = self.env - index: Union[Unset, List[str]] = UNSET + + index: Union[Unset, list[str]] = UNSET if not isinstance(self.index, Unset): index = self.index revision = self.revision + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(dt, Unset): field_dict["dt"] = dt @@ -49,8 +59,9 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: d = src_dict.copy() + # } _dt = d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -60,7 +71,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: env = d.pop("env", UNSET) - index = cast(List[str], d.pop("index", UNSET)) + index = cast(list[str], d.pop("index", UNSET)) revision = d.pop("revision", UNSET) @@ -82,13 +93,13 @@ def get_fields(): return { "dt": datetime.datetime, "env": str, - "index": List[str], + "index": list[str], "revision": str, "version": str, } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/comment.py b/osidb_bindings/bindings/python_client/models/comment.py index 01b4335..708cbca 100644 --- a/osidb_bindings/bindings/python_client/models/comment.py +++ b/osidb_bindings/bindings/python_client/models/comment.py @@ -1,38 +1,60 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union, cast +from uuid import UUID -import attr +from attrs 
import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.alert import Alert from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.alert import Alert + + T = TypeVar("T", bound="Comment") -@attr.s(auto_attribs=True) +@_attrs_define class Comment(OSIDBModel): - """FlawComment serializer for use by FlawSerializer""" - - uuid: str + """FlawComment serializer for use by FlawSerializer + + Attributes: + uuid (UUID): + text (str): + alerts (list['Alert']): + created_dt (datetime.datetime): + updated_dt (datetime.datetime): The updated_dt timestamp attribute is mandatory on update as it is used to + detect mit-air collisions. + external_system_id (Union[Unset, str]): + order (Union[None, Unset, int]): + creator (Union[Unset, str]): + is_private (Union[Unset, bool]): + """ + + uuid: UUID text: str - alerts: List[Alert] + alerts: list["Alert"] created_dt: datetime.datetime updated_dt: datetime.datetime external_system_id: Union[Unset, str] = UNSET - order: Union[Unset, None, int] = UNSET + order: Union[None, Unset, int] = UNSET creator: Union[Unset, str] = UNSET is_private: Union[Unset, bool] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + uuid: str = UNSET + if not isinstance(self.uuid, Unset): + uuid = str(self.uuid) - def to_dict(self) -> Dict[str, Any]: - uuid = self.uuid text = self.text - alerts: List[Dict[str, Any]] = UNSET + + alerts: list[dict[str, Any]] = UNSET if not isinstance(self.alerts, Unset): alerts = [] for alerts_item_data in self.alerts: - alerts_item: Dict[str, Any] = UNSET + alerts_item: dict[str, Any] = UNSET if not isinstance(alerts_item_data, Unset): alerts_item = alerts_item_data.to_dict() @@ -47,11 +69,18 @@ def to_dict(self) -> Dict[str, Any]: updated_dt = self.updated_dt.isoformat() external_system_id = self.external_system_id - order = self.order + + order: Union[None, Unset, int] + if isinstance(self.order, Unset): + order = UNSET + else: + order = self.order + creator = self.creator + is_private = self.is_private - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(uuid, Unset): field_dict["uuid"] = uuid @@ -75,27 +104,34 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.alert import Alert + d = src_dict.copy() - uuid = d.pop("uuid", UNSET) + # } + _uuid = d.pop("uuid", UNSET) + uuid: UUID + if isinstance(_uuid, Unset): + uuid = UNSET + else: + uuid = UUID(_uuid) text = d.pop("text", UNSET) alerts = [] _alerts = d.pop("alerts", UNSET) - if _alerts is UNSET: - alerts = UNSET - else: - for alerts_item_data in _alerts or []: - _alerts_item = alerts_item_data - alerts_item: Alert - if isinstance(_alerts_item, Unset): - alerts_item = UNSET - else: - alerts_item = Alert.from_dict(_alerts_item) - - alerts.append(alerts_item) - + for alerts_item_data in _alerts or []: + # } + _alerts_item = alerts_item_data + alerts_item: Alert + if isinstance(_alerts_item, Unset): + alerts_item = UNSET + else: + alerts_item = Alert.from_dict(_alerts_item) + + alerts.append(alerts_item) + + # } _created_dt = d.pop("created_dt", UNSET) created_dt: datetime.datetime if isinstance(_created_dt, Unset): @@ -103,6 
+139,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: created_dt = isoparse(_created_dt) + # } _updated_dt = d.pop("updated_dt", UNSET) updated_dt: datetime.datetime if isinstance(_updated_dt, Unset): @@ -112,7 +149,14 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: external_system_id = d.pop("external_system_id", UNSET) - order = d.pop("order", UNSET) + def _parse_order(data: object) -> Union[None, Unset, int]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, int], data) + + order = _parse_order(d.pop("order", UNSET)) creator = d.pop("creator", UNSET) @@ -136,19 +180,19 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: @staticmethod def get_fields(): return { - "uuid": str, + "uuid": UUID, "text": str, - "alerts": List[Alert], + "alerts": list["Alert"], "created_dt": datetime.datetime, "updated_dt": datetime.datetime, "external_system_id": str, - "order": int, + "order": Union[None, int], "creator": str, "is_private": bool, } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/epss.py b/osidb_bindings/bindings/python_client/models/epss.py index 2839a56..c4ba895 100644 --- a/osidb_bindings/bindings/python_client/models/epss.py +++ b/osidb_bindings/bindings/python_client/models/epss.py @@ -1,25 +1,31 @@ -from typing import Any, Dict, List, Type, TypeVar +from typing import Any, TypeVar -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from ..types import UNSET, OSIDBModel, Unset T = TypeVar("T", bound="EPSS") -@attr.s(auto_attribs=True) +@_attrs_define class EPSS(OSIDBModel): - """ """ + """ + Attributes: + cve (str): + epss (float): + """ cve: str epss: float - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: cve = self.cve + epss = self.epss - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(cve, Unset): field_dict["cve"] = cve @@ -29,7 +35,7 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: d = src_dict.copy() cve = d.pop("cve", UNSET) @@ -51,7 +57,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/erratum.py b/osidb_bindings/bindings/python_client/models/erratum.py index 5d94997..577e01a 100644 --- a/osidb_bindings/bindings/python_client/models/erratum.py +++ b/osidb_bindings/bindings/python_client/models/erratum.py @@ -1,7 +1,8 @@ import datetime -from typing import Any, Dict, List, Optional, Type, TypeVar +from typing import Any, TypeVar, Union, cast -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse from ..types import UNSET, OSIDBModel, Unset @@ -9,20 +10,40 @@ T = TypeVar("T", bound="Erratum") -@attr.s(auto_attribs=True) +@_attrs_define class 
Erratum(OSIDBModel): - """Erratum serializer""" + """Erratum serializer + + Attributes: + et_id (int): + advisory_name (str): + shipped_dt (Union[None, datetime.datetime]): + created_dt (datetime.datetime): + updated_dt (datetime.datetime): The updated_dt timestamp attribute is mandatory on update as it is used to + detect mit-air collisions. + """ et_id: int advisory_name: str + shipped_dt: Union[None, datetime.datetime] created_dt: datetime.datetime updated_dt: datetime.datetime - shipped_dt: Optional[datetime.datetime] - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: et_id = self.et_id + advisory_name = self.advisory_name + + shipped_dt: Union[None, str] + if isinstance(self.shipped_dt, datetime.datetime): + shipped_dt = UNSET + if not isinstance(self.shipped_dt, Unset): + shipped_dt = self.shipped_dt.isoformat() + + else: + shipped_dt = self.shipped_dt + created_dt: str = UNSET if not isinstance(self.created_dt, Unset): created_dt = self.created_dt.isoformat() @@ -31,32 +52,50 @@ def to_dict(self) -> Dict[str, Any]: if not isinstance(self.updated_dt, Unset): updated_dt = self.updated_dt.isoformat() - shipped_dt: Optional[str] = UNSET - if not isinstance(self.shipped_dt, Unset): - shipped_dt = self.shipped_dt.isoformat() if self.shipped_dt else None - - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(et_id, Unset): field_dict["et_id"] = et_id if not isinstance(advisory_name, Unset): field_dict["advisory_name"] = advisory_name + if not isinstance(shipped_dt, Unset): + field_dict["shipped_dt"] = shipped_dt if not isinstance(created_dt, Unset): field_dict["created_dt"] = created_dt if not isinstance(updated_dt, Unset): field_dict["updated_dt"] = updated_dt - if not isinstance(shipped_dt, Unset): - field_dict["shipped_dt"] = shipped_dt return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: d = src_dict.copy() et_id = d.pop("et_id", UNSET) advisory_name = d.pop("advisory_name", UNSET) + def _parse_shipped_dt(data: object) -> Union[None, datetime.datetime]: + if data is None: + return data + try: + if not isinstance(data, str): + raise TypeError() + # } + _shipped_dt_type_0 = data + shipped_dt_type_0: datetime.datetime + if isinstance(_shipped_dt_type_0, Unset): + shipped_dt_type_0 = UNSET + else: + shipped_dt_type_0 = isoparse(_shipped_dt_type_0) + + return shipped_dt_type_0 + except: # noqa: E722 + pass + return cast(Union[None, datetime.datetime], data) + + shipped_dt = _parse_shipped_dt(d.pop("shipped_dt", UNSET)) + + # } _created_dt = d.pop("created_dt", UNSET) created_dt: datetime.datetime if isinstance(_created_dt, Unset): @@ -64,6 +103,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: created_dt = isoparse(_created_dt) + # } _updated_dt = d.pop("updated_dt", UNSET) updated_dt: datetime.datetime if isinstance(_updated_dt, Unset): @@ -71,21 +111,12 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: updated_dt = isoparse(_updated_dt) - _shipped_dt = d.pop("shipped_dt", UNSET) - shipped_dt: Optional[datetime.datetime] - if _shipped_dt is None: - shipped_dt = None - elif isinstance(_shipped_dt, Unset): - shipped_dt = UNSET - else: - shipped_dt = isoparse(_shipped_dt) - erratum = cls( 
et_id=et_id, advisory_name=advisory_name, + shipped_dt=shipped_dt, created_dt=created_dt, updated_dt=updated_dt, - shipped_dt=shipped_dt, ) erratum.additional_properties = d @@ -96,13 +127,13 @@ def get_fields(): return { "et_id": int, "advisory_name": str, + "shipped_dt": Union[None, datetime.datetime], "created_dt": datetime.datetime, "updated_dt": datetime.datetime, - "shipped_dt": datetime.datetime, } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/exploit_only_report_data.py b/osidb_bindings/bindings/python_client/models/exploit_only_report_data.py index 148efc3..f0269b2 100644 --- a/osidb_bindings/bindings/python_client/models/exploit_only_report_data.py +++ b/osidb_bindings/bindings/python_client/models/exploit_only_report_data.py @@ -1,7 +1,8 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import Any, TypeVar, Union, cast -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse from ..models.exploit_only_report_data_source_enum import ( @@ -13,40 +14,53 @@ T = TypeVar("T", bound="ExploitOnlyReportData") -@attr.s(auto_attribs=True) +@_attrs_define class ExploitOnlyReportData(OSIDBModel): - """ """ + """ + Attributes: + cve (str): + source (ExploitOnlyReportDataSourceEnum): + maturity_preliminary (MaturityPreliminaryEnum): + flaw (bool): + date (Union[None, Unset, datetime.date]): + reference (Union[Unset, str]): Default: 'N/A'. + """ cve: str source: ExploitOnlyReportDataSourceEnum maturity_preliminary: MaturityPreliminaryEnum flaw: bool - date: Union[Unset, None, datetime.date] = UNSET + date: Union[None, Unset, datetime.date] = UNSET reference: Union[Unset, str] = "N/A" - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: cve = self.cve + source: str = UNSET if not isinstance(self.source, Unset): - source = ExploitOnlyReportDataSourceEnum(self.source).value maturity_preliminary: int = UNSET if not isinstance(self.maturity_preliminary, Unset): - - maturity_preliminary = MaturityPreliminaryEnum( - self.maturity_preliminary - ).value + maturity_preliminary = MaturityPreliminaryEnum(self.maturity_preliminary).value flaw = self.flaw - date: Union[Unset, None, str] = UNSET - if not isinstance(self.date, Unset): - date = self.date.isoformat() if self.date else None + + date: Union[None, Unset, str] + if isinstance(self.date, Unset): + date = UNSET + elif isinstance(self.date, datetime.date): + date = UNSET + if not isinstance(self.date, Unset): + date = self.date.isoformat() + + else: + date = self.date reference = self.reference - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(cve, Unset): field_dict["cve"] = cve @@ -64,10 +78,11 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: d = src_dict.copy() cve = d.pop("cve", UNSET) + # } _source = d.pop("source", UNSET) source: ExploitOnlyReportDataSourceEnum if isinstance(_source, Unset): @@ -75,6 +90,7 @@ def from_dict(cls: Type[T], 
src_dict: Dict[str, Any]) -> T: else: source = ExploitOnlyReportDataSourceEnum(_source) + # } _maturity_preliminary = d.pop("maturity_preliminary", UNSET) maturity_preliminary: MaturityPreliminaryEnum if isinstance(_maturity_preliminary, Unset): @@ -84,14 +100,28 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: flaw = d.pop("flaw", UNSET) - _date = d.pop("date", UNSET) - date: Union[Unset, None, datetime.date] - if _date is None: - date = None - elif isinstance(_date, Unset): - date = UNSET - else: - date = isoparse(_date).date() + def _parse_date(data: object) -> Union[None, Unset, datetime.date]: + if data is None: + return data + if isinstance(data, Unset): + return data + try: + if not isinstance(data, str): + raise TypeError() + # } + _date_type_0 = data + date_type_0: datetime.date + if isinstance(_date_type_0, Unset): + date_type_0 = UNSET + else: + date_type_0 = isoparse(_date_type_0).date() + + return date_type_0 + except: # noqa: E722 + pass + return cast(Union[None, Unset, datetime.date], data) + + date = _parse_date(d.pop("date", UNSET)) reference = d.pop("reference", UNSET) @@ -114,12 +144,12 @@ def get_fields(): "source": ExploitOnlyReportDataSourceEnum, "maturity_preliminary": MaturityPreliminaryEnum, "flaw": bool, - "date": datetime.date, + "date": Union[None, datetime.date], "reference": str, } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/exploit_only_report_data_source_enum.py b/osidb_bindings/bindings/python_client/models/exploit_only_report_data_source_enum.py index f69d1a3..544cbe0 100644 --- a/osidb_bindings/bindings/python_client/models/exploit_only_report_data_source_enum.py +++ b/osidb_bindings/bindings/python_client/models/exploit_only_report_data_source_enum.py @@ -3,8 +3,8 @@ class ExploitOnlyReportDataSourceEnum(str, Enum): CISA = "CISA" - METASPLOIT = "Metasploit" EXPLOIT_DB = "Exploit-DB" + METASPLOIT = "Metasploit" def __str__(self) -> str: return str(self.value) diff --git a/osidb_bindings/bindings/python_client/models/exploits_api_v1_collect_update_response_200.py b/osidb_bindings/bindings/python_client/models/exploits_api_v1_collect_update_response_200.py index 37ee758..0be921d 100644 --- a/osidb_bindings/bindings/python_client/models/exploits_api_v1_collect_update_response_200.py +++ b/osidb_bindings/bindings/python_client/models/exploits_api_v1_collect_update_response_200.py @@ -1,7 +1,8 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import Any, TypeVar, Union -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse from ..types import UNSET, OSIDBModel, Unset @@ -9,28 +10,38 @@ T = TypeVar("T", bound="ExploitsApiV1CollectUpdateResponse200") -@attr.s(auto_attribs=True) +@_attrs_define class ExploitsApiV1CollectUpdateResponse200(OSIDBModel): - """ """ + """ + Attributes: + dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + result_cisa (Union[Unset, str]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ dt: Union[Unset, datetime.datetime] = UNSET env: Union[Unset, str] = UNSET result_cisa: Union[Unset, str] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, 
Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: dt: Union[Unset, str] = UNSET if not isinstance(self.dt, Unset): dt = self.dt.isoformat() env = self.env + result_cisa = self.result_cisa + revision = self.revision + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(dt, Unset): field_dict["dt"] = dt @@ -46,8 +57,9 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: d = src_dict.copy() + # } _dt = d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -85,7 +97,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/exploits_api_v1_cve_map_retrieve_response_200.py b/osidb_bindings/bindings/python_client/models/exploits_api_v1_cve_map_retrieve_response_200.py index aeaf809..58c7454 100644 --- a/osidb_bindings/bindings/python_client/models/exploits_api_v1_cve_map_retrieve_response_200.py +++ b/osidb_bindings/bindings/python_client/models/exploits_api_v1_cve_map_retrieve_response_200.py @@ -1,31 +1,43 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.exploits_api_v1_cve_map_retrieve_response_200_cves import ( - ExploitsApiV1CveMapRetrieveResponse200Cves, -) from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.exploits_api_v1_cve_map_retrieve_response_200_cves import ( + ExploitsApiV1CveMapRetrieveResponse200Cves, + ) + + T = TypeVar("T", bound="ExploitsApiV1CveMapRetrieveResponse200") -@attr.s(auto_attribs=True) +@_attrs_define class ExploitsApiV1CveMapRetrieveResponse200(OSIDBModel): - """ """ - - cves: Union[Unset, ExploitsApiV1CveMapRetrieveResponse200Cves] = UNSET + """ + Attributes: + cves (Union[Unset, ExploitsApiV1CveMapRetrieveResponse200Cves]): + dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + page_size (Union[Unset, int]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ + + cves: Union[Unset, "ExploitsApiV1CveMapRetrieveResponse200Cves"] = UNSET dt: Union[Unset, datetime.datetime] = UNSET env: Union[Unset, str] = UNSET page_size: Union[Unset, int] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: - cves: Union[Unset, Dict[str, Any]] = UNSET + def to_dict(self) -> dict[str, Any]: + cves: Union[Unset, dict[str, Any]] = UNSET if not isinstance(self.cves, Unset): cves = self.cves.to_dict() @@ -34,11 +46,14 @@ def to_dict(self) -> Dict[str, Any]: dt = self.dt.isoformat() env = self.env + page_size = self.page_size + revision = self.revision + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(cves, Unset): field_dict["cves"] 
= cves @@ -56,8 +71,13 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.exploits_api_v1_cve_map_retrieve_response_200_cves import ( + ExploitsApiV1CveMapRetrieveResponse200Cves, + ) + d = src_dict.copy() + # } _cves = d.pop("cves", UNSET) cves: Union[Unset, ExploitsApiV1CveMapRetrieveResponse200Cves] if isinstance(_cves, Unset): @@ -65,6 +85,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: cves = ExploitsApiV1CveMapRetrieveResponse200Cves.from_dict(_cves) + # } _dt = d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -104,7 +125,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/exploits_api_v1_cve_map_retrieve_response_200_cves.py b/osidb_bindings/bindings/python_client/models/exploits_api_v1_cve_map_retrieve_response_200_cves.py index 7d1ccb8..f9b60aa 100644 --- a/osidb_bindings/bindings/python_client/models/exploits_api_v1_cve_map_retrieve_response_200_cves.py +++ b/osidb_bindings/bindings/python_client/models/exploits_api_v1_cve_map_retrieve_response_200_cves.py @@ -1,27 +1,27 @@ -from typing import Any, Dict, List, Type, TypeVar +from typing import Any, TypeVar -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from ..types import OSIDBModel T = TypeVar("T", bound="ExploitsApiV1CveMapRetrieveResponse200Cves") -@attr.s(auto_attribs=True) +@_attrs_define class ExploitsApiV1CveMapRetrieveResponse200Cves(OSIDBModel): """ """ - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: - - field_dict: Dict[str, Any] = {} + def to_dict(self) -> dict[str, Any]: + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: d = src_dict.copy() exploits_api_v1_cve_map_retrieve_response_200_cves = cls() @@ -33,7 +33,7 @@ def get_fields(): return {} @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/exploits_api_v1_epss_list_response_200.py b/osidb_bindings/bindings/python_client/models/exploits_api_v1_epss_list_response_200.py index f274f7e..aadb603 100644 --- a/osidb_bindings/bindings/python_client/models/exploits_api_v1_epss_list_response_200.py +++ b/osidb_bindings/bindings/python_client/models/exploits_api_v1_epss_list_response_200.py @@ -1,52 +1,79 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union, cast -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.epss import EPSS from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.epss import EPSS + + T = TypeVar("T", bound="ExploitsApiV1EpssListResponse200") -@attr.s(auto_attribs=True) 
+@_attrs_define class ExploitsApiV1EpssListResponse200(OSIDBModel): - """ """ + """ + Attributes: + count (int): Example: 123. + results (list['EPSS']): + next_ (Union[None, Unset, str]): Example: http://api.example.org/accounts/?offset=400&limit=100. + previous (Union[None, Unset, str]): Example: http://api.example.org/accounts/?offset=200&limit=100. + dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ count: int - results: List[EPSS] - next_: Union[Unset, None, str] = UNSET - previous: Union[Unset, None, str] = UNSET + results: list["EPSS"] + next_: Union[None, Unset, str] = UNSET + previous: Union[None, Unset, str] = UNSET dt: Union[Unset, datetime.datetime] = UNSET env: Union[Unset, str] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: count = self.count - results: List[Dict[str, Any]] = UNSET + + results: list[dict[str, Any]] = UNSET if not isinstance(self.results, Unset): results = [] for results_item_data in self.results: - results_item: Dict[str, Any] = UNSET + results_item: dict[str, Any] = UNSET if not isinstance(results_item_data, Unset): results_item = results_item_data.to_dict() results.append(results_item) - next_ = self.next_ - previous = self.previous + next_: Union[None, Unset, str] + if isinstance(self.next_, Unset): + next_ = UNSET + else: + next_ = self.next_ + + previous: Union[None, Unset, str] + if isinstance(self.previous, Unset): + previous = UNSET + else: + previous = self.previous + dt: Union[Unset, str] = UNSET if not isinstance(self.dt, Unset): dt = self.dt.isoformat() env = self.env + revision = self.revision + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(count, Unset): field_dict["count"] = count @@ -68,29 +95,44 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.epss import EPSS + d = src_dict.copy() count = d.pop("count", UNSET) results = [] _results = d.pop("results", UNSET) - if _results is UNSET: - results = UNSET - else: - for results_item_data in _results or []: - _results_item = results_item_data - results_item: EPSS - if isinstance(_results_item, Unset): - results_item = UNSET - else: - results_item = EPSS.from_dict(_results_item) - - results.append(results_item) - - next_ = d.pop("next", UNSET) - - previous = d.pop("previous", UNSET) - + for results_item_data in _results or []: + # } + _results_item = results_item_data + results_item: EPSS + if isinstance(_results_item, Unset): + results_item = UNSET + else: + results_item = EPSS.from_dict(_results_item) + + results.append(results_item) + + def _parse_next_(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + next_ = _parse_next_(d.pop("next", UNSET)) + + def _parse_previous(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + previous = 
_parse_previous(d.pop("previous", UNSET)) + + # } _dt = d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -122,9 +164,9 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: def get_fields(): return { "count": int, - "results": List[EPSS], - "next": str, - "previous": str, + "results": list["EPSS"], + "next": Union[None, str], + "previous": Union[None, str], "dt": datetime.datetime, "env": str, "revision": str, @@ -132,7 +174,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/exploits_api_v1_flaw_data_list_response_200.py b/osidb_bindings/bindings/python_client/models/exploits_api_v1_flaw_data_list_response_200.py index 7fdcd55..1a1c0d3 100644 --- a/osidb_bindings/bindings/python_client/models/exploits_api_v1_flaw_data_list_response_200.py +++ b/osidb_bindings/bindings/python_client/models/exploits_api_v1_flaw_data_list_response_200.py @@ -1,52 +1,79 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union, cast -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.flaw_report_data import FlawReportData from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.flaw_report_data import FlawReportData + + T = TypeVar("T", bound="ExploitsApiV1FlawDataListResponse200") -@attr.s(auto_attribs=True) +@_attrs_define class ExploitsApiV1FlawDataListResponse200(OSIDBModel): - """ """ + """ + Attributes: + count (int): Example: 123. + results (list['FlawReportData']): + next_ (Union[None, Unset, str]): Example: http://api.example.org/accounts/?offset=400&limit=100. + previous (Union[None, Unset, str]): Example: http://api.example.org/accounts/?offset=200&limit=100. 
+ dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ count: int - results: List[FlawReportData] - next_: Union[Unset, None, str] = UNSET - previous: Union[Unset, None, str] = UNSET + results: list["FlawReportData"] + next_: Union[None, Unset, str] = UNSET + previous: Union[None, Unset, str] = UNSET dt: Union[Unset, datetime.datetime] = UNSET env: Union[Unset, str] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: count = self.count - results: List[Dict[str, Any]] = UNSET + + results: list[dict[str, Any]] = UNSET if not isinstance(self.results, Unset): results = [] for results_item_data in self.results: - results_item: Dict[str, Any] = UNSET + results_item: dict[str, Any] = UNSET if not isinstance(results_item_data, Unset): results_item = results_item_data.to_dict() results.append(results_item) - next_ = self.next_ - previous = self.previous + next_: Union[None, Unset, str] + if isinstance(self.next_, Unset): + next_ = UNSET + else: + next_ = self.next_ + + previous: Union[None, Unset, str] + if isinstance(self.previous, Unset): + previous = UNSET + else: + previous = self.previous + dt: Union[Unset, str] = UNSET if not isinstance(self.dt, Unset): dt = self.dt.isoformat() env = self.env + revision = self.revision + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(count, Unset): field_dict["count"] = count @@ -68,29 +95,44 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.flaw_report_data import FlawReportData + d = src_dict.copy() count = d.pop("count", UNSET) results = [] _results = d.pop("results", UNSET) - if _results is UNSET: - results = UNSET - else: - for results_item_data in _results or []: - _results_item = results_item_data - results_item: FlawReportData - if isinstance(_results_item, Unset): - results_item = UNSET - else: - results_item = FlawReportData.from_dict(_results_item) - - results.append(results_item) - - next_ = d.pop("next", UNSET) - - previous = d.pop("previous", UNSET) - + for results_item_data in _results or []: + # } + _results_item = results_item_data + results_item: FlawReportData + if isinstance(_results_item, Unset): + results_item = UNSET + else: + results_item = FlawReportData.from_dict(_results_item) + + results.append(results_item) + + def _parse_next_(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + next_ = _parse_next_(d.pop("next", UNSET)) + + def _parse_previous(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + previous = _parse_previous(d.pop("previous", UNSET)) + + # } _dt = d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -122,9 +164,9 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: def get_fields(): return { "count": int, - "results": List[FlawReportData], - 
"next": str, - "previous": str, + "results": list["FlawReportData"], + "next": Union[None, str], + "previous": Union[None, str], "dt": datetime.datetime, "env": str, "revision": str, @@ -132,7 +174,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/exploits_api_v1_report_data_list_response_200.py b/osidb_bindings/bindings/python_client/models/exploits_api_v1_report_data_list_response_200.py index 8410479..31efa2f 100644 --- a/osidb_bindings/bindings/python_client/models/exploits_api_v1_report_data_list_response_200.py +++ b/osidb_bindings/bindings/python_client/models/exploits_api_v1_report_data_list_response_200.py @@ -1,52 +1,79 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union, cast -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.exploit_only_report_data import ExploitOnlyReportData from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.exploit_only_report_data import ExploitOnlyReportData + + T = TypeVar("T", bound="ExploitsApiV1ReportDataListResponse200") -@attr.s(auto_attribs=True) +@_attrs_define class ExploitsApiV1ReportDataListResponse200(OSIDBModel): - """ """ + """ + Attributes: + count (int): Example: 123. + results (list['ExploitOnlyReportData']): + next_ (Union[None, Unset, str]): Example: http://api.example.org/accounts/?offset=400&limit=100. + previous (Union[None, Unset, str]): Example: http://api.example.org/accounts/?offset=200&limit=100. 
+ dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ count: int - results: List[ExploitOnlyReportData] - next_: Union[Unset, None, str] = UNSET - previous: Union[Unset, None, str] = UNSET + results: list["ExploitOnlyReportData"] + next_: Union[None, Unset, str] = UNSET + previous: Union[None, Unset, str] = UNSET dt: Union[Unset, datetime.datetime] = UNSET env: Union[Unset, str] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: count = self.count - results: List[Dict[str, Any]] = UNSET + + results: list[dict[str, Any]] = UNSET if not isinstance(self.results, Unset): results = [] for results_item_data in self.results: - results_item: Dict[str, Any] = UNSET + results_item: dict[str, Any] = UNSET if not isinstance(results_item_data, Unset): results_item = results_item_data.to_dict() results.append(results_item) - next_ = self.next_ - previous = self.previous + next_: Union[None, Unset, str] + if isinstance(self.next_, Unset): + next_ = UNSET + else: + next_ = self.next_ + + previous: Union[None, Unset, str] + if isinstance(self.previous, Unset): + previous = UNSET + else: + previous = self.previous + dt: Union[Unset, str] = UNSET if not isinstance(self.dt, Unset): dt = self.dt.isoformat() env = self.env + revision = self.revision + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(count, Unset): field_dict["count"] = count @@ -68,29 +95,44 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.exploit_only_report_data import ExploitOnlyReportData + d = src_dict.copy() count = d.pop("count", UNSET) results = [] _results = d.pop("results", UNSET) - if _results is UNSET: - results = UNSET - else: - for results_item_data in _results or []: - _results_item = results_item_data - results_item: ExploitOnlyReportData - if isinstance(_results_item, Unset): - results_item = UNSET - else: - results_item = ExploitOnlyReportData.from_dict(_results_item) - - results.append(results_item) - - next_ = d.pop("next", UNSET) - - previous = d.pop("previous", UNSET) - + for results_item_data in _results or []: + # } + _results_item = results_item_data + results_item: ExploitOnlyReportData + if isinstance(_results_item, Unset): + results_item = UNSET + else: + results_item = ExploitOnlyReportData.from_dict(_results_item) + + results.append(results_item) + + def _parse_next_(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + next_ = _parse_next_(d.pop("next", UNSET)) + + def _parse_previous(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + previous = _parse_previous(d.pop("previous", UNSET)) + + # } _dt = d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -122,9 +164,9 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: def get_fields(): return { 
"count": int, - "results": List[ExploitOnlyReportData], - "next": str, - "previous": str, + "results": list["ExploitOnlyReportData"], + "next": Union[None, str], + "previous": Union[None, str], "dt": datetime.datetime, "env": str, "revision": str, @@ -132,7 +174,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/exploits_api_v1_report_date_retrieve_response_200.py b/osidb_bindings/bindings/python_client/models/exploits_api_v1_report_date_retrieve_response_200.py index 5fc5a3b..ffbb19f 100644 --- a/osidb_bindings/bindings/python_client/models/exploits_api_v1_report_date_retrieve_response_200.py +++ b/osidb_bindings/bindings/python_client/models/exploits_api_v1_report_date_retrieve_response_200.py @@ -1,86 +1,99 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.exploits_api_v1_report_date_retrieve_response_200_action_required_item import ( - ExploitsApiV1ReportDateRetrieveResponse200ActionRequiredItem, -) -from ..models.exploits_api_v1_report_date_retrieve_response_200_no_action_item import ( - ExploitsApiV1ReportDateRetrieveResponse200NoActionItem, -) -from ..models.exploits_api_v1_report_date_retrieve_response_200_not_relevant_item import ( - ExploitsApiV1ReportDateRetrieveResponse200NotRelevantItem, -) from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.exploits_api_v1_report_date_retrieve_response_200_action_required_item import ( + ExploitsApiV1ReportDateRetrieveResponse200ActionRequiredItem, + ) + from ..models.exploits_api_v1_report_date_retrieve_response_200_no_action_item import ( + ExploitsApiV1ReportDateRetrieveResponse200NoActionItem, + ) + from ..models.exploits_api_v1_report_date_retrieve_response_200_not_relevant_item import ( + ExploitsApiV1ReportDateRetrieveResponse200NotRelevantItem, + ) + + T = TypeVar("T", bound="ExploitsApiV1ReportDateRetrieveResponse200") -@attr.s(auto_attribs=True) +@_attrs_define class ExploitsApiV1ReportDateRetrieveResponse200(OSIDBModel): - """ """ - - action_required: Union[ - Unset, List[ExploitsApiV1ReportDateRetrieveResponse200ActionRequiredItem] - ] = UNSET + """ + Attributes: + action_required (Union[Unset, list['ExploitsApiV1ReportDateRetrieveResponse200ActionRequiredItem']]): + cutoff_date (Union[Unset, str]): + dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + evaluated_cves (Union[Unset, int]): + no_action (Union[Unset, list['ExploitsApiV1ReportDateRetrieveResponse200NoActionItem']]): + not_relevant (Union[Unset, list['ExploitsApiV1ReportDateRetrieveResponse200NotRelevantItem']]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ + + action_required: Union[Unset, list["ExploitsApiV1ReportDateRetrieveResponse200ActionRequiredItem"]] = UNSET cutoff_date: Union[Unset, str] = UNSET dt: Union[Unset, datetime.datetime] = UNSET env: Union[Unset, str] = UNSET evaluated_cves: Union[Unset, int] = UNSET - no_action: Union[ - Unset, List[ExploitsApiV1ReportDateRetrieveResponse200NoActionItem] - ] = UNSET - not_relevant: Union[ - Unset, List[ExploitsApiV1ReportDateRetrieveResponse200NotRelevantItem] - ] = UNSET + no_action: Union[Unset, 
list["ExploitsApiV1ReportDateRetrieveResponse200NoActionItem"]] = UNSET + not_relevant: Union[Unset, list["ExploitsApiV1ReportDateRetrieveResponse200NotRelevantItem"]] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: - action_required: Union[Unset, List[Dict[str, Any]]] = UNSET + def to_dict(self) -> dict[str, Any]: + action_required: Union[Unset, list[dict[str, Any]]] = UNSET if not isinstance(self.action_required, Unset): action_required = [] for action_required_item_data in self.action_required: - action_required_item: Dict[str, Any] = UNSET + action_required_item: dict[str, Any] = UNSET if not isinstance(action_required_item_data, Unset): action_required_item = action_required_item_data.to_dict() action_required.append(action_required_item) cutoff_date = self.cutoff_date + dt: Union[Unset, str] = UNSET if not isinstance(self.dt, Unset): dt = self.dt.isoformat() env = self.env + evaluated_cves = self.evaluated_cves - no_action: Union[Unset, List[Dict[str, Any]]] = UNSET + + no_action: Union[Unset, list[dict[str, Any]]] = UNSET if not isinstance(self.no_action, Unset): no_action = [] for no_action_item_data in self.no_action: - no_action_item: Dict[str, Any] = UNSET + no_action_item: dict[str, Any] = UNSET if not isinstance(no_action_item_data, Unset): no_action_item = no_action_item_data.to_dict() no_action.append(no_action_item) - not_relevant: Union[Unset, List[Dict[str, Any]]] = UNSET + not_relevant: Union[Unset, list[dict[str, Any]]] = UNSET if not isinstance(self.not_relevant, Unset): not_relevant = [] for not_relevant_item_data in self.not_relevant: - not_relevant_item: Dict[str, Any] = UNSET + not_relevant_item: dict[str, Any] = UNSET if not isinstance(not_relevant_item_data, Unset): not_relevant_item = not_relevant_item_data.to_dict() not_relevant.append(not_relevant_item) revision = self.revision + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(action_required, Unset): field_dict["action_required"] = action_required @@ -104,27 +117,36 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.exploits_api_v1_report_date_retrieve_response_200_action_required_item import ( + ExploitsApiV1ReportDateRetrieveResponse200ActionRequiredItem, + ) + from ..models.exploits_api_v1_report_date_retrieve_response_200_no_action_item import ( + ExploitsApiV1ReportDateRetrieveResponse200NoActionItem, + ) + from ..models.exploits_api_v1_report_date_retrieve_response_200_not_relevant_item import ( + ExploitsApiV1ReportDateRetrieveResponse200NotRelevantItem, + ) + d = src_dict.copy() action_required = [] _action_required = d.pop("action_required", UNSET) - if _action_required is UNSET: - action_required = UNSET - else: - for action_required_item_data in _action_required or []: - _action_required_item = action_required_item_data - action_required_item: ExploitsApiV1ReportDateRetrieveResponse200ActionRequiredItem - if isinstance(_action_required_item, Unset): - action_required_item = UNSET - else: - action_required_item = ExploitsApiV1ReportDateRetrieveResponse200ActionRequiredItem.from_dict( - _action_required_item - ) - - 
action_required.append(action_required_item) + for action_required_item_data in _action_required or []: + # } + _action_required_item = action_required_item_data + action_required_item: ExploitsApiV1ReportDateRetrieveResponse200ActionRequiredItem + if isinstance(_action_required_item, Unset): + action_required_item = UNSET + else: + action_required_item = ExploitsApiV1ReportDateRetrieveResponse200ActionRequiredItem.from_dict( + _action_required_item + ) + + action_required.append(action_required_item) cutoff_date = d.pop("cutoff_date", UNSET) + # } _dt = d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -138,37 +160,31 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: no_action = [] _no_action = d.pop("no_action", UNSET) - if _no_action is UNSET: - no_action = UNSET - else: - for no_action_item_data in _no_action or []: - _no_action_item = no_action_item_data - no_action_item: ExploitsApiV1ReportDateRetrieveResponse200NoActionItem - if isinstance(_no_action_item, Unset): - no_action_item = UNSET - else: - no_action_item = ExploitsApiV1ReportDateRetrieveResponse200NoActionItem.from_dict( - _no_action_item - ) + for no_action_item_data in _no_action or []: + # } + _no_action_item = no_action_item_data + no_action_item: ExploitsApiV1ReportDateRetrieveResponse200NoActionItem + if isinstance(_no_action_item, Unset): + no_action_item = UNSET + else: + no_action_item = ExploitsApiV1ReportDateRetrieveResponse200NoActionItem.from_dict(_no_action_item) - no_action.append(no_action_item) + no_action.append(no_action_item) not_relevant = [] _not_relevant = d.pop("not_relevant", UNSET) - if _not_relevant is UNSET: - not_relevant = UNSET - else: - for not_relevant_item_data in _not_relevant or []: - _not_relevant_item = not_relevant_item_data - not_relevant_item: ExploitsApiV1ReportDateRetrieveResponse200NotRelevantItem - if isinstance(_not_relevant_item, Unset): - not_relevant_item = UNSET - else: - not_relevant_item = ExploitsApiV1ReportDateRetrieveResponse200NotRelevantItem.from_dict( - _not_relevant_item - ) - - not_relevant.append(not_relevant_item) + for not_relevant_item_data in _not_relevant or []: + # } + _not_relevant_item = not_relevant_item_data + not_relevant_item: ExploitsApiV1ReportDateRetrieveResponse200NotRelevantItem + if isinstance(_not_relevant_item, Unset): + not_relevant_item = UNSET + else: + not_relevant_item = ExploitsApiV1ReportDateRetrieveResponse200NotRelevantItem.from_dict( + _not_relevant_item + ) + + not_relevant.append(not_relevant_item) revision = d.pop("revision", UNSET) @@ -192,23 +208,19 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: @staticmethod def get_fields(): return { - "action_required": List[ - ExploitsApiV1ReportDateRetrieveResponse200ActionRequiredItem - ], + "action_required": list["ExploitsApiV1ReportDateRetrieveResponse200ActionRequiredItem"], "cutoff_date": str, "dt": datetime.datetime, "env": str, "evaluated_cves": int, - "no_action": List[ExploitsApiV1ReportDateRetrieveResponse200NoActionItem], - "not_relevant": List[ - ExploitsApiV1ReportDateRetrieveResponse200NotRelevantItem - ], + "no_action": list["ExploitsApiV1ReportDateRetrieveResponse200NoActionItem"], + "not_relevant": list["ExploitsApiV1ReportDateRetrieveResponse200NotRelevantItem"], "revision": str, "version": str, } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git 
a/osidb_bindings/bindings/python_client/models/exploits_api_v1_report_date_retrieve_response_200_action_required_item.py b/osidb_bindings/bindings/python_client/models/exploits_api_v1_report_date_retrieve_response_200_action_required_item.py index a677fcd..f799183 100644 --- a/osidb_bindings/bindings/python_client/models/exploits_api_v1_report_date_retrieve_response_200_action_required_item.py +++ b/osidb_bindings/bindings/python_client/models/exploits_api_v1_report_date_retrieve_response_200_action_required_item.py @@ -1,33 +1,31 @@ -from typing import Any, Dict, List, Type, TypeVar +from typing import Any, TypeVar -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from ..types import OSIDBModel T = TypeVar("T", bound="ExploitsApiV1ReportDateRetrieveResponse200ActionRequiredItem") -@attr.s(auto_attribs=True) +@_attrs_define class ExploitsApiV1ReportDateRetrieveResponse200ActionRequiredItem(OSIDBModel): """ """ - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: - - field_dict: Dict[str, Any] = {} + def to_dict(self) -> dict[str, Any]: + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: d = src_dict.copy() exploits_api_v1_report_date_retrieve_response_200_action_required_item = cls() - exploits_api_v1_report_date_retrieve_response_200_action_required_item.additional_properties = ( - d - ) + exploits_api_v1_report_date_retrieve_response_200_action_required_item.additional_properties = d return exploits_api_v1_report_date_retrieve_response_200_action_required_item @staticmethod @@ -35,7 +33,7 @@ def get_fields(): return {} @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/exploits_api_v1_report_date_retrieve_response_200_no_action_item.py b/osidb_bindings/bindings/python_client/models/exploits_api_v1_report_date_retrieve_response_200_no_action_item.py index 0665adb..2c14488 100644 --- a/osidb_bindings/bindings/python_client/models/exploits_api_v1_report_date_retrieve_response_200_no_action_item.py +++ b/osidb_bindings/bindings/python_client/models/exploits_api_v1_report_date_retrieve_response_200_no_action_item.py @@ -1,33 +1,31 @@ -from typing import Any, Dict, List, Type, TypeVar +from typing import Any, TypeVar -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from ..types import OSIDBModel T = TypeVar("T", bound="ExploitsApiV1ReportDateRetrieveResponse200NoActionItem") -@attr.s(auto_attribs=True) +@_attrs_define class ExploitsApiV1ReportDateRetrieveResponse200NoActionItem(OSIDBModel): """ """ - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: - - field_dict: Dict[str, Any] = {} + def to_dict(self) -> dict[str, Any]: + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: d 
= src_dict.copy() exploits_api_v1_report_date_retrieve_response_200_no_action_item = cls() - exploits_api_v1_report_date_retrieve_response_200_no_action_item.additional_properties = ( - d - ) + exploits_api_v1_report_date_retrieve_response_200_no_action_item.additional_properties = d return exploits_api_v1_report_date_retrieve_response_200_no_action_item @staticmethod @@ -35,7 +33,7 @@ def get_fields(): return {} @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/exploits_api_v1_report_date_retrieve_response_200_not_relevant_item.py b/osidb_bindings/bindings/python_client/models/exploits_api_v1_report_date_retrieve_response_200_not_relevant_item.py index 6a54ca6..612009d 100644 --- a/osidb_bindings/bindings/python_client/models/exploits_api_v1_report_date_retrieve_response_200_not_relevant_item.py +++ b/osidb_bindings/bindings/python_client/models/exploits_api_v1_report_date_retrieve_response_200_not_relevant_item.py @@ -1,33 +1,31 @@ -from typing import Any, Dict, List, Type, TypeVar +from typing import Any, TypeVar -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from ..types import OSIDBModel T = TypeVar("T", bound="ExploitsApiV1ReportDateRetrieveResponse200NotRelevantItem") -@attr.s(auto_attribs=True) +@_attrs_define class ExploitsApiV1ReportDateRetrieveResponse200NotRelevantItem(OSIDBModel): """ """ - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: - - field_dict: Dict[str, Any] = {} + def to_dict(self) -> dict[str, Any]: + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: d = src_dict.copy() exploits_api_v1_report_date_retrieve_response_200_not_relevant_item = cls() - exploits_api_v1_report_date_retrieve_response_200_not_relevant_item.additional_properties = ( - d - ) + exploits_api_v1_report_date_retrieve_response_200_not_relevant_item.additional_properties = d return exploits_api_v1_report_date_retrieve_response_200_not_relevant_item @staticmethod @@ -35,7 +33,7 @@ def get_fields(): return {} @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/exploits_api_v1_report_explanations_retrieve_response_200.py b/osidb_bindings/bindings/python_client/models/exploits_api_v1_report_explanations_retrieve_response_200.py index 4e57c1f..55af84e 100644 --- a/osidb_bindings/bindings/python_client/models/exploits_api_v1_report_explanations_retrieve_response_200.py +++ b/osidb_bindings/bindings/python_client/models/exploits_api_v1_report_explanations_retrieve_response_200.py @@ -1,52 +1,65 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.exploits_api_v1_report_explanations_retrieve_response_200_explanations_item import ( - 
ExploitsApiV1ReportExplanationsRetrieveResponse200ExplanationsItem, -) from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.exploits_api_v1_report_explanations_retrieve_response_200_explanations_item import ( + ExploitsApiV1ReportExplanationsRetrieveResponse200ExplanationsItem, + ) + + T = TypeVar("T", bound="ExploitsApiV1ReportExplanationsRetrieveResponse200") -@attr.s(auto_attribs=True) +@_attrs_define class ExploitsApiV1ReportExplanationsRetrieveResponse200(OSIDBModel): - """ """ + """ + Attributes: + dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + explanations (Union[Unset, list['ExploitsApiV1ReportExplanationsRetrieveResponse200ExplanationsItem']]): + page_size (Union[Unset, int]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ dt: Union[Unset, datetime.datetime] = UNSET env: Union[Unset, str] = UNSET - explanations: Union[ - Unset, List[ExploitsApiV1ReportExplanationsRetrieveResponse200ExplanationsItem] - ] = UNSET + explanations: Union[Unset, list["ExploitsApiV1ReportExplanationsRetrieveResponse200ExplanationsItem"]] = UNSET page_size: Union[Unset, int] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: dt: Union[Unset, str] = UNSET if not isinstance(self.dt, Unset): dt = self.dt.isoformat() env = self.env - explanations: Union[Unset, List[Dict[str, Any]]] = UNSET + + explanations: Union[Unset, list[dict[str, Any]]] = UNSET if not isinstance(self.explanations, Unset): explanations = [] for explanations_item_data in self.explanations: - explanations_item: Dict[str, Any] = UNSET + explanations_item: dict[str, Any] = UNSET if not isinstance(explanations_item_data, Unset): explanations_item = explanations_item_data.to_dict() explanations.append(explanations_item) page_size = self.page_size + revision = self.revision + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(dt, Unset): field_dict["dt"] = dt @@ -64,8 +77,13 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.exploits_api_v1_report_explanations_retrieve_response_200_explanations_item import ( + ExploitsApiV1ReportExplanationsRetrieveResponse200ExplanationsItem, + ) + d = src_dict.copy() + # } _dt = d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -77,20 +95,18 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: explanations = [] _explanations = d.pop("explanations", UNSET) - if _explanations is UNSET: - explanations = UNSET - else: - for explanations_item_data in _explanations or []: - _explanations_item = explanations_item_data - explanations_item: ExploitsApiV1ReportExplanationsRetrieveResponse200ExplanationsItem - if isinstance(_explanations_item, Unset): - explanations_item = UNSET - else: - explanations_item = ExploitsApiV1ReportExplanationsRetrieveResponse200ExplanationsItem.from_dict( - _explanations_item - ) - - explanations.append(explanations_item) + for explanations_item_data in _explanations or []: + # } + _explanations_item = explanations_item_data + explanations_item: 
ExploitsApiV1ReportExplanationsRetrieveResponse200ExplanationsItem + if isinstance(_explanations_item, Unset): + explanations_item = UNSET + else: + explanations_item = ExploitsApiV1ReportExplanationsRetrieveResponse200ExplanationsItem.from_dict( + _explanations_item + ) + + explanations.append(explanations_item) page_size = d.pop("page_size", UNSET) @@ -107,9 +123,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: version=version, ) - exploits_api_v1_report_explanations_retrieve_response_200.additional_properties = ( - d - ) + exploits_api_v1_report_explanations_retrieve_response_200.additional_properties = d return exploits_api_v1_report_explanations_retrieve_response_200 @staticmethod @@ -117,16 +131,14 @@ def get_fields(): return { "dt": datetime.datetime, "env": str, - "explanations": List[ - ExploitsApiV1ReportExplanationsRetrieveResponse200ExplanationsItem - ], + "explanations": list["ExploitsApiV1ReportExplanationsRetrieveResponse200ExplanationsItem"], "page_size": int, "revision": str, "version": str, } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/exploits_api_v1_report_explanations_retrieve_response_200_explanations_item.py b/osidb_bindings/bindings/python_client/models/exploits_api_v1_report_explanations_retrieve_response_200_explanations_item.py index 55e16ab..3eb612f 100644 --- a/osidb_bindings/bindings/python_client/models/exploits_api_v1_report_explanations_retrieve_response_200_explanations_item.py +++ b/osidb_bindings/bindings/python_client/models/exploits_api_v1_report_explanations_retrieve_response_200_explanations_item.py @@ -1,47 +1,39 @@ -from typing import Any, Dict, List, Type, TypeVar +from typing import Any, TypeVar -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from ..types import OSIDBModel -T = TypeVar( - "T", bound="ExploitsApiV1ReportExplanationsRetrieveResponse200ExplanationsItem" -) +T = TypeVar("T", bound="ExploitsApiV1ReportExplanationsRetrieveResponse200ExplanationsItem") -@attr.s(auto_attribs=True) +@_attrs_define class ExploitsApiV1ReportExplanationsRetrieveResponse200ExplanationsItem(OSIDBModel): """ """ - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: - - field_dict: Dict[str, Any] = {} + def to_dict(self) -> dict[str, Any]: + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: d = src_dict.copy() - exploits_api_v1_report_explanations_retrieve_response_200_explanations_item = ( - cls() - ) + exploits_api_v1_report_explanations_retrieve_response_200_explanations_item = cls() - exploits_api_v1_report_explanations_retrieve_response_200_explanations_item.additional_properties = ( - d - ) - return ( - exploits_api_v1_report_explanations_retrieve_response_200_explanations_item - ) + exploits_api_v1_report_explanations_retrieve_response_200_explanations_item.additional_properties = d + return exploits_api_v1_report_explanations_retrieve_response_200_explanations_item @staticmethod def get_fields(): return {} @property - def additional_keys(self) -> List[str]: + def 
additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/exploits_api_v1_report_pending_retrieve_response_200.py b/osidb_bindings/bindings/python_client/models/exploits_api_v1_report_pending_retrieve_response_200.py index 5f14d69..24f01ee 100644 --- a/osidb_bindings/bindings/python_client/models/exploits_api_v1_report_pending_retrieve_response_200.py +++ b/osidb_bindings/bindings/python_client/models/exploits_api_v1_report_pending_retrieve_response_200.py @@ -1,52 +1,65 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.exploits_api_v1_report_pending_retrieve_response_200_pending_actions_item import ( - ExploitsApiV1ReportPendingRetrieveResponse200PendingActionsItem, -) from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.exploits_api_v1_report_pending_retrieve_response_200_pending_actions_item import ( + ExploitsApiV1ReportPendingRetrieveResponse200PendingActionsItem, + ) + + T = TypeVar("T", bound="ExploitsApiV1ReportPendingRetrieveResponse200") -@attr.s(auto_attribs=True) +@_attrs_define class ExploitsApiV1ReportPendingRetrieveResponse200(OSIDBModel): - """ """ + """ + Attributes: + dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + pending_actions (Union[Unset, list['ExploitsApiV1ReportPendingRetrieveResponse200PendingActionsItem']]): + pending_actions_count (Union[Unset, int]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ dt: Union[Unset, datetime.datetime] = UNSET env: Union[Unset, str] = UNSET - pending_actions: Union[ - Unset, List[ExploitsApiV1ReportPendingRetrieveResponse200PendingActionsItem] - ] = UNSET + pending_actions: Union[Unset, list["ExploitsApiV1ReportPendingRetrieveResponse200PendingActionsItem"]] = UNSET pending_actions_count: Union[Unset, int] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: dt: Union[Unset, str] = UNSET if not isinstance(self.dt, Unset): dt = self.dt.isoformat() env = self.env - pending_actions: Union[Unset, List[Dict[str, Any]]] = UNSET + + pending_actions: Union[Unset, list[dict[str, Any]]] = UNSET if not isinstance(self.pending_actions, Unset): pending_actions = [] for pending_actions_item_data in self.pending_actions: - pending_actions_item: Dict[str, Any] = UNSET + pending_actions_item: dict[str, Any] = UNSET if not isinstance(pending_actions_item_data, Unset): pending_actions_item = pending_actions_item_data.to_dict() pending_actions.append(pending_actions_item) pending_actions_count = self.pending_actions_count + revision = self.revision + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(dt, Unset): field_dict["dt"] = dt @@ -64,8 +77,13 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from 
..models.exploits_api_v1_report_pending_retrieve_response_200_pending_actions_item import ( + ExploitsApiV1ReportPendingRetrieveResponse200PendingActionsItem, + ) + d = src_dict.copy() + # } _dt = d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -77,20 +95,18 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: pending_actions = [] _pending_actions = d.pop("pending_actions", UNSET) - if _pending_actions is UNSET: - pending_actions = UNSET - else: - for pending_actions_item_data in _pending_actions or []: - _pending_actions_item = pending_actions_item_data - pending_actions_item: ExploitsApiV1ReportPendingRetrieveResponse200PendingActionsItem - if isinstance(_pending_actions_item, Unset): - pending_actions_item = UNSET - else: - pending_actions_item = ExploitsApiV1ReportPendingRetrieveResponse200PendingActionsItem.from_dict( - _pending_actions_item - ) - - pending_actions.append(pending_actions_item) + for pending_actions_item_data in _pending_actions or []: + # } + _pending_actions_item = pending_actions_item_data + pending_actions_item: ExploitsApiV1ReportPendingRetrieveResponse200PendingActionsItem + if isinstance(_pending_actions_item, Unset): + pending_actions_item = UNSET + else: + pending_actions_item = ExploitsApiV1ReportPendingRetrieveResponse200PendingActionsItem.from_dict( + _pending_actions_item + ) + + pending_actions.append(pending_actions_item) pending_actions_count = d.pop("pending_actions_count", UNSET) @@ -115,16 +131,14 @@ def get_fields(): return { "dt": datetime.datetime, "env": str, - "pending_actions": List[ - ExploitsApiV1ReportPendingRetrieveResponse200PendingActionsItem - ], + "pending_actions": list["ExploitsApiV1ReportPendingRetrieveResponse200PendingActionsItem"], "pending_actions_count": int, "revision": str, "version": str, } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/exploits_api_v1_report_pending_retrieve_response_200_pending_actions_item.py b/osidb_bindings/bindings/python_client/models/exploits_api_v1_report_pending_retrieve_response_200_pending_actions_item.py index e619cdc..bdc3373 100644 --- a/osidb_bindings/bindings/python_client/models/exploits_api_v1_report_pending_retrieve_response_200_pending_actions_item.py +++ b/osidb_bindings/bindings/python_client/models/exploits_api_v1_report_pending_retrieve_response_200_pending_actions_item.py @@ -1,37 +1,31 @@ -from typing import Any, Dict, List, Type, TypeVar +from typing import Any, TypeVar -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from ..types import OSIDBModel -T = TypeVar( - "T", bound="ExploitsApiV1ReportPendingRetrieveResponse200PendingActionsItem" -) +T = TypeVar("T", bound="ExploitsApiV1ReportPendingRetrieveResponse200PendingActionsItem") -@attr.s(auto_attribs=True) +@_attrs_define class ExploitsApiV1ReportPendingRetrieveResponse200PendingActionsItem(OSIDBModel): """ """ - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: - - field_dict: Dict[str, Any] = {} + def to_dict(self) -> dict[str, Any]: + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 
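# --- Illustrative usage sketch (not part of the regenerated file above) ---
# A minimal example of how the regenerated from_dict()/to_dict() pair is meant to be
# used for this response model. The import prefix is assumed from the repository
# layout shown in this patch, and the payload values are hypothetical.
from osidb_bindings.bindings.python_client.models.exploits_api_v1_report_pending_retrieve_response_200 import (
    ExploitsApiV1ReportPendingRetrieveResponse200,
)

payload = {
    "dt": "2024-12-18T18:15:17+01:00",  # parsed with dateutil's isoparse
    "env": "stage",
    "pending_actions": [],              # each item would become a model instance
    "pending_actions_count": 0,
}
report = ExploitsApiV1ReportPendingRetrieveResponse200.from_dict(payload)
# Keys absent from the payload (revision, version) stay UNSET and are omitted again:
assert "revision" not in report.to_dict()
assert report.to_dict()["pending_actions_count"] == 0
# --- end sketch ---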
+ def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: d = src_dict.copy() - exploits_api_v1_report_pending_retrieve_response_200_pending_actions_item = ( - cls() - ) + exploits_api_v1_report_pending_retrieve_response_200_pending_actions_item = cls() - exploits_api_v1_report_pending_retrieve_response_200_pending_actions_item.additional_properties = ( - d - ) + exploits_api_v1_report_pending_retrieve_response_200_pending_actions_item.additional_properties = d return exploits_api_v1_report_pending_retrieve_response_200_pending_actions_item @staticmethod @@ -39,7 +33,7 @@ def get_fields(): return {} @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/exploits_api_v1_status_retrieve_response_200.py b/osidb_bindings/bindings/python_client/models/exploits_api_v1_status_retrieve_response_200.py index 9077b17..ff7b056 100644 --- a/osidb_bindings/bindings/python_client/models/exploits_api_v1_status_retrieve_response_200.py +++ b/osidb_bindings/bindings/python_client/models/exploits_api_v1_status_retrieve_response_200.py @@ -1,7 +1,8 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import Any, TypeVar, Union -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse from ..types import UNSET, OSIDBModel, Unset @@ -9,9 +10,18 @@ T = TypeVar("T", bound="ExploitsApiV1StatusRetrieveResponse200") -@attr.s(auto_attribs=True) +@_attrs_define class ExploitsApiV1StatusRetrieveResponse200(OSIDBModel): - """ """ + """ + Attributes: + dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + exploits_count (Union[Unset, int]): + exploits_count_relevant (Union[Unset, int]): + last_exploit (Union[Unset, int]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ dt: Union[Unset, datetime.datetime] = UNSET env: Union[Unset, str] = UNSET @@ -20,21 +30,26 @@ class ExploitsApiV1StatusRetrieveResponse200(OSIDBModel): last_exploit: Union[Unset, int] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: dt: Union[Unset, str] = UNSET if not isinstance(self.dt, Unset): dt = self.dt.isoformat() env = self.env + exploits_count = self.exploits_count + exploits_count_relevant = self.exploits_count_relevant + last_exploit = self.last_exploit + revision = self.revision + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(dt, Unset): field_dict["dt"] = dt @@ -54,8 +69,9 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: d = src_dict.copy() + # } _dt = d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -101,7 +117,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git 
a/osidb_bindings/bindings/python_client/models/exploits_api_v1_supported_products_list_response_200.py b/osidb_bindings/bindings/python_client/models/exploits_api_v1_supported_products_list_response_200.py index 16c620e..98c999b 100644 --- a/osidb_bindings/bindings/python_client/models/exploits_api_v1_supported_products_list_response_200.py +++ b/osidb_bindings/bindings/python_client/models/exploits_api_v1_supported_products_list_response_200.py @@ -1,52 +1,79 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union, cast -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.supported_products import SupportedProducts from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.supported_products import SupportedProducts + + T = TypeVar("T", bound="ExploitsApiV1SupportedProductsListResponse200") -@attr.s(auto_attribs=True) +@_attrs_define class ExploitsApiV1SupportedProductsListResponse200(OSIDBModel): - """ """ + """ + Attributes: + count (int): Example: 123. + results (list['SupportedProducts']): + next_ (Union[None, Unset, str]): Example: http://api.example.org/accounts/?offset=400&limit=100. + previous (Union[None, Unset, str]): Example: http://api.example.org/accounts/?offset=200&limit=100. + dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ count: int - results: List[SupportedProducts] - next_: Union[Unset, None, str] = UNSET - previous: Union[Unset, None, str] = UNSET + results: list["SupportedProducts"] + next_: Union[None, Unset, str] = UNSET + previous: Union[None, Unset, str] = UNSET dt: Union[Unset, datetime.datetime] = UNSET env: Union[Unset, str] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: count = self.count - results: List[Dict[str, Any]] = UNSET + + results: list[dict[str, Any]] = UNSET if not isinstance(self.results, Unset): results = [] for results_item_data in self.results: - results_item: Dict[str, Any] = UNSET + results_item: dict[str, Any] = UNSET if not isinstance(results_item_data, Unset): results_item = results_item_data.to_dict() results.append(results_item) - next_ = self.next_ - previous = self.previous + next_: Union[None, Unset, str] + if isinstance(self.next_, Unset): + next_ = UNSET + else: + next_ = self.next_ + + previous: Union[None, Unset, str] + if isinstance(self.previous, Unset): + previous = UNSET + else: + previous = self.previous + dt: Union[Unset, str] = UNSET if not isinstance(self.dt, Unset): dt = self.dt.isoformat() env = self.env + revision = self.revision + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(count, Unset): field_dict["count"] = count @@ -68,29 +95,44 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.supported_products import SupportedProducts + d = src_dict.copy() count = d.pop("count", UNSET) results = [] _results = d.pop("results", 
UNSET) - if _results is UNSET: - results = UNSET - else: - for results_item_data in _results or []: - _results_item = results_item_data - results_item: SupportedProducts - if isinstance(_results_item, Unset): - results_item = UNSET - else: - results_item = SupportedProducts.from_dict(_results_item) - - results.append(results_item) - - next_ = d.pop("next", UNSET) - - previous = d.pop("previous", UNSET) - + for results_item_data in _results or []: + # } + _results_item = results_item_data + results_item: SupportedProducts + if isinstance(_results_item, Unset): + results_item = UNSET + else: + results_item = SupportedProducts.from_dict(_results_item) + + results.append(results_item) + + def _parse_next_(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + next_ = _parse_next_(d.pop("next", UNSET)) + + def _parse_previous(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + previous = _parse_previous(d.pop("previous", UNSET)) + + # } _dt = d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -122,9 +164,9 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: def get_fields(): return { "count": int, - "results": List[SupportedProducts], - "next": str, - "previous": str, + "results": list["SupportedProducts"], + "next": Union[None, str], + "previous": Union[None, str], "dt": datetime.datetime, "env": str, "revision": str, @@ -132,7 +174,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/flaw.py b/osidb_bindings/bindings/python_client/models/flaw.py index c6a6817..ef5c5cb 100644 --- a/osidb_bindings/bindings/python_client/models/flaw.py +++ b/osidb_bindings/bindings/python_client/models/flaw.py @@ -1,139 +1,186 @@ import datetime import json -from typing import Any, Dict, List, Tuple, Type, TypeVar, Union, cast +from typing import TYPE_CHECKING, Any, TypeVar, Union, cast +from uuid import UUID -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.affect import Affect -from ..models.alert import Alert from ..models.blank_enum import BlankEnum -from ..models.comment import Comment -from ..models.flaw_acknowledgment import FlawAcknowledgment -from ..models.flaw_classification import FlawClassification -from ..models.flaw_cvss import FlawCVSS -from ..models.flaw_reference import FlawReference from ..models.impact_enum import ImpactEnum from ..models.major_incident_state_enum import MajorIncidentStateEnum from ..models.nist_cvss_validation_enum import NistCvssValidationEnum -from ..models.package import Package from ..models.requires_cve_description_enum import RequiresCveDescriptionEnum from ..models.source_be_0_enum import SourceBe0Enum from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.affect import Affect + from ..models.alert import Alert + from ..models.comment import Comment + from ..models.flaw_acknowledgment import FlawAcknowledgment + from ..models.flaw_classification import FlawClassification + from ..models.flaw_cvss import FlawCVSS + from ..models.flaw_reference import FlawReference + from 
..models.package import Package + + T = TypeVar("T", bound="Flaw") -@attr.s(auto_attribs=True) +@_attrs_define class Flaw(OSIDBModel): - """serialize flaw model""" - - uuid: str + """serialize flaw model + + Attributes: + uuid (UUID): + title (str): + trackers (list[str]): + comment_zero (str): + affects (list['Affect']): + comments (list['Comment']): + package_versions (list['Package']): + acknowledgments (list['FlawAcknowledgment']): + references (list['FlawReference']): + cvss_scores (list['FlawCVSS']): + embargoed (bool): The embargoed boolean attribute is technically read-only as it just indirectly modifies the + ACLs but is mandatory as it controls the access to the resource. + created_dt (datetime.datetime): + updated_dt (datetime.datetime): The updated_dt timestamp attribute is mandatory on update as it is used to + detect mit-air collisions. + classification (FlawClassification): + alerts (list['Alert']): + cve_id (Union[None, Unset, str]): + impact (Union[BlankEnum, ImpactEnum, Unset]): + components (Union[Unset, list[str]]): + cve_description (Union[Unset, str]): + requires_cve_description (Union[BlankEnum, RequiresCveDescriptionEnum, Unset]): + statement (Union[Unset, str]): + cwe_id (Union[Unset, str]): + unembargo_dt (Union[None, Unset, datetime.datetime]): + source (Union[BlankEnum, SourceBe0Enum, Unset]): + reported_dt (Union[None, Unset, datetime.datetime]): + mitigation (Union[Unset, str]): + major_incident_state (Union[BlankEnum, MajorIncidentStateEnum, Unset]): + major_incident_start_dt (Union[None, Unset, datetime.datetime]): + nist_cvss_validation (Union[BlankEnum, NistCvssValidationEnum, Unset]): + group_key (Union[Unset, str]): + owner (Union[Unset, str]): + task_key (Union[Unset, str]): + team_id (Union[Unset, str]): + """ + + uuid: UUID title: str - trackers: List[str] + trackers: list[str] comment_zero: str - affects: List[Affect] - comments: List[Comment] - package_versions: List[Package] - acknowledgments: List[FlawAcknowledgment] - references: List[FlawReference] - cvss_scores: List[FlawCVSS] + affects: list["Affect"] + comments: list["Comment"] + package_versions: list["Package"] + acknowledgments: list["FlawAcknowledgment"] + references: list["FlawReference"] + cvss_scores: list["FlawCVSS"] embargoed: bool created_dt: datetime.datetime updated_dt: datetime.datetime - classification: FlawClassification - alerts: List[Alert] - cve_id: Union[Unset, None, str] = UNSET + classification: "FlawClassification" + alerts: list["Alert"] + cve_id: Union[None, Unset, str] = UNSET impact: Union[BlankEnum, ImpactEnum, Unset] = UNSET - components: Union[Unset, List[str]] = UNSET + components: Union[Unset, list[str]] = UNSET cve_description: Union[Unset, str] = UNSET - requires_cve_description: Union[ - BlankEnum, RequiresCveDescriptionEnum, Unset - ] = UNSET + requires_cve_description: Union[BlankEnum, RequiresCveDescriptionEnum, Unset] = UNSET statement: Union[Unset, str] = UNSET cwe_id: Union[Unset, str] = UNSET - unembargo_dt: Union[Unset, None, datetime.datetime] = UNSET + unembargo_dt: Union[None, Unset, datetime.datetime] = UNSET source: Union[BlankEnum, SourceBe0Enum, Unset] = UNSET - reported_dt: Union[Unset, None, datetime.datetime] = UNSET + reported_dt: Union[None, Unset, datetime.datetime] = UNSET mitigation: Union[Unset, str] = UNSET major_incident_state: Union[BlankEnum, MajorIncidentStateEnum, Unset] = UNSET - major_incident_start_dt: Union[Unset, None, datetime.datetime] = UNSET + major_incident_start_dt: Union[None, Unset, datetime.datetime] = UNSET 
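# --- Illustrative sketch (not part of the regenerated file above) ---
# With the re-typed fields above, cve_id is Union[None, Unset, str] and uuid is a
# uuid.UUID, so callers should treat "not returned" (Unset) differently from
# "returned as null" (None). The import prefix is assumed from the repository
# layout shown in this patch; the helper name is hypothetical.
from osidb_bindings.bindings.python_client.models.flaw import Flaw
from osidb_bindings.bindings.python_client.types import Unset


def describe_cve(flaw: Flaw) -> str:
    """Return a short label for the flaw's CVE field."""
    if isinstance(flaw.cve_id, Unset):
        return f"{flaw.uuid}: cve_id not present in the response"
    if flaw.cve_id is None:
        return f"{flaw.uuid}: CVE explicitly set to null"
    return f"{flaw.uuid}: {flaw.cve_id}"
# --- end sketch ---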
nist_cvss_validation: Union[BlankEnum, NistCvssValidationEnum, Unset] = UNSET group_key: Union[Unset, str] = UNSET owner: Union[Unset, str] = UNSET task_key: Union[Unset, str] = UNSET team_id: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + uuid: str = UNSET + if not isinstance(self.uuid, Unset): + uuid = str(self.uuid) - def to_dict(self) -> Dict[str, Any]: - uuid = self.uuid title = self.title - trackers: List[str] = UNSET + + trackers: list[str] = UNSET if not isinstance(self.trackers, Unset): trackers = self.trackers comment_zero = self.comment_zero - affects: List[Dict[str, Any]] = UNSET + + affects: list[dict[str, Any]] = UNSET if not isinstance(self.affects, Unset): affects = [] for affects_item_data in self.affects: - affects_item: Dict[str, Any] = UNSET + affects_item: dict[str, Any] = UNSET if not isinstance(affects_item_data, Unset): affects_item = affects_item_data.to_dict() affects.append(affects_item) - comments: List[Dict[str, Any]] = UNSET + comments: list[dict[str, Any]] = UNSET if not isinstance(self.comments, Unset): comments = [] for comments_item_data in self.comments: - comments_item: Dict[str, Any] = UNSET + comments_item: dict[str, Any] = UNSET if not isinstance(comments_item_data, Unset): comments_item = comments_item_data.to_dict() comments.append(comments_item) - package_versions: List[Dict[str, Any]] = UNSET + package_versions: list[dict[str, Any]] = UNSET if not isinstance(self.package_versions, Unset): package_versions = [] for package_versions_item_data in self.package_versions: - package_versions_item: Dict[str, Any] = UNSET + package_versions_item: dict[str, Any] = UNSET if not isinstance(package_versions_item_data, Unset): package_versions_item = package_versions_item_data.to_dict() package_versions.append(package_versions_item) - acknowledgments: List[Dict[str, Any]] = UNSET + acknowledgments: list[dict[str, Any]] = UNSET if not isinstance(self.acknowledgments, Unset): acknowledgments = [] for acknowledgments_item_data in self.acknowledgments: - acknowledgments_item: Dict[str, Any] = UNSET + acknowledgments_item: dict[str, Any] = UNSET if not isinstance(acknowledgments_item_data, Unset): acknowledgments_item = acknowledgments_item_data.to_dict() acknowledgments.append(acknowledgments_item) - references: List[Dict[str, Any]] = UNSET + references: list[dict[str, Any]] = UNSET if not isinstance(self.references, Unset): references = [] for references_item_data in self.references: - references_item: Dict[str, Any] = UNSET + references_item: dict[str, Any] = UNSET if not isinstance(references_item_data, Unset): references_item = references_item_data.to_dict() references.append(references_item) - cvss_scores: List[Dict[str, Any]] = UNSET + cvss_scores: list[dict[str, Any]] = UNSET if not isinstance(self.cvss_scores, Unset): cvss_scores = [] for cvss_scores_item_data in self.cvss_scores: - cvss_scores_item: Dict[str, Any] = UNSET + cvss_scores_item: dict[str, Any] = UNSET if not isinstance(cvss_scores_item_data, Unset): cvss_scores_item = cvss_scores_item_data.to_dict() cvss_scores.append(cvss_scores_item) embargoed = self.embargoed + created_dt: str = UNSET if not isinstance(self.created_dt, Unset): created_dt = self.created_dt.isoformat() @@ -142,65 +189,72 @@ def to_dict(self) -> Dict[str, Any]: if not isinstance(self.updated_dt, Unset): updated_dt = self.updated_dt.isoformat() - 
classification: Dict[str, Any] = UNSET + classification: dict[str, Any] = UNSET if not isinstance(self.classification, Unset): classification = self.classification.to_dict() - alerts: List[Dict[str, Any]] = UNSET + alerts: list[dict[str, Any]] = UNSET if not isinstance(self.alerts, Unset): alerts = [] for alerts_item_data in self.alerts: - alerts_item: Dict[str, Any] = UNSET + alerts_item: dict[str, Any] = UNSET if not isinstance(alerts_item_data, Unset): alerts_item = alerts_item_data.to_dict() alerts.append(alerts_item) - cve_id = self.cve_id + cve_id: Union[None, Unset, str] + if isinstance(self.cve_id, Unset): + cve_id = UNSET + else: + cve_id = self.cve_id + impact: Union[Unset, str] if isinstance(self.impact, Unset): impact = UNSET elif isinstance(self.impact, ImpactEnum): impact = UNSET if not isinstance(self.impact, Unset): - impact = ImpactEnum(self.impact).value else: impact = UNSET if not isinstance(self.impact, Unset): - impact = BlankEnum(self.impact).value - components: Union[Unset, List[str]] = UNSET + components: Union[Unset, list[str]] = UNSET if not isinstance(self.components, Unset): components = self.components cve_description = self.cve_description + requires_cve_description: Union[Unset, str] if isinstance(self.requires_cve_description, Unset): requires_cve_description = UNSET elif isinstance(self.requires_cve_description, RequiresCveDescriptionEnum): requires_cve_description = UNSET if not isinstance(self.requires_cve_description, Unset): - - requires_cve_description = RequiresCveDescriptionEnum( - self.requires_cve_description - ).value + requires_cve_description = RequiresCveDescriptionEnum(self.requires_cve_description).value else: requires_cve_description = UNSET if not isinstance(self.requires_cve_description, Unset): - - requires_cve_description = BlankEnum( - self.requires_cve_description - ).value + requires_cve_description = BlankEnum(self.requires_cve_description).value statement = self.statement + cwe_id = self.cwe_id - unembargo_dt: Union[Unset, None, str] = UNSET - if not isinstance(self.unembargo_dt, Unset): - unembargo_dt = self.unembargo_dt.isoformat() if self.unembargo_dt else None + + unembargo_dt: Union[None, Unset, str] + if isinstance(self.unembargo_dt, Unset): + unembargo_dt = UNSET + elif isinstance(self.unembargo_dt, datetime.datetime): + unembargo_dt = UNSET + if not isinstance(self.unembargo_dt, Unset): + unembargo_dt = self.unembargo_dt.isoformat() + + else: + unembargo_dt = self.unembargo_dt source: Union[Unset, str] if isinstance(self.source, Unset): @@ -208,44 +262,49 @@ def to_dict(self) -> Dict[str, Any]: elif isinstance(self.source, SourceBe0Enum): source = UNSET if not isinstance(self.source, Unset): - source = SourceBe0Enum(self.source).value else: source = UNSET if not isinstance(self.source, Unset): - source = BlankEnum(self.source).value - reported_dt: Union[Unset, None, str] = UNSET - if not isinstance(self.reported_dt, Unset): - reported_dt = self.reported_dt.isoformat() if self.reported_dt else None + reported_dt: Union[None, Unset, str] + if isinstance(self.reported_dt, Unset): + reported_dt = UNSET + elif isinstance(self.reported_dt, datetime.datetime): + reported_dt = UNSET + if not isinstance(self.reported_dt, Unset): + reported_dt = self.reported_dt.isoformat() + + else: + reported_dt = self.reported_dt mitigation = self.mitigation + major_incident_state: Union[Unset, str] if isinstance(self.major_incident_state, Unset): major_incident_state = UNSET elif isinstance(self.major_incident_state, MajorIncidentStateEnum): 
major_incident_state = UNSET if not isinstance(self.major_incident_state, Unset): - - major_incident_state = MajorIncidentStateEnum( - self.major_incident_state - ).value + major_incident_state = MajorIncidentStateEnum(self.major_incident_state).value else: major_incident_state = UNSET if not isinstance(self.major_incident_state, Unset): - major_incident_state = BlankEnum(self.major_incident_state).value - major_incident_start_dt: Union[Unset, None, str] = UNSET - if not isinstance(self.major_incident_start_dt, Unset): - major_incident_start_dt = ( - self.major_incident_start_dt.isoformat() - if self.major_incident_start_dt - else None - ) + major_incident_start_dt: Union[None, Unset, str] + if isinstance(self.major_incident_start_dt, Unset): + major_incident_start_dt = UNSET + elif isinstance(self.major_incident_start_dt, datetime.datetime): + major_incident_start_dt = UNSET + if not isinstance(self.major_incident_start_dt, Unset): + major_incident_start_dt = self.major_incident_start_dt.isoformat() + + else: + major_incident_start_dt = self.major_incident_start_dt nist_cvss_validation: Union[Unset, str] if isinstance(self.nist_cvss_validation, Unset): @@ -253,23 +312,22 @@ def to_dict(self) -> Dict[str, Any]: elif isinstance(self.nist_cvss_validation, NistCvssValidationEnum): nist_cvss_validation = UNSET if not isinstance(self.nist_cvss_validation, Unset): - - nist_cvss_validation = NistCvssValidationEnum( - self.nist_cvss_validation - ).value + nist_cvss_validation = NistCvssValidationEnum(self.nist_cvss_validation).value else: nist_cvss_validation = UNSET if not isinstance(self.nist_cvss_validation, Unset): - nist_cvss_validation = BlankEnum(self.nist_cvss_validation).value group_key = self.group_key + owner = self.owner + task_key = self.task_key + team_id = self.team_id - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(uuid, Unset): field_dict["uuid"] = uuid @@ -340,282 +398,267 @@ def to_dict(self) -> Dict[str, Any]: return field_dict - def to_multipart(self) -> Dict[str, Any]: - uuid = self.uuid if self.uuid is UNSET else (None, str(self.uuid), "text/plain") - title = ( - self.title if self.title is UNSET else (None, str(self.title), "text/plain") - ) - trackers: Union[Unset, Tuple[None, str, str]] = UNSET + def to_multipart(self) -> dict[str, Any]: + uuid: bytes = UNSET + if not isinstance(self.uuid, Unset): + uuid = str(self.uuid) + + title = (None, str(self.title).encode(), "text/plain") + + trackers: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.trackers, Unset): _temp_trackers = self.trackers - trackers = (None, json.dumps(_temp_trackers), "application/json") + trackers = (None, json.dumps(_temp_trackers).encode(), "application/json") - comment_zero = ( - self.comment_zero - if self.comment_zero is UNSET - else (None, str(self.comment_zero), "text/plain") - ) - affects: Union[Unset, Tuple[None, str, str]] = UNSET + comment_zero = (None, str(self.comment_zero).encode(), "text/plain") + + affects: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.affects, Unset): _temp_affects = [] for affects_item_data in self.affects: - affects_item: Dict[str, Any] = UNSET + affects_item: dict[str, Any] = UNSET if not isinstance(affects_item_data, Unset): affects_item = affects_item_data.to_dict() _temp_affects.append(affects_item) - affects = (None, json.dumps(_temp_affects), "application/json") + affects = (None, json.dumps(_temp_affects).encode(), "application/json") - 
comments: Union[Unset, Tuple[None, str, str]] = UNSET + comments: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.comments, Unset): _temp_comments = [] for comments_item_data in self.comments: - comments_item: Dict[str, Any] = UNSET + comments_item: dict[str, Any] = UNSET if not isinstance(comments_item_data, Unset): comments_item = comments_item_data.to_dict() _temp_comments.append(comments_item) - comments = (None, json.dumps(_temp_comments), "application/json") + comments = (None, json.dumps(_temp_comments).encode(), "application/json") - package_versions: Union[Unset, Tuple[None, str, str]] = UNSET + package_versions: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.package_versions, Unset): _temp_package_versions = [] for package_versions_item_data in self.package_versions: - package_versions_item: Dict[str, Any] = UNSET + package_versions_item: dict[str, Any] = UNSET if not isinstance(package_versions_item_data, Unset): package_versions_item = package_versions_item_data.to_dict() _temp_package_versions.append(package_versions_item) - package_versions = ( - None, - json.dumps(_temp_package_versions), - "application/json", - ) + package_versions = (None, json.dumps(_temp_package_versions).encode(), "application/json") - acknowledgments: Union[Unset, Tuple[None, str, str]] = UNSET + acknowledgments: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.acknowledgments, Unset): _temp_acknowledgments = [] for acknowledgments_item_data in self.acknowledgments: - acknowledgments_item: Dict[str, Any] = UNSET + acknowledgments_item: dict[str, Any] = UNSET if not isinstance(acknowledgments_item_data, Unset): acknowledgments_item = acknowledgments_item_data.to_dict() _temp_acknowledgments.append(acknowledgments_item) - acknowledgments = ( - None, - json.dumps(_temp_acknowledgments), - "application/json", - ) + acknowledgments = (None, json.dumps(_temp_acknowledgments).encode(), "application/json") - references: Union[Unset, Tuple[None, str, str]] = UNSET + references: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.references, Unset): _temp_references = [] for references_item_data in self.references: - references_item: Dict[str, Any] = UNSET + references_item: dict[str, Any] = UNSET if not isinstance(references_item_data, Unset): references_item = references_item_data.to_dict() _temp_references.append(references_item) - references = (None, json.dumps(_temp_references), "application/json") + references = (None, json.dumps(_temp_references).encode(), "application/json") - cvss_scores: Union[Unset, Tuple[None, str, str]] = UNSET + cvss_scores: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.cvss_scores, Unset): _temp_cvss_scores = [] for cvss_scores_item_data in self.cvss_scores: - cvss_scores_item: Dict[str, Any] = UNSET + cvss_scores_item: dict[str, Any] = UNSET if not isinstance(cvss_scores_item_data, Unset): cvss_scores_item = cvss_scores_item_data.to_dict() _temp_cvss_scores.append(cvss_scores_item) - cvss_scores = (None, json.dumps(_temp_cvss_scores), "application/json") + cvss_scores = (None, json.dumps(_temp_cvss_scores).encode(), "application/json") - embargoed = ( - self.embargoed - if self.embargoed is UNSET - else (None, str(self.embargoed), "text/plain") - ) - created_dt: str = UNSET + embargoed = (None, str(self.embargoed).encode(), "text/plain") + + created_dt: bytes = UNSET if not isinstance(self.created_dt, Unset): - created_dt = self.created_dt.isoformat() + created_dt = 
self.created_dt.isoformat().encode() - updated_dt: str = UNSET + updated_dt: bytes = UNSET if not isinstance(self.updated_dt, Unset): - updated_dt = self.updated_dt.isoformat() + updated_dt = self.updated_dt.isoformat().encode() - classification: Union[Unset, Tuple[None, str, str]] = UNSET + classification: tuple[None, bytes, str] = UNSET if not isinstance(self.classification, Unset): - classification = ( - None, - json.dumps(self.classification.to_dict()), - "application/json", - ) + classification = (None, json.dumps(self.classification.to_dict()).encode(), "application/json") - alerts: Union[Unset, Tuple[None, str, str]] = UNSET + alerts: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.alerts, Unset): _temp_alerts = [] for alerts_item_data in self.alerts: - alerts_item: Dict[str, Any] = UNSET + alerts_item: dict[str, Any] = UNSET if not isinstance(alerts_item_data, Unset): alerts_item = alerts_item_data.to_dict() _temp_alerts.append(alerts_item) - alerts = (None, json.dumps(_temp_alerts), "application/json") + alerts = (None, json.dumps(_temp_alerts).encode(), "application/json") + + cve_id: Union[Unset, tuple[None, bytes, str]] + + if isinstance(self.cve_id, Unset): + cve_id = UNSET + elif isinstance(self.cve_id, str): + cve_id = (None, str(self.cve_id).encode(), "text/plain") + else: + cve_id = (None, str(self.cve_id).encode(), "text/plain") + + impact: Union[Unset, tuple[None, bytes, str]] - cve_id = ( - self.cve_id - if self.cve_id is UNSET - else (None, str(self.cve_id), "text/plain") - ) - impact: Union[Unset, str] if isinstance(self.impact, Unset): impact = UNSET elif isinstance(self.impact, ImpactEnum): - impact = UNSET + impact: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.impact, Unset): - - impact = ImpactEnum(self.impact).value - + impact = (None, str(self.impact.value).encode(), "text/plain") + # CHANGE END (3) #} else: - impact = UNSET + impact: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.impact, Unset): + impact = (None, str(self.impact.value).encode(), "text/plain") + # CHANGE END (3) #} - impact = BlankEnum(self.impact).value - - components: Union[Unset, Tuple[None, str, str]] = UNSET + components: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.components, Unset): _temp_components = self.components - components = (None, json.dumps(_temp_components), "application/json") + components = (None, json.dumps(_temp_components).encode(), "application/json") cve_description = ( self.cve_description - if self.cve_description is UNSET - else (None, str(self.cve_description), "text/plain") + if isinstance(self.cve_description, Unset) + else (None, str(self.cve_description).encode(), "text/plain") ) - requires_cve_description: Union[Unset, str] + + requires_cve_description: Union[Unset, tuple[None, bytes, str]] + if isinstance(self.requires_cve_description, Unset): requires_cve_description = UNSET elif isinstance(self.requires_cve_description, RequiresCveDescriptionEnum): - requires_cve_description = UNSET + requires_cve_description: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.requires_cve_description, Unset): - - requires_cve_description = RequiresCveDescriptionEnum( - self.requires_cve_description - ).value - + requires_cve_description = (None, str(self.requires_cve_description.value).encode(), "text/plain") + # CHANGE END (3) #} else: - requires_cve_description = UNSET + requires_cve_description: Union[Unset, tuple[None, bytes, str]] = UNSET if not 
isinstance(self.requires_cve_description, Unset): - - requires_cve_description = BlankEnum( - self.requires_cve_description - ).value + requires_cve_description = (None, str(self.requires_cve_description.value).encode(), "text/plain") + # CHANGE END (3) #} statement = ( - self.statement - if self.statement is UNSET - else (None, str(self.statement), "text/plain") - ) - cwe_id = ( - self.cwe_id - if self.cwe_id is UNSET - else (None, str(self.cwe_id), "text/plain") + self.statement if isinstance(self.statement, Unset) else (None, str(self.statement).encode(), "text/plain") ) - unembargo_dt: Union[Unset, None, str] = UNSET - if not isinstance(self.unembargo_dt, Unset): - unembargo_dt = self.unembargo_dt.isoformat() if self.unembargo_dt else None - source: Union[Unset, str] + cwe_id = self.cwe_id if isinstance(self.cwe_id, Unset) else (None, str(self.cwe_id).encode(), "text/plain") + + unembargo_dt: Union[Unset, tuple[None, bytes, str]] + + if isinstance(self.unembargo_dt, Unset): + unembargo_dt = UNSET + elif isinstance(self.unembargo_dt, datetime.datetime): + unembargo_dt: bytes = UNSET + if not isinstance(self.unembargo_dt, Unset): + unembargo_dt = self.unembargo_dt.isoformat().encode() + else: + unembargo_dt = (None, str(self.unembargo_dt).encode(), "text/plain") + + source: Union[Unset, tuple[None, bytes, str]] + if isinstance(self.source, Unset): source = UNSET elif isinstance(self.source, SourceBe0Enum): - source = UNSET + source: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.source, Unset): - - source = SourceBe0Enum(self.source).value - + source = (None, str(self.source.value).encode(), "text/plain") + # CHANGE END (3) #} else: - source = UNSET + source: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.source, Unset): + source = (None, str(self.source.value).encode(), "text/plain") + # CHANGE END (3) #} - source = BlankEnum(self.source).value + reported_dt: Union[Unset, tuple[None, bytes, str]] - reported_dt: Union[Unset, None, str] = UNSET - if not isinstance(self.reported_dt, Unset): - reported_dt = self.reported_dt.isoformat() if self.reported_dt else None + if isinstance(self.reported_dt, Unset): + reported_dt = UNSET + elif isinstance(self.reported_dt, datetime.datetime): + reported_dt: bytes = UNSET + if not isinstance(self.reported_dt, Unset): + reported_dt = self.reported_dt.isoformat().encode() + else: + reported_dt = (None, str(self.reported_dt).encode(), "text/plain") mitigation = ( self.mitigation - if self.mitigation is UNSET - else (None, str(self.mitigation), "text/plain") + if isinstance(self.mitigation, Unset) + else (None, str(self.mitigation).encode(), "text/plain") ) - major_incident_state: Union[Unset, str] + + major_incident_state: Union[Unset, tuple[None, bytes, str]] + if isinstance(self.major_incident_state, Unset): major_incident_state = UNSET elif isinstance(self.major_incident_state, MajorIncidentStateEnum): - major_incident_state = UNSET + major_incident_state: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.major_incident_state, Unset): - - major_incident_state = MajorIncidentStateEnum( - self.major_incident_state - ).value - + major_incident_state = (None, str(self.major_incident_state.value).encode(), "text/plain") + # CHANGE END (3) #} else: - major_incident_state = UNSET + major_incident_state: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.major_incident_state, Unset): + major_incident_state = (None, str(self.major_incident_state.value).encode(), "text/plain") + # 
CHANGE END (3) #} - major_incident_state = BlankEnum(self.major_incident_state).value + major_incident_start_dt: Union[Unset, tuple[None, bytes, str]] - major_incident_start_dt: Union[Unset, None, str] = UNSET - if not isinstance(self.major_incident_start_dt, Unset): - major_incident_start_dt = ( - self.major_incident_start_dt.isoformat() - if self.major_incident_start_dt - else None - ) + if isinstance(self.major_incident_start_dt, Unset): + major_incident_start_dt = UNSET + elif isinstance(self.major_incident_start_dt, datetime.datetime): + major_incident_start_dt: bytes = UNSET + if not isinstance(self.major_incident_start_dt, Unset): + major_incident_start_dt = self.major_incident_start_dt.isoformat().encode() + else: + major_incident_start_dt = (None, str(self.major_incident_start_dt).encode(), "text/plain") + + nist_cvss_validation: Union[Unset, tuple[None, bytes, str]] - nist_cvss_validation: Union[Unset, str] if isinstance(self.nist_cvss_validation, Unset): nist_cvss_validation = UNSET elif isinstance(self.nist_cvss_validation, NistCvssValidationEnum): - nist_cvss_validation = UNSET + nist_cvss_validation: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.nist_cvss_validation, Unset): - - nist_cvss_validation = NistCvssValidationEnum( - self.nist_cvss_validation - ).value - + nist_cvss_validation = (None, str(self.nist_cvss_validation.value).encode(), "text/plain") + # CHANGE END (3) #} else: - nist_cvss_validation = UNSET + nist_cvss_validation: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.nist_cvss_validation, Unset): - - nist_cvss_validation = BlankEnum(self.nist_cvss_validation).value + nist_cvss_validation = (None, str(self.nist_cvss_validation.value).encode(), "text/plain") + # CHANGE END (3) #} group_key = ( - self.group_key - if self.group_key is UNSET - else (None, str(self.group_key), "text/plain") - ) - owner = ( - self.owner if self.owner is UNSET else (None, str(self.owner), "text/plain") + self.group_key if isinstance(self.group_key, Unset) else (None, str(self.group_key).encode(), "text/plain") ) + + owner = self.owner if isinstance(self.owner, Unset) else (None, str(self.owner).encode(), "text/plain") + task_key = ( - self.task_key - if self.task_key is UNSET - else (None, str(self.task_key), "text/plain") - ) - team_id = ( - self.team_id - if self.team_id is UNSET - else (None, str(self.team_id), "text/plain") + self.task_key if isinstance(self.task_key, Unset) else (None, str(self.task_key).encode(), "text/plain") ) - field_dict: Dict[str, Any] = {} - field_dict.update( - { - key: (None, str(value), "text/plain") - for key, value in self.additional_properties.items() - } - ) + team_id = self.team_id if isinstance(self.team_id, Unset) else (None, str(self.team_id).encode(), "text/plain") + + field_dict: dict[str, Any] = {} + for prop_name, prop in self.additional_properties.items(): + field_dict[prop_name] = (None, str(prop).encode(), "text/plain") + if not isinstance(uuid, Unset): field_dict["uuid"] = uuid if not isinstance(title, Unset): @@ -686,110 +729,112 @@ def to_multipart(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.affect import Affect + from ..models.alert import Alert + from ..models.comment import Comment + from ..models.flaw_acknowledgment import FlawAcknowledgment + from ..models.flaw_classification import FlawClassification + from ..models.flaw_cvss import 
FlawCVSS + from ..models.flaw_reference import FlawReference + from ..models.package import Package + d = src_dict.copy() - uuid = d.pop("uuid", UNSET) + # } + _uuid = d.pop("uuid", UNSET) + uuid: UUID + if isinstance(_uuid, Unset): + uuid = UNSET + else: + uuid = UUID(_uuid) title = d.pop("title", UNSET) - trackers = cast(List[str], d.pop("trackers", UNSET)) + trackers = cast(list[str], d.pop("trackers", UNSET)) comment_zero = d.pop("comment_zero", UNSET) affects = [] _affects = d.pop("affects", UNSET) - if _affects is UNSET: - affects = UNSET - else: - for affects_item_data in _affects or []: - _affects_item = affects_item_data - affects_item: Affect - if isinstance(_affects_item, Unset): - affects_item = UNSET - else: - affects_item = Affect.from_dict(_affects_item) + for affects_item_data in _affects or []: + # } + _affects_item = affects_item_data + affects_item: Affect + if isinstance(_affects_item, Unset): + affects_item = UNSET + else: + affects_item = Affect.from_dict(_affects_item) - affects.append(affects_item) + affects.append(affects_item) comments = [] _comments = d.pop("comments", UNSET) - if _comments is UNSET: - comments = UNSET - else: - for comments_item_data in _comments or []: - _comments_item = comments_item_data - comments_item: Comment - if isinstance(_comments_item, Unset): - comments_item = UNSET - else: - comments_item = Comment.from_dict(_comments_item) + for comments_item_data in _comments or []: + # } + _comments_item = comments_item_data + comments_item: Comment + if isinstance(_comments_item, Unset): + comments_item = UNSET + else: + comments_item = Comment.from_dict(_comments_item) - comments.append(comments_item) + comments.append(comments_item) package_versions = [] _package_versions = d.pop("package_versions", UNSET) - if _package_versions is UNSET: - package_versions = UNSET - else: - for package_versions_item_data in _package_versions or []: - _package_versions_item = package_versions_item_data - package_versions_item: Package - if isinstance(_package_versions_item, Unset): - package_versions_item = UNSET - else: - package_versions_item = Package.from_dict(_package_versions_item) + for package_versions_item_data in _package_versions or []: + # } + _package_versions_item = package_versions_item_data + package_versions_item: Package + if isinstance(_package_versions_item, Unset): + package_versions_item = UNSET + else: + package_versions_item = Package.from_dict(_package_versions_item) - package_versions.append(package_versions_item) + package_versions.append(package_versions_item) acknowledgments = [] _acknowledgments = d.pop("acknowledgments", UNSET) - if _acknowledgments is UNSET: - acknowledgments = UNSET - else: - for acknowledgments_item_data in _acknowledgments or []: - _acknowledgments_item = acknowledgments_item_data - acknowledgments_item: FlawAcknowledgment - if isinstance(_acknowledgments_item, Unset): - acknowledgments_item = UNSET - else: - acknowledgments_item = FlawAcknowledgment.from_dict( - _acknowledgments_item - ) + for acknowledgments_item_data in _acknowledgments or []: + # } + _acknowledgments_item = acknowledgments_item_data + acknowledgments_item: FlawAcknowledgment + if isinstance(_acknowledgments_item, Unset): + acknowledgments_item = UNSET + else: + acknowledgments_item = FlawAcknowledgment.from_dict(_acknowledgments_item) - acknowledgments.append(acknowledgments_item) + acknowledgments.append(acknowledgments_item) references = [] _references = d.pop("references", UNSET) - if _references is UNSET: - references = UNSET - else: 
- for references_item_data in _references or []: - _references_item = references_item_data - references_item: FlawReference - if isinstance(_references_item, Unset): - references_item = UNSET - else: - references_item = FlawReference.from_dict(_references_item) + for references_item_data in _references or []: + # } + _references_item = references_item_data + references_item: FlawReference + if isinstance(_references_item, Unset): + references_item = UNSET + else: + references_item = FlawReference.from_dict(_references_item) - references.append(references_item) + references.append(references_item) cvss_scores = [] _cvss_scores = d.pop("cvss_scores", UNSET) - if _cvss_scores is UNSET: - cvss_scores = UNSET - else: - for cvss_scores_item_data in _cvss_scores or []: - _cvss_scores_item = cvss_scores_item_data - cvss_scores_item: FlawCVSS - if isinstance(_cvss_scores_item, Unset): - cvss_scores_item = UNSET - else: - cvss_scores_item = FlawCVSS.from_dict(_cvss_scores_item) + for cvss_scores_item_data in _cvss_scores or []: + # } + _cvss_scores_item = cvss_scores_item_data + cvss_scores_item: FlawCVSS + if isinstance(_cvss_scores_item, Unset): + cvss_scores_item = UNSET + else: + cvss_scores_item = FlawCVSS.from_dict(_cvss_scores_item) - cvss_scores.append(cvss_scores_item) + cvss_scores.append(cvss_scores_item) embargoed = d.pop("embargoed", UNSET) + # } _created_dt = d.pop("created_dt", UNSET) created_dt: datetime.datetime if isinstance(_created_dt, Unset): @@ -797,6 +842,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: created_dt = isoparse(_created_dt) + # } _updated_dt = d.pop("updated_dt", UNSET) updated_dt: datetime.datetime if isinstance(_updated_dt, Unset): @@ -804,6 +850,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: updated_dt = isoparse(_updated_dt) + # } _classification = d.pop("classification", UNSET) classification: FlawClassification if isinstance(_classification, Unset): @@ -813,20 +860,25 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: alerts = [] _alerts = d.pop("alerts", UNSET) - if _alerts is UNSET: - alerts = UNSET - else: - for alerts_item_data in _alerts or []: - _alerts_item = alerts_item_data - alerts_item: Alert - if isinstance(_alerts_item, Unset): - alerts_item = UNSET - else: - alerts_item = Alert.from_dict(_alerts_item) + for alerts_item_data in _alerts or []: + # } + _alerts_item = alerts_item_data + alerts_item: Alert + if isinstance(_alerts_item, Unset): + alerts_item = UNSET + else: + alerts_item = Alert.from_dict(_alerts_item) - alerts.append(alerts_item) + alerts.append(alerts_item) - cve_id = d.pop("cve_id", UNSET) + def _parse_cve_id(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + cve_id = _parse_cve_id(d.pop("cve_id", UNSET)) def _parse_impact(data: object) -> Union[BlankEnum, ImpactEnum, Unset]: if isinstance(data, Unset): @@ -834,8 +886,9 @@ def _parse_impact(data: object) -> Union[BlankEnum, ImpactEnum, Unset]: try: if not isinstance(data, str): raise TypeError() + # } _impact_type_0 = data - impact_type_0: Union[Unset, ImpactEnum] + impact_type_0: ImpactEnum if isinstance(_impact_type_0, Unset): impact_type_0 = UNSET else: @@ -846,8 +899,9 @@ def _parse_impact(data: object) -> Union[BlankEnum, ImpactEnum, Unset]: pass if not isinstance(data, str): raise TypeError() + # } _impact_type_1 = data - impact_type_1: Union[Unset, BlankEnum] + impact_type_1: BlankEnum if 
isinstance(_impact_type_1, Unset): impact_type_1 = UNSET else: @@ -857,61 +911,67 @@ def _parse_impact(data: object) -> Union[BlankEnum, ImpactEnum, Unset]: impact = _parse_impact(d.pop("impact", UNSET)) - components = cast(List[str], d.pop("components", UNSET)) + components = cast(list[str], d.pop("components", UNSET)) cve_description = d.pop("cve_description", UNSET) - def _parse_requires_cve_description( - data: object, - ) -> Union[BlankEnum, RequiresCveDescriptionEnum, Unset]: + def _parse_requires_cve_description(data: object) -> Union[BlankEnum, RequiresCveDescriptionEnum, Unset]: if isinstance(data, Unset): return data try: if not isinstance(data, str): raise TypeError() + # } _requires_cve_description_type_0 = data - requires_cve_description_type_0: Union[ - Unset, RequiresCveDescriptionEnum - ] + requires_cve_description_type_0: RequiresCveDescriptionEnum if isinstance(_requires_cve_description_type_0, Unset): requires_cve_description_type_0 = UNSET else: - requires_cve_description_type_0 = RequiresCveDescriptionEnum( - _requires_cve_description_type_0 - ) + requires_cve_description_type_0 = RequiresCveDescriptionEnum(_requires_cve_description_type_0) return requires_cve_description_type_0 except: # noqa: E722 pass if not isinstance(data, str): raise TypeError() + # } _requires_cve_description_type_1 = data - requires_cve_description_type_1: Union[Unset, BlankEnum] + requires_cve_description_type_1: BlankEnum if isinstance(_requires_cve_description_type_1, Unset): requires_cve_description_type_1 = UNSET else: - requires_cve_description_type_1 = BlankEnum( - _requires_cve_description_type_1 - ) + requires_cve_description_type_1 = BlankEnum(_requires_cve_description_type_1) return requires_cve_description_type_1 - requires_cve_description = _parse_requires_cve_description( - d.pop("requires_cve_description", UNSET) - ) + requires_cve_description = _parse_requires_cve_description(d.pop("requires_cve_description", UNSET)) statement = d.pop("statement", UNSET) cwe_id = d.pop("cwe_id", UNSET) - _unembargo_dt = d.pop("unembargo_dt", UNSET) - unembargo_dt: Union[Unset, None, datetime.datetime] - if _unembargo_dt is None: - unembargo_dt = None - elif isinstance(_unembargo_dt, Unset): - unembargo_dt = UNSET - else: - unembargo_dt = isoparse(_unembargo_dt) + def _parse_unembargo_dt(data: object) -> Union[None, Unset, datetime.datetime]: + if data is None: + return data + if isinstance(data, Unset): + return data + try: + if not isinstance(data, str): + raise TypeError() + # } + _unembargo_dt_type_0 = data + unembargo_dt_type_0: datetime.datetime + if isinstance(_unembargo_dt_type_0, Unset): + unembargo_dt_type_0 = UNSET + else: + unembargo_dt_type_0 = isoparse(_unembargo_dt_type_0) + + return unembargo_dt_type_0 + except: # noqa: E722 + pass + return cast(Union[None, Unset, datetime.datetime], data) + + unembargo_dt = _parse_unembargo_dt(d.pop("unembargo_dt", UNSET)) def _parse_source(data: object) -> Union[BlankEnum, SourceBe0Enum, Unset]: if isinstance(data, Unset): @@ -919,8 +979,9 @@ def _parse_source(data: object) -> Union[BlankEnum, SourceBe0Enum, Unset]: try: if not isinstance(data, str): raise TypeError() + # } _source_type_0 = data - source_type_0: Union[Unset, SourceBe0Enum] + source_type_0: SourceBe0Enum if isinstance(_source_type_0, Unset): source_type_0 = UNSET else: @@ -931,8 +992,9 @@ def _parse_source(data: object) -> Union[BlankEnum, SourceBe0Enum, Unset]: pass if not isinstance(data, str): raise TypeError() + # } _source_type_1 = data - source_type_1: Union[Unset, 
BlankEnum] + source_type_1: BlankEnum if isinstance(_source_type_1, Unset): source_type_1 = UNSET else: @@ -942,41 +1004,53 @@ def _parse_source(data: object) -> Union[BlankEnum, SourceBe0Enum, Unset]: source = _parse_source(d.pop("source", UNSET)) - _reported_dt = d.pop("reported_dt", UNSET) - reported_dt: Union[Unset, None, datetime.datetime] - if _reported_dt is None: - reported_dt = None - elif isinstance(_reported_dt, Unset): - reported_dt = UNSET - else: - reported_dt = isoparse(_reported_dt) + def _parse_reported_dt(data: object) -> Union[None, Unset, datetime.datetime]: + if data is None: + return data + if isinstance(data, Unset): + return data + try: + if not isinstance(data, str): + raise TypeError() + # } + _reported_dt_type_0 = data + reported_dt_type_0: datetime.datetime + if isinstance(_reported_dt_type_0, Unset): + reported_dt_type_0 = UNSET + else: + reported_dt_type_0 = isoparse(_reported_dt_type_0) + + return reported_dt_type_0 + except: # noqa: E722 + pass + return cast(Union[None, Unset, datetime.datetime], data) + + reported_dt = _parse_reported_dt(d.pop("reported_dt", UNSET)) mitigation = d.pop("mitigation", UNSET) - def _parse_major_incident_state( - data: object, - ) -> Union[BlankEnum, MajorIncidentStateEnum, Unset]: + def _parse_major_incident_state(data: object) -> Union[BlankEnum, MajorIncidentStateEnum, Unset]: if isinstance(data, Unset): return data try: if not isinstance(data, str): raise TypeError() + # } _major_incident_state_type_0 = data - major_incident_state_type_0: Union[Unset, MajorIncidentStateEnum] + major_incident_state_type_0: MajorIncidentStateEnum if isinstance(_major_incident_state_type_0, Unset): major_incident_state_type_0 = UNSET else: - major_incident_state_type_0 = MajorIncidentStateEnum( - _major_incident_state_type_0 - ) + major_incident_state_type_0 = MajorIncidentStateEnum(_major_incident_state_type_0) return major_incident_state_type_0 except: # noqa: E722 pass if not isinstance(data, str): raise TypeError() + # } _major_incident_state_type_1 = data - major_incident_state_type_1: Union[Unset, BlankEnum] + major_incident_state_type_1: BlankEnum if isinstance(_major_incident_state_type_1, Unset): major_incident_state_type_1 = UNSET else: @@ -984,43 +1058,53 @@ def _parse_major_incident_state( return major_incident_state_type_1 - major_incident_state = _parse_major_incident_state( - d.pop("major_incident_state", UNSET) - ) + major_incident_state = _parse_major_incident_state(d.pop("major_incident_state", UNSET)) - _major_incident_start_dt = d.pop("major_incident_start_dt", UNSET) - major_incident_start_dt: Union[Unset, None, datetime.datetime] - if _major_incident_start_dt is None: - major_incident_start_dt = None - elif isinstance(_major_incident_start_dt, Unset): - major_incident_start_dt = UNSET - else: - major_incident_start_dt = isoparse(_major_incident_start_dt) + def _parse_major_incident_start_dt(data: object) -> Union[None, Unset, datetime.datetime]: + if data is None: + return data + if isinstance(data, Unset): + return data + try: + if not isinstance(data, str): + raise TypeError() + # } + _major_incident_start_dt_type_0 = data + major_incident_start_dt_type_0: datetime.datetime + if isinstance(_major_incident_start_dt_type_0, Unset): + major_incident_start_dt_type_0 = UNSET + else: + major_incident_start_dt_type_0 = isoparse(_major_incident_start_dt_type_0) - def _parse_nist_cvss_validation( - data: object, - ) -> Union[BlankEnum, NistCvssValidationEnum, Unset]: + return major_incident_start_dt_type_0 + except: # noqa: E722 
+ pass + return cast(Union[None, Unset, datetime.datetime], data) + + major_incident_start_dt = _parse_major_incident_start_dt(d.pop("major_incident_start_dt", UNSET)) + + def _parse_nist_cvss_validation(data: object) -> Union[BlankEnum, NistCvssValidationEnum, Unset]: if isinstance(data, Unset): return data try: if not isinstance(data, str): raise TypeError() + # } _nist_cvss_validation_type_0 = data - nist_cvss_validation_type_0: Union[Unset, NistCvssValidationEnum] + nist_cvss_validation_type_0: NistCvssValidationEnum if isinstance(_nist_cvss_validation_type_0, Unset): nist_cvss_validation_type_0 = UNSET else: - nist_cvss_validation_type_0 = NistCvssValidationEnum( - _nist_cvss_validation_type_0 - ) + nist_cvss_validation_type_0 = NistCvssValidationEnum(_nist_cvss_validation_type_0) return nist_cvss_validation_type_0 except: # noqa: E722 pass if not isinstance(data, str): raise TypeError() + # } _nist_cvss_validation_type_1 = data - nist_cvss_validation_type_1: Union[Unset, BlankEnum] + nist_cvss_validation_type_1: BlankEnum if isinstance(_nist_cvss_validation_type_1, Unset): nist_cvss_validation_type_1 = UNSET else: @@ -1028,9 +1112,7 @@ def _parse_nist_cvss_validation( return nist_cvss_validation_type_1 - nist_cvss_validation = _parse_nist_cvss_validation( - d.pop("nist_cvss_validation", UNSET) - ) + nist_cvss_validation = _parse_nist_cvss_validation(d.pop("nist_cvss_validation", UNSET)) group_key = d.pop("group_key", UNSET) @@ -1082,34 +1164,34 @@ def _parse_nist_cvss_validation( @staticmethod def get_fields(): return { - "uuid": str, + "uuid": UUID, "title": str, - "trackers": List[str], + "trackers": list[str], "comment_zero": str, - "affects": List[Affect], - "comments": List[Comment], - "package_versions": List[Package], - "acknowledgments": List[FlawAcknowledgment], - "references": List[FlawReference], - "cvss_scores": List[FlawCVSS], + "affects": list["Affect"], + "comments": list["Comment"], + "package_versions": list["Package"], + "acknowledgments": list["FlawAcknowledgment"], + "references": list["FlawReference"], + "cvss_scores": list["FlawCVSS"], "embargoed": bool, "created_dt": datetime.datetime, "updated_dt": datetime.datetime, "classification": FlawClassification, - "alerts": List[Alert], - "cve_id": str, + "alerts": list["Alert"], + "cve_id": Union[None, str], "impact": Union[BlankEnum, ImpactEnum], - "components": List[str], + "components": list[str], "cve_description": str, "requires_cve_description": Union[BlankEnum, RequiresCveDescriptionEnum], "statement": str, "cwe_id": str, - "unembargo_dt": datetime.datetime, + "unembargo_dt": Union[None, datetime.datetime], "source": Union[BlankEnum, SourceBe0Enum], - "reported_dt": datetime.datetime, + "reported_dt": Union[None, datetime.datetime], "mitigation": str, "major_incident_state": Union[BlankEnum, MajorIncidentStateEnum], - "major_incident_start_dt": datetime.datetime, + "major_incident_start_dt": Union[None, datetime.datetime], "nist_cvss_validation": Union[BlankEnum, NistCvssValidationEnum], "group_key": str, "owner": str, @@ -1118,7 +1200,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/flaw_acknowledgment.py b/osidb_bindings/bindings/python_client/models/flaw_acknowledgment.py index 87771cd..f540d2b 100644 --- a/osidb_bindings/bindings/python_client/models/flaw_acknowledgment.py +++ 
b/osidb_bindings/bindings/python_client/models/flaw_acknowledgment.py @@ -1,42 +1,71 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar +from typing import TYPE_CHECKING, Any, TypeVar +from uuid import UUID -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.alert import Alert from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.alert import Alert + + T = TypeVar("T", bound="FlawAcknowledgment") -@attr.s(auto_attribs=True) +@_attrs_define class FlawAcknowledgment(OSIDBModel): - """FlawAcknowledgment serializer""" + """FlawAcknowledgment serializer + + Attributes: + name (str): + affiliation (str): + from_upstream (bool): + flaw (UUID): + uuid (UUID): + embargoed (bool): The embargoed boolean attribute is technically read-only as it just indirectly modifies the + ACLs but is mandatory as it controls the access to the resource. + alerts (list['Alert']): + created_dt (datetime.datetime): + updated_dt (datetime.datetime): The updated_dt timestamp attribute is mandatory on update as it is used to + detect mit-air collisions. + """ name: str affiliation: str from_upstream: bool - flaw: str - uuid: str + flaw: UUID + uuid: UUID embargoed: bool - alerts: List[Alert] + alerts: list["Alert"] created_dt: datetime.datetime updated_dt: datetime.datetime - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: name = self.name + affiliation = self.affiliation + from_upstream = self.from_upstream - flaw = self.flaw - uuid = self.uuid + + flaw: str = UNSET + if not isinstance(self.flaw, Unset): + flaw = str(self.flaw) + + uuid: str = UNSET + if not isinstance(self.uuid, Unset): + uuid = str(self.uuid) + embargoed = self.embargoed - alerts: List[Dict[str, Any]] = UNSET + + alerts: list[dict[str, Any]] = UNSET if not isinstance(self.alerts, Unset): alerts = [] for alerts_item_data in self.alerts: - alerts_item: Dict[str, Any] = UNSET + alerts_item: dict[str, Any] = UNSET if not isinstance(alerts_item_data, Unset): alerts_item = alerts_item_data.to_dict() @@ -50,7 +79,7 @@ def to_dict(self) -> Dict[str, Any]: if not isinstance(self.updated_dt, Unset): updated_dt = self.updated_dt.isoformat() - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(name, Unset): field_dict["name"] = name @@ -74,7 +103,9 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.alert import Alert + d = src_dict.copy() name = d.pop("name", UNSET) @@ -82,27 +113,38 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: from_upstream = d.pop("from_upstream", UNSET) - flaw = d.pop("flaw", UNSET) + # } + _flaw = d.pop("flaw", UNSET) + flaw: UUID + if isinstance(_flaw, Unset): + flaw = UNSET + else: + flaw = UUID(_flaw) - uuid = d.pop("uuid", UNSET) + # } + _uuid = d.pop("uuid", UNSET) + uuid: UUID + if isinstance(_uuid, Unset): + uuid = UNSET + else: + uuid = UUID(_uuid) embargoed = d.pop("embargoed", UNSET) alerts = [] _alerts = d.pop("alerts", UNSET) - if _alerts is UNSET: - alerts = UNSET - else: - for alerts_item_data in _alerts or []: - _alerts_item = alerts_item_data - 
alerts_item: Alert - if isinstance(_alerts_item, Unset): - alerts_item = UNSET - else: - alerts_item = Alert.from_dict(_alerts_item) - - alerts.append(alerts_item) - + for alerts_item_data in _alerts or []: + # } + _alerts_item = alerts_item_data + alerts_item: Alert + if isinstance(_alerts_item, Unset): + alerts_item = UNSET + else: + alerts_item = Alert.from_dict(_alerts_item) + + alerts.append(alerts_item) + + # } _created_dt = d.pop("created_dt", UNSET) created_dt: datetime.datetime if isinstance(_created_dt, Unset): @@ -110,6 +152,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: created_dt = isoparse(_created_dt) + # } _updated_dt = d.pop("updated_dt", UNSET) updated_dt: datetime.datetime if isinstance(_updated_dt, Unset): @@ -138,16 +181,16 @@ def get_fields(): "name": str, "affiliation": str, "from_upstream": bool, - "flaw": str, - "uuid": str, + "flaw": UUID, + "uuid": UUID, "embargoed": bool, - "alerts": List[Alert], + "alerts": list["Alert"], "created_dt": datetime.datetime, "updated_dt": datetime.datetime, } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/flaw_acknowledgment_post.py b/osidb_bindings/bindings/python_client/models/flaw_acknowledgment_post.py index 983ff38..eb9f87a 100644 --- a/osidb_bindings/bindings/python_client/models/flaw_acknowledgment_post.py +++ b/osidb_bindings/bindings/python_client/models/flaw_acknowledgment_post.py @@ -1,40 +1,63 @@ import datetime import json -from typing import Any, Dict, List, Tuple, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union +from uuid import UUID -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.alert import Alert from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.alert import Alert + + T = TypeVar("T", bound="FlawAcknowledgmentPost") -@attr.s(auto_attribs=True) +@_attrs_define class FlawAcknowledgmentPost(OSIDBModel): - """FlawAcknowledgment serializer""" + """FlawAcknowledgment serializer + + Attributes: + name (str): + affiliation (str): + from_upstream (bool): + uuid (UUID): + embargoed (bool): The embargoed boolean attribute is technically read-only as it just indirectly modifies the + ACLs but is mandatory as it controls the access to the resource. 
+ alerts (list['Alert']): + created_dt (datetime.datetime): + """ name: str affiliation: str from_upstream: bool - uuid: str + uuid: UUID embargoed: bool - alerts: List[Alert] + alerts: list["Alert"] created_dt: datetime.datetime - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: name = self.name + affiliation = self.affiliation + from_upstream = self.from_upstream - uuid = self.uuid + + uuid: str = UNSET + if not isinstance(self.uuid, Unset): + uuid = str(self.uuid) + embargoed = self.embargoed - alerts: List[Dict[str, Any]] = UNSET + + alerts: list[dict[str, Any]] = UNSET if not isinstance(self.alerts, Unset): alerts = [] for alerts_item_data in self.alerts: - alerts_item: Dict[str, Any] = UNSET + alerts_item: dict[str, Any] = UNSET if not isinstance(alerts_item_data, Unset): alerts_item = alerts_item_data.to_dict() @@ -44,7 +67,7 @@ def to_dict(self) -> Dict[str, Any]: if not isinstance(self.created_dt, Unset): created_dt = self.created_dt.isoformat() - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(name, Unset): field_dict["name"] = name @@ -63,46 +86,38 @@ def to_dict(self) -> Dict[str, Any]: return field_dict - def to_multipart(self) -> Dict[str, Any]: - name = self.name if self.name is UNSET else (None, str(self.name), "text/plain") - affiliation = ( - self.affiliation - if self.affiliation is UNSET - else (None, str(self.affiliation), "text/plain") - ) - from_upstream = ( - self.from_upstream - if self.from_upstream is UNSET - else (None, str(self.from_upstream), "text/plain") - ) - uuid = self.uuid if self.uuid is UNSET else (None, str(self.uuid), "text/plain") - embargoed = ( - self.embargoed - if self.embargoed is UNSET - else (None, str(self.embargoed), "text/plain") - ) - alerts: Union[Unset, Tuple[None, str, str]] = UNSET + def to_multipart(self) -> dict[str, Any]: + name = (None, str(self.name).encode(), "text/plain") + + affiliation = (None, str(self.affiliation).encode(), "text/plain") + + from_upstream = (None, str(self.from_upstream).encode(), "text/plain") + + uuid: bytes = UNSET + if not isinstance(self.uuid, Unset): + uuid = str(self.uuid) + + embargoed = (None, str(self.embargoed).encode(), "text/plain") + + alerts: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.alerts, Unset): _temp_alerts = [] for alerts_item_data in self.alerts: - alerts_item: Dict[str, Any] = UNSET + alerts_item: dict[str, Any] = UNSET if not isinstance(alerts_item_data, Unset): alerts_item = alerts_item_data.to_dict() _temp_alerts.append(alerts_item) - alerts = (None, json.dumps(_temp_alerts), "application/json") + alerts = (None, json.dumps(_temp_alerts).encode(), "application/json") - created_dt: str = UNSET + created_dt: bytes = UNSET if not isinstance(self.created_dt, Unset): - created_dt = self.created_dt.isoformat() + created_dt = self.created_dt.isoformat().encode() + + field_dict: dict[str, Any] = {} + for prop_name, prop in self.additional_properties.items(): + field_dict[prop_name] = (None, str(prop).encode(), "text/plain") - field_dict: Dict[str, Any] = {} - field_dict.update( - { - key: (None, str(value), "text/plain") - for key, value in self.additional_properties.items() - } - ) if not isinstance(name, Unset): field_dict["name"] = name if not isinstance(affiliation, Unset): @@ -121,7 +136,9 @@ def 
to_multipart(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.alert import Alert + d = src_dict.copy() name = d.pop("name", UNSET) @@ -129,25 +146,30 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: from_upstream = d.pop("from_upstream", UNSET) - uuid = d.pop("uuid", UNSET) + # } + _uuid = d.pop("uuid", UNSET) + uuid: UUID + if isinstance(_uuid, Unset): + uuid = UNSET + else: + uuid = UUID(_uuid) embargoed = d.pop("embargoed", UNSET) alerts = [] _alerts = d.pop("alerts", UNSET) - if _alerts is UNSET: - alerts = UNSET - else: - for alerts_item_data in _alerts or []: - _alerts_item = alerts_item_data - alerts_item: Alert - if isinstance(_alerts_item, Unset): - alerts_item = UNSET - else: - alerts_item = Alert.from_dict(_alerts_item) - - alerts.append(alerts_item) - + for alerts_item_data in _alerts or []: + # } + _alerts_item = alerts_item_data + alerts_item: Alert + if isinstance(_alerts_item, Unset): + alerts_item = UNSET + else: + alerts_item = Alert.from_dict(_alerts_item) + + alerts.append(alerts_item) + + # } _created_dt = d.pop("created_dt", UNSET) created_dt: datetime.datetime if isinstance(_created_dt, Unset): @@ -174,14 +196,14 @@ def get_fields(): "name": str, "affiliation": str, "from_upstream": bool, - "uuid": str, + "uuid": UUID, "embargoed": bool, - "alerts": List[Alert], + "alerts": list["Alert"], "created_dt": datetime.datetime, } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/flaw_acknowledgment_put.py b/osidb_bindings/bindings/python_client/models/flaw_acknowledgment_put.py index 4d0854a..da28433 100644 --- a/osidb_bindings/bindings/python_client/models/flaw_acknowledgment_put.py +++ b/osidb_bindings/bindings/python_client/models/flaw_acknowledgment_put.py @@ -1,41 +1,66 @@ import datetime import json -from typing import Any, Dict, List, Tuple, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union +from uuid import UUID -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.alert import Alert from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.alert import Alert + + T = TypeVar("T", bound="FlawAcknowledgmentPut") -@attr.s(auto_attribs=True) +@_attrs_define class FlawAcknowledgmentPut(OSIDBModel): - """FlawAcknowledgment serializer""" + """FlawAcknowledgment serializer + + Attributes: + name (str): + affiliation (str): + from_upstream (bool): + uuid (UUID): + embargoed (bool): The embargoed boolean attribute is technically read-only as it just indirectly modifies the + ACLs but is mandatory as it controls the access to the resource. + alerts (list['Alert']): + created_dt (datetime.datetime): + updated_dt (datetime.datetime): The updated_dt timestamp attribute is mandatory on update as it is used to + detect mit-air collisions. 
+ """ name: str affiliation: str from_upstream: bool - uuid: str + uuid: UUID embargoed: bool - alerts: List[Alert] + alerts: list["Alert"] created_dt: datetime.datetime updated_dt: datetime.datetime - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: name = self.name + affiliation = self.affiliation + from_upstream = self.from_upstream - uuid = self.uuid + + uuid: str = UNSET + if not isinstance(self.uuid, Unset): + uuid = str(self.uuid) + embargoed = self.embargoed - alerts: List[Dict[str, Any]] = UNSET + + alerts: list[dict[str, Any]] = UNSET if not isinstance(self.alerts, Unset): alerts = [] for alerts_item_data in self.alerts: - alerts_item: Dict[str, Any] = UNSET + alerts_item: dict[str, Any] = UNSET if not isinstance(alerts_item_data, Unset): alerts_item = alerts_item_data.to_dict() @@ -49,7 +74,7 @@ def to_dict(self) -> Dict[str, Any]: if not isinstance(self.updated_dt, Unset): updated_dt = self.updated_dt.isoformat() - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(name, Unset): field_dict["name"] = name @@ -70,50 +95,42 @@ def to_dict(self) -> Dict[str, Any]: return field_dict - def to_multipart(self) -> Dict[str, Any]: - name = self.name if self.name is UNSET else (None, str(self.name), "text/plain") - affiliation = ( - self.affiliation - if self.affiliation is UNSET - else (None, str(self.affiliation), "text/plain") - ) - from_upstream = ( - self.from_upstream - if self.from_upstream is UNSET - else (None, str(self.from_upstream), "text/plain") - ) - uuid = self.uuid if self.uuid is UNSET else (None, str(self.uuid), "text/plain") - embargoed = ( - self.embargoed - if self.embargoed is UNSET - else (None, str(self.embargoed), "text/plain") - ) - alerts: Union[Unset, Tuple[None, str, str]] = UNSET + def to_multipart(self) -> dict[str, Any]: + name = (None, str(self.name).encode(), "text/plain") + + affiliation = (None, str(self.affiliation).encode(), "text/plain") + + from_upstream = (None, str(self.from_upstream).encode(), "text/plain") + + uuid: bytes = UNSET + if not isinstance(self.uuid, Unset): + uuid = str(self.uuid) + + embargoed = (None, str(self.embargoed).encode(), "text/plain") + + alerts: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.alerts, Unset): _temp_alerts = [] for alerts_item_data in self.alerts: - alerts_item: Dict[str, Any] = UNSET + alerts_item: dict[str, Any] = UNSET if not isinstance(alerts_item_data, Unset): alerts_item = alerts_item_data.to_dict() _temp_alerts.append(alerts_item) - alerts = (None, json.dumps(_temp_alerts), "application/json") + alerts = (None, json.dumps(_temp_alerts).encode(), "application/json") - created_dt: str = UNSET + created_dt: bytes = UNSET if not isinstance(self.created_dt, Unset): - created_dt = self.created_dt.isoformat() + created_dt = self.created_dt.isoformat().encode() - updated_dt: str = UNSET + updated_dt: bytes = UNSET if not isinstance(self.updated_dt, Unset): - updated_dt = self.updated_dt.isoformat() + updated_dt = self.updated_dt.isoformat().encode() + + field_dict: dict[str, Any] = {} + for prop_name, prop in self.additional_properties.items(): + field_dict[prop_name] = (None, str(prop).encode(), "text/plain") - field_dict: Dict[str, Any] = {} - field_dict.update( - { - key: (None, str(value), "text/plain") - for key, value in 
self.additional_properties.items() - } - ) if not isinstance(name, Unset): field_dict["name"] = name if not isinstance(affiliation, Unset): @@ -134,7 +151,9 @@ def to_multipart(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.alert import Alert + d = src_dict.copy() name = d.pop("name", UNSET) @@ -142,25 +161,30 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: from_upstream = d.pop("from_upstream", UNSET) - uuid = d.pop("uuid", UNSET) + # } + _uuid = d.pop("uuid", UNSET) + uuid: UUID + if isinstance(_uuid, Unset): + uuid = UNSET + else: + uuid = UUID(_uuid) embargoed = d.pop("embargoed", UNSET) alerts = [] _alerts = d.pop("alerts", UNSET) - if _alerts is UNSET: - alerts = UNSET - else: - for alerts_item_data in _alerts or []: - _alerts_item = alerts_item_data - alerts_item: Alert - if isinstance(_alerts_item, Unset): - alerts_item = UNSET - else: - alerts_item = Alert.from_dict(_alerts_item) - - alerts.append(alerts_item) - + for alerts_item_data in _alerts or []: + # } + _alerts_item = alerts_item_data + alerts_item: Alert + if isinstance(_alerts_item, Unset): + alerts_item = UNSET + else: + alerts_item = Alert.from_dict(_alerts_item) + + alerts.append(alerts_item) + + # } _created_dt = d.pop("created_dt", UNSET) created_dt: datetime.datetime if isinstance(_created_dt, Unset): @@ -168,6 +192,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: created_dt = isoparse(_created_dt) + # } _updated_dt = d.pop("updated_dt", UNSET) updated_dt: datetime.datetime if isinstance(_updated_dt, Unset): @@ -195,15 +220,15 @@ def get_fields(): "name": str, "affiliation": str, "from_upstream": bool, - "uuid": str, + "uuid": UUID, "embargoed": bool, - "alerts": List[Alert], + "alerts": list["Alert"], "created_dt": datetime.datetime, "updated_dt": datetime.datetime, } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/flaw_classification.py b/osidb_bindings/bindings/python_client/models/flaw_classification.py index a394e4f..30f8745 100644 --- a/osidb_bindings/bindings/python_client/models/flaw_classification.py +++ b/osidb_bindings/bindings/python_client/models/flaw_classification.py @@ -1,6 +1,7 @@ -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import Any, TypeVar, Union -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from ..models.flaw_classification_state import FlawClassificationState from ..types import UNSET, OSIDBModel, Unset @@ -8,22 +9,26 @@ T = TypeVar("T", bound="FlawClassification") -@attr.s(auto_attribs=True) +@_attrs_define class FlawClassification(OSIDBModel): - """ """ + """ + Attributes: + workflow (Union[Unset, str]): + state (Union[Unset, FlawClassificationState]): + """ workflow: Union[Unset, str] = UNSET state: Union[Unset, FlawClassificationState] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: workflow = self.workflow + state: Union[Unset, str] = UNSET if not isinstance(self.state, Unset): - state = FlawClassificationState(self.state).value - field_dict: 
Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(workflow, Unset): field_dict["workflow"] = workflow @@ -33,10 +38,11 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: d = src_dict.copy() workflow = d.pop("workflow", UNSET) + # } _state = d.pop("state", UNSET) state: Union[Unset, FlawClassificationState] if isinstance(_state, Unset): @@ -60,7 +66,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/flaw_classification_state.py b/osidb_bindings/bindings/python_client/models/flaw_classification_state.py index 1810a64..f7032c4 100644 --- a/osidb_bindings/bindings/python_client/models/flaw_classification_state.py +++ b/osidb_bindings/bindings/python_client/models/flaw_classification_state.py @@ -2,13 +2,13 @@ class FlawClassificationState(str, Enum): - VALUE_0 = "" + DONE = "DONE" NEW = "NEW" - TRIAGE = "TRIAGE" PRE_SECONDARY_ASSESSMENT = "PRE_SECONDARY_ASSESSMENT" - SECONDARY_ASSESSMENT = "SECONDARY_ASSESSMENT" - DONE = "DONE" REJECTED = "REJECTED" + SECONDARY_ASSESSMENT = "SECONDARY_ASSESSMENT" + TRIAGE = "TRIAGE" + VALUE_0 = "" def __str__(self) -> str: return str(self.value) diff --git a/osidb_bindings/bindings/python_client/models/flaw_comment.py b/osidb_bindings/bindings/python_client/models/flaw_comment.py index a6fdecf..e6af29a 100644 --- a/osidb_bindings/bindings/python_client/models/flaw_comment.py +++ b/osidb_bindings/bindings/python_client/models/flaw_comment.py @@ -1,42 +1,71 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union +from uuid import UUID -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.alert import Alert from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.alert import Alert + + T = TypeVar("T", bound="FlawComment") -@attr.s(auto_attribs=True) +@_attrs_define class FlawComment(OSIDBModel): - """FlawComment serializer for use by flaw_comments endpoint""" - - flaw: str + """FlawComment serializer for use by flaw_comments endpoint + + Attributes: + flaw (UUID): + text (str): + uuid (UUID): + external_system_id (str): + alerts (list['Alert']): + created_dt (datetime.datetime): + updated_dt (datetime.datetime): The updated_dt timestamp attribute is mandatory on update as it is used to + detect mit-air collisions. + embargoed (bool): The embargoed boolean attribute is technically read-only as it just indirectly modifies the + ACLs but is mandatory as it controls the access to the resource. 
+ order (Union[Unset, int]): + creator (Union[Unset, str]): + is_private (Union[Unset, bool]): + """ + + flaw: UUID text: str - uuid: str + uuid: UUID external_system_id: str - alerts: List[Alert] + alerts: list["Alert"] created_dt: datetime.datetime updated_dt: datetime.datetime embargoed: bool order: Union[Unset, int] = UNSET creator: Union[Unset, str] = UNSET is_private: Union[Unset, bool] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + flaw: str = UNSET + if not isinstance(self.flaw, Unset): + flaw = str(self.flaw) - def to_dict(self) -> Dict[str, Any]: - flaw = self.flaw text = self.text - uuid = self.uuid + + uuid: str = UNSET + if not isinstance(self.uuid, Unset): + uuid = str(self.uuid) + external_system_id = self.external_system_id - alerts: List[Dict[str, Any]] = UNSET + + alerts: list[dict[str, Any]] = UNSET if not isinstance(self.alerts, Unset): alerts = [] for alerts_item_data in self.alerts: - alerts_item: Dict[str, Any] = UNSET + alerts_item: dict[str, Any] = UNSET if not isinstance(alerts_item_data, Unset): alerts_item = alerts_item_data.to_dict() @@ -51,11 +80,14 @@ def to_dict(self) -> Dict[str, Any]: updated_dt = self.updated_dt.isoformat() embargoed = self.embargoed + order = self.order + creator = self.creator + is_private = self.is_private - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(flaw, Unset): field_dict["flaw"] = flaw @@ -83,31 +115,44 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.alert import Alert + d = src_dict.copy() - flaw = d.pop("flaw", UNSET) + # } + _flaw = d.pop("flaw", UNSET) + flaw: UUID + if isinstance(_flaw, Unset): + flaw = UNSET + else: + flaw = UUID(_flaw) text = d.pop("text", UNSET) - uuid = d.pop("uuid", UNSET) + # } + _uuid = d.pop("uuid", UNSET) + uuid: UUID + if isinstance(_uuid, Unset): + uuid = UNSET + else: + uuid = UUID(_uuid) external_system_id = d.pop("external_system_id", UNSET) alerts = [] _alerts = d.pop("alerts", UNSET) - if _alerts is UNSET: - alerts = UNSET - else: - for alerts_item_data in _alerts or []: - _alerts_item = alerts_item_data - alerts_item: Alert - if isinstance(_alerts_item, Unset): - alerts_item = UNSET - else: - alerts_item = Alert.from_dict(_alerts_item) - - alerts.append(alerts_item) - + for alerts_item_data in _alerts or []: + # } + _alerts_item = alerts_item_data + alerts_item: Alert + if isinstance(_alerts_item, Unset): + alerts_item = UNSET + else: + alerts_item = Alert.from_dict(_alerts_item) + + alerts.append(alerts_item) + + # } _created_dt = d.pop("created_dt", UNSET) created_dt: datetime.datetime if isinstance(_created_dt, Unset): @@ -115,6 +160,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: created_dt = isoparse(_created_dt) + # } _updated_dt = d.pop("updated_dt", UNSET) updated_dt: datetime.datetime if isinstance(_updated_dt, Unset): @@ -150,11 +196,11 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: @staticmethod def get_fields(): return { - "flaw": str, + "flaw": UUID, "text": str, - "uuid": str, + "uuid": UUID, "external_system_id": str, - "alerts": List[Alert], + "alerts": list["Alert"], "created_dt": datetime.datetime, "updated_dt": datetime.datetime, 
"embargoed": bool, @@ -164,7 +210,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/flaw_comment_post.py b/osidb_bindings/bindings/python_client/models/flaw_comment_post.py index 90a6689..43b8823 100644 --- a/osidb_bindings/bindings/python_client/models/flaw_comment_post.py +++ b/osidb_bindings/bindings/python_client/models/flaw_comment_post.py @@ -1,37 +1,57 @@ import datetime import json -from typing import Any, Dict, List, Tuple, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union +from uuid import UUID -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.alert import Alert from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.alert import Alert + + T = TypeVar("T", bound="FlawCommentPost") -@attr.s(auto_attribs=True) +@_attrs_define class FlawCommentPost(OSIDBModel): - """FlawComment serializer for use by flaw_comments endpoint""" + """FlawComment serializer for use by flaw_comments endpoint + + Attributes: + text (str): + uuid (UUID): + alerts (list['Alert']): + created_dt (datetime.datetime): + embargoed (bool): The embargoed boolean attribute is technically read-only as it just indirectly modifies the + ACLs but is mandatory as it controls the access to the resource. + creator (Union[Unset, str]): + is_private (Union[Unset, bool]): + """ text: str - uuid: str - alerts: List[Alert] + uuid: UUID + alerts: list["Alert"] created_dt: datetime.datetime embargoed: bool creator: Union[Unset, str] = UNSET is_private: Union[Unset, bool] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: text = self.text - uuid = self.uuid - alerts: List[Dict[str, Any]] = UNSET + + uuid: str = UNSET + if not isinstance(self.uuid, Unset): + uuid = str(self.uuid) + + alerts: list[dict[str, Any]] = UNSET if not isinstance(self.alerts, Unset): alerts = [] for alerts_item_data in self.alerts: - alerts_item: Dict[str, Any] = UNSET + alerts_item: dict[str, Any] = UNSET if not isinstance(alerts_item_data, Unset): alerts_item = alerts_item_data.to_dict() @@ -42,10 +62,12 @@ def to_dict(self) -> Dict[str, Any]: created_dt = self.created_dt.isoformat() embargoed = self.embargoed + creator = self.creator + is_private = self.is_private - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(text, Unset): field_dict["text"] = text @@ -64,47 +86,42 @@ def to_dict(self) -> Dict[str, Any]: return field_dict - def to_multipart(self) -> Dict[str, Any]: - text = self.text if self.text is UNSET else (None, str(self.text), "text/plain") - uuid = self.uuid if self.uuid is UNSET else (None, str(self.uuid), "text/plain") - alerts: Union[Unset, Tuple[None, str, str]] = UNSET + def to_multipart(self) -> dict[str, Any]: + text = (None, str(self.text).encode(), "text/plain") + + uuid: bytes = UNSET + if not isinstance(self.uuid, Unset): + uuid = str(self.uuid) + + alerts: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.alerts, Unset): _temp_alerts = [] for alerts_item_data in self.alerts: - 
alerts_item: Dict[str, Any] = UNSET + alerts_item: dict[str, Any] = UNSET if not isinstance(alerts_item_data, Unset): alerts_item = alerts_item_data.to_dict() _temp_alerts.append(alerts_item) - alerts = (None, json.dumps(_temp_alerts), "application/json") + alerts = (None, json.dumps(_temp_alerts).encode(), "application/json") - created_dt: str = UNSET + created_dt: bytes = UNSET if not isinstance(self.created_dt, Unset): - created_dt = self.created_dt.isoformat() + created_dt = self.created_dt.isoformat().encode() + + embargoed = (None, str(self.embargoed).encode(), "text/plain") + + creator = self.creator if isinstance(self.creator, Unset) else (None, str(self.creator).encode(), "text/plain") - embargoed = ( - self.embargoed - if self.embargoed is UNSET - else (None, str(self.embargoed), "text/plain") - ) - creator = ( - self.creator - if self.creator is UNSET - else (None, str(self.creator), "text/plain") - ) is_private = ( self.is_private - if self.is_private is UNSET - else (None, str(self.is_private), "text/plain") + if isinstance(self.is_private, Unset) + else (None, str(self.is_private).encode(), "text/plain") ) - field_dict: Dict[str, Any] = {} - field_dict.update( - { - key: (None, str(value), "text/plain") - for key, value in self.additional_properties.items() - } - ) + field_dict: dict[str, Any] = {} + for prop_name, prop in self.additional_properties.items(): + field_dict[prop_name] = (None, str(prop).encode(), "text/plain") + if not isinstance(text, Unset): field_dict["text"] = text if not isinstance(uuid, Unset): @@ -123,27 +140,34 @@ def to_multipart(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.alert import Alert + d = src_dict.copy() text = d.pop("text", UNSET) - uuid = d.pop("uuid", UNSET) + # } + _uuid = d.pop("uuid", UNSET) + uuid: UUID + if isinstance(_uuid, Unset): + uuid = UNSET + else: + uuid = UUID(_uuid) alerts = [] _alerts = d.pop("alerts", UNSET) - if _alerts is UNSET: - alerts = UNSET - else: - for alerts_item_data in _alerts or []: - _alerts_item = alerts_item_data - alerts_item: Alert - if isinstance(_alerts_item, Unset): - alerts_item = UNSET - else: - alerts_item = Alert.from_dict(_alerts_item) - - alerts.append(alerts_item) - + for alerts_item_data in _alerts or []: + # } + _alerts_item = alerts_item_data + alerts_item: Alert + if isinstance(_alerts_item, Unset): + alerts_item = UNSET + else: + alerts_item = Alert.from_dict(_alerts_item) + + alerts.append(alerts_item) + + # } _created_dt = d.pop("created_dt", UNSET) created_dt: datetime.datetime if isinstance(_created_dt, Unset): @@ -174,8 +198,8 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: def get_fields(): return { "text": str, - "uuid": str, - "alerts": List[Alert], + "uuid": UUID, + "alerts": list["Alert"], "created_dt": datetime.datetime, "embargoed": bool, "creator": str, @@ -183,7 +207,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/flaw_cvss.py b/osidb_bindings/bindings/python_client/models/flaw_cvss.py index 84a3196..8474c99 100644 --- a/osidb_bindings/bindings/python_client/models/flaw_cvss.py +++ b/osidb_bindings/bindings/python_client/models/flaw_cvss.py @@ -1,54 +1,79 @@ import datetime -from typing import Any, 
Dict, List, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union, cast +from uuid import UUID -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.alert import Alert from ..models.cvss_version_enum import CvssVersionEnum from ..models.issuer_enum import IssuerEnum from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.alert import Alert + + T = TypeVar("T", bound="FlawCVSS") -@attr.s(auto_attribs=True) +@_attrs_define class FlawCVSS(OSIDBModel): - """FlawCVSS serializer""" + """FlawCVSS serializer + + Attributes: + cvss_version (CvssVersionEnum): + issuer (IssuerEnum): + score (float): + uuid (UUID): + vector (str): + embargoed (bool): The embargoed boolean attribute is technically read-only as it just indirectly modifies the + ACLs but is mandatory as it controls the access to the resource. + alerts (list['Alert']): + created_dt (datetime.datetime): + updated_dt (datetime.datetime): The updated_dt timestamp attribute is mandatory on update as it is used to + detect mid-air collisions. + flaw (Union[Unset, UUID]): + comment (Union[None, Unset, str]): + """ cvss_version: CvssVersionEnum issuer: IssuerEnum score: float - uuid: str + uuid: UUID vector: str embargoed: bool - alerts: List[Alert] + alerts: list["Alert"] created_dt: datetime.datetime updated_dt: datetime.datetime - flaw: Union[Unset, str] = UNSET - comment: Union[Unset, None, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + flaw: Union[Unset, UUID] = UNSET + comment: Union[None, Unset, str] = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: cvss_version: str = UNSET if not isinstance(self.cvss_version, Unset): - cvss_version = CvssVersionEnum(self.cvss_version).value issuer: str = UNSET if not isinstance(self.issuer, Unset): - issuer = IssuerEnum(self.issuer).value score = self.score - uuid = self.uuid + + uuid: str = UNSET + if not isinstance(self.uuid, Unset): + uuid = str(self.uuid) + vector = self.vector + embargoed = self.embargoed - alerts: List[Dict[str, Any]] = UNSET + + alerts: list[dict[str, Any]] = UNSET if not isinstance(self.alerts, Unset): alerts = [] for alerts_item_data in self.alerts: - alerts_item: Dict[str, Any] = UNSET + alerts_item: dict[str, Any] = UNSET if not isinstance(alerts_item_data, Unset): alerts_item = alerts_item_data.to_dict() @@ -62,10 +87,17 @@ def to_dict(self) -> Dict[str, Any]: if not isinstance(self.updated_dt, Unset): updated_dt = self.updated_dt.isoformat() - flaw = self.flaw - comment = self.comment + flaw: Union[Unset, str] = UNSET + if not isinstance(self.flaw, Unset): + flaw = str(self.flaw) + + comment: Union[None, Unset, str] + if isinstance(self.comment, Unset): + comment = UNSET + else: + comment = self.comment - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(cvss_version, Unset): field_dict["cvss_version"] = cvss_version @@ -93,8 +125,11 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.alert import Alert + d = src_dict.copy() + # } _cvss_version = d.pop("cvss_version", UNSET) cvss_version: CvssVersionEnum if isinstance(_cvss_version, Unset): @@
-102,6 +137,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: cvss_version = CvssVersionEnum(_cvss_version) + # } _issuer = d.pop("issuer", UNSET) issuer: IssuerEnum if isinstance(_issuer, Unset): @@ -111,7 +147,13 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: score = d.pop("score", UNSET) - uuid = d.pop("uuid", UNSET) + # } + _uuid = d.pop("uuid", UNSET) + uuid: UUID + if isinstance(_uuid, Unset): + uuid = UNSET + else: + uuid = UUID(_uuid) vector = d.pop("vector", UNSET) @@ -119,19 +161,18 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: alerts = [] _alerts = d.pop("alerts", UNSET) - if _alerts is UNSET: - alerts = UNSET - else: - for alerts_item_data in _alerts or []: - _alerts_item = alerts_item_data - alerts_item: Alert - if isinstance(_alerts_item, Unset): - alerts_item = UNSET - else: - alerts_item = Alert.from_dict(_alerts_item) - - alerts.append(alerts_item) - + for alerts_item_data in _alerts or []: + # } + _alerts_item = alerts_item_data + alerts_item: Alert + if isinstance(_alerts_item, Unset): + alerts_item = UNSET + else: + alerts_item = Alert.from_dict(_alerts_item) + + alerts.append(alerts_item) + + # } _created_dt = d.pop("created_dt", UNSET) created_dt: datetime.datetime if isinstance(_created_dt, Unset): @@ -139,6 +180,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: created_dt = isoparse(_created_dt) + # } _updated_dt = d.pop("updated_dt", UNSET) updated_dt: datetime.datetime if isinstance(_updated_dt, Unset): @@ -146,9 +188,22 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: updated_dt = isoparse(_updated_dt) - flaw = d.pop("flaw", UNSET) + # } + _flaw = d.pop("flaw", UNSET) + flaw: Union[Unset, UUID] + if isinstance(_flaw, Unset): + flaw = UNSET + else: + flaw = UUID(_flaw) + + def _parse_comment(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) - comment = d.pop("comment", UNSET) + comment = _parse_comment(d.pop("comment", UNSET)) flaw_cvss = cls( cvss_version=cvss_version, @@ -173,18 +228,18 @@ def get_fields(): "cvss_version": CvssVersionEnum, "issuer": IssuerEnum, "score": float, - "uuid": str, + "uuid": UUID, "vector": str, "embargoed": bool, - "alerts": List[Alert], + "alerts": list["Alert"], "created_dt": datetime.datetime, "updated_dt": datetime.datetime, - "flaw": str, - "comment": str, + "flaw": UUID, + "comment": Union[None, str], } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/flaw_cvss_post.py b/osidb_bindings/bindings/python_client/models/flaw_cvss_post.py index c6c486e..09533b5 100644 --- a/osidb_bindings/bindings/python_client/models/flaw_cvss_post.py +++ b/osidb_bindings/bindings/python_client/models/flaw_cvss_post.py @@ -1,53 +1,75 @@ import datetime import json -from typing import Any, Dict, List, Tuple, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union, cast +from uuid import UUID -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.alert import Alert from ..models.cvss_version_enum import CvssVersionEnum from ..models.issuer_enum import IssuerEnum from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.alert 
import Alert + + T = TypeVar("T", bound="FlawCVSSPost") -@attr.s(auto_attribs=True) +@_attrs_define class FlawCVSSPost(OSIDBModel): - """FlawCVSS serializer""" + """FlawCVSS serializer + + Attributes: + cvss_version (CvssVersionEnum): + issuer (IssuerEnum): + score (float): + uuid (UUID): + vector (str): + embargoed (bool): The embargoed boolean attribute is technically read-only as it just indirectly modifies the + ACLs but is mandatory as it controls the access to the resource. + alerts (list['Alert']): + created_dt (datetime.datetime): + comment (Union[None, Unset, str]): + """ cvss_version: CvssVersionEnum issuer: IssuerEnum score: float - uuid: str + uuid: UUID vector: str embargoed: bool - alerts: List[Alert] + alerts: list["Alert"] created_dt: datetime.datetime - comment: Union[Unset, None, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + comment: Union[None, Unset, str] = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: cvss_version: str = UNSET if not isinstance(self.cvss_version, Unset): - cvss_version = CvssVersionEnum(self.cvss_version).value issuer: str = UNSET if not isinstance(self.issuer, Unset): - issuer = IssuerEnum(self.issuer).value score = self.score - uuid = self.uuid + + uuid: str = UNSET + if not isinstance(self.uuid, Unset): + uuid = str(self.uuid) + vector = self.vector + embargoed = self.embargoed - alerts: List[Dict[str, Any]] = UNSET + + alerts: list[dict[str, Any]] = UNSET if not isinstance(self.alerts, Unset): alerts = [] for alerts_item_data in self.alerts: - alerts_item: Dict[str, Any] = UNSET + alerts_item: dict[str, Any] = UNSET if not isinstance(alerts_item_data, Unset): alerts_item = alerts_item_data.to_dict() @@ -57,9 +79,13 @@ def to_dict(self) -> Dict[str, Any]: if not isinstance(self.created_dt, Unset): created_dt = self.created_dt.isoformat() - comment = self.comment + comment: Union[None, Unset, str] + if isinstance(self.comment, Unset): + comment = UNSET + else: + comment = self.comment - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(cvss_version, Unset): field_dict["cvss_version"] = cvss_version @@ -82,59 +108,55 @@ def to_dict(self) -> Dict[str, Any]: return field_dict - def to_multipart(self) -> Dict[str, Any]: - cvss_version: Union[Unset, Tuple[None, str, str]] = UNSET + def to_multipart(self) -> dict[str, Any]: + cvss_version: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.cvss_version, Unset): + cvss_version = (None, str(self.cvss_version.value).encode(), "text/plain") + # CHANGE END (3) #} - cvss_version = CvssVersionEnum(self.cvss_version).value - - issuer: Union[Unset, Tuple[None, str, str]] = UNSET + issuer: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.issuer, Unset): + issuer = (None, str(self.issuer.value).encode(), "text/plain") + # CHANGE END (3) #} - issuer = IssuerEnum(self.issuer).value + score = (None, str(self.score).encode(), "text/plain") - score = ( - self.score if self.score is UNSET else (None, str(self.score), "text/plain") - ) - uuid = self.uuid if self.uuid is UNSET else (None, str(self.uuid), "text/plain") - vector = ( - self.vector - if self.vector is UNSET - else (None, str(self.vector), "text/plain") - ) - embargoed = ( - self.embargoed - if self.embargoed is UNSET - else (None, str(self.embargoed), "text/plain") - ) - alerts: 
Union[Unset, Tuple[None, str, str]] = UNSET + uuid: bytes = UNSET + if not isinstance(self.uuid, Unset): + uuid = str(self.uuid) + + vector = (None, str(self.vector).encode(), "text/plain") + + embargoed = (None, str(self.embargoed).encode(), "text/plain") + + alerts: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.alerts, Unset): _temp_alerts = [] for alerts_item_data in self.alerts: - alerts_item: Dict[str, Any] = UNSET + alerts_item: dict[str, Any] = UNSET if not isinstance(alerts_item_data, Unset): alerts_item = alerts_item_data.to_dict() _temp_alerts.append(alerts_item) - alerts = (None, json.dumps(_temp_alerts), "application/json") + alerts = (None, json.dumps(_temp_alerts).encode(), "application/json") - created_dt: str = UNSET + created_dt: bytes = UNSET if not isinstance(self.created_dt, Unset): - created_dt = self.created_dt.isoformat() + created_dt = self.created_dt.isoformat().encode() - comment = ( - self.comment - if self.comment is UNSET - else (None, str(self.comment), "text/plain") - ) + comment: Union[Unset, tuple[None, bytes, str]] + + if isinstance(self.comment, Unset): + comment = UNSET + elif isinstance(self.comment, str): + comment = (None, str(self.comment).encode(), "text/plain") + else: + comment = (None, str(self.comment).encode(), "text/plain") + + field_dict: dict[str, Any] = {} + for prop_name, prop in self.additional_properties.items(): + field_dict[prop_name] = (None, str(prop).encode(), "text/plain") - field_dict: Dict[str, Any] = {} - field_dict.update( - { - key: (None, str(value), "text/plain") - for key, value in self.additional_properties.items() - } - ) if not isinstance(cvss_version, Unset): field_dict["cvss_version"] = cvss_version if not isinstance(issuer, Unset): @@ -157,8 +179,11 @@ def to_multipart(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.alert import Alert + d = src_dict.copy() + # } _cvss_version = d.pop("cvss_version", UNSET) cvss_version: CvssVersionEnum if isinstance(_cvss_version, Unset): @@ -166,6 +191,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: cvss_version = CvssVersionEnum(_cvss_version) + # } _issuer = d.pop("issuer", UNSET) issuer: IssuerEnum if isinstance(_issuer, Unset): @@ -175,7 +201,13 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: score = d.pop("score", UNSET) - uuid = d.pop("uuid", UNSET) + # } + _uuid = d.pop("uuid", UNSET) + uuid: UUID + if isinstance(_uuid, Unset): + uuid = UNSET + else: + uuid = UUID(_uuid) vector = d.pop("vector", UNSET) @@ -183,19 +215,18 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: alerts = [] _alerts = d.pop("alerts", UNSET) - if _alerts is UNSET: - alerts = UNSET - else: - for alerts_item_data in _alerts or []: - _alerts_item = alerts_item_data - alerts_item: Alert - if isinstance(_alerts_item, Unset): - alerts_item = UNSET - else: - alerts_item = Alert.from_dict(_alerts_item) - - alerts.append(alerts_item) - + for alerts_item_data in _alerts or []: + # } + _alerts_item = alerts_item_data + alerts_item: Alert + if isinstance(_alerts_item, Unset): + alerts_item = UNSET + else: + alerts_item = Alert.from_dict(_alerts_item) + + alerts.append(alerts_item) + + # } _created_dt = d.pop("created_dt", UNSET) created_dt: datetime.datetime if isinstance(_created_dt, Unset): @@ -203,7 +234,14 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: created_dt = 
isoparse(_created_dt) - comment = d.pop("comment", UNSET) + def _parse_comment(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + comment = _parse_comment(d.pop("comment", UNSET)) flaw_cvss_post = cls( cvss_version=cvss_version, @@ -226,16 +264,16 @@ def get_fields(): "cvss_version": CvssVersionEnum, "issuer": IssuerEnum, "score": float, - "uuid": str, + "uuid": UUID, "vector": str, "embargoed": bool, - "alerts": List[Alert], + "alerts": list["Alert"], "created_dt": datetime.datetime, - "comment": str, + "comment": Union[None, str], } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/flaw_cvss_put.py b/osidb_bindings/bindings/python_client/models/flaw_cvss_put.py index cf5371b..e48f855 100644 --- a/osidb_bindings/bindings/python_client/models/flaw_cvss_put.py +++ b/osidb_bindings/bindings/python_client/models/flaw_cvss_put.py @@ -1,54 +1,78 @@ import datetime import json -from typing import Any, Dict, List, Tuple, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union, cast +from uuid import UUID -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.alert import Alert from ..models.cvss_version_enum import CvssVersionEnum from ..models.issuer_enum import IssuerEnum from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.alert import Alert + + T = TypeVar("T", bound="FlawCVSSPut") -@attr.s(auto_attribs=True) +@_attrs_define class FlawCVSSPut(OSIDBModel): - """FlawCVSS serializer""" + """FlawCVSS serializer + + Attributes: + cvss_version (CvssVersionEnum): + issuer (IssuerEnum): + score (float): + uuid (UUID): + vector (str): + embargoed (bool): The embargoed boolean attribute is technically read-only as it just indirectly modifies the + ACLs but is mandatory as it controls the access to the resource. + alerts (list['Alert']): + created_dt (datetime.datetime): + updated_dt (datetime.datetime): The updated_dt timestamp attribute is mandatory on update as it is used to + detect mid-air collisions.
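For illustration, a minimal sketch of what the new Union[None, Unset, str] typing of comment buys on this serializer: an explicit null in the payload stays distinct from an omitted key. This is not part of the generated patch, and the import paths are assumptions.

from osidb_bindings.bindings.python_client.models.flaw_cvss_put import FlawCVSSPut
from osidb_bindings.bindings.python_client.types import UNSET

# Only the "comment" key matters here; every other field simply stays UNSET.
explicit_null = FlawCVSSPut.from_dict({"comment": None})
omitted = FlawCVSSPut.from_dict({})

assert explicit_null.comment is None  # the API sent "comment": null
assert omitted.comment is UNSET       # the key was absent from the payload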
+ comment (Union[None, Unset, str]): + """ cvss_version: CvssVersionEnum issuer: IssuerEnum score: float - uuid: str + uuid: UUID vector: str embargoed: bool - alerts: List[Alert] + alerts: list["Alert"] created_dt: datetime.datetime updated_dt: datetime.datetime - comment: Union[Unset, None, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + comment: Union[None, Unset, str] = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: cvss_version: str = UNSET if not isinstance(self.cvss_version, Unset): - cvss_version = CvssVersionEnum(self.cvss_version).value issuer: str = UNSET if not isinstance(self.issuer, Unset): - issuer = IssuerEnum(self.issuer).value score = self.score - uuid = self.uuid + + uuid: str = UNSET + if not isinstance(self.uuid, Unset): + uuid = str(self.uuid) + vector = self.vector + embargoed = self.embargoed - alerts: List[Dict[str, Any]] = UNSET + + alerts: list[dict[str, Any]] = UNSET if not isinstance(self.alerts, Unset): alerts = [] for alerts_item_data in self.alerts: - alerts_item: Dict[str, Any] = UNSET + alerts_item: dict[str, Any] = UNSET if not isinstance(alerts_item_data, Unset): alerts_item = alerts_item_data.to_dict() @@ -62,9 +86,13 @@ def to_dict(self) -> Dict[str, Any]: if not isinstance(self.updated_dt, Unset): updated_dt = self.updated_dt.isoformat() - comment = self.comment + comment: Union[None, Unset, str] + if isinstance(self.comment, Unset): + comment = UNSET + else: + comment = self.comment - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(cvss_version, Unset): field_dict["cvss_version"] = cvss_version @@ -89,63 +117,59 @@ def to_dict(self) -> Dict[str, Any]: return field_dict - def to_multipart(self) -> Dict[str, Any]: - cvss_version: Union[Unset, Tuple[None, str, str]] = UNSET + def to_multipart(self) -> dict[str, Any]: + cvss_version: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.cvss_version, Unset): + cvss_version = (None, str(self.cvss_version.value).encode(), "text/plain") + # CHANGE END (3) #} - cvss_version = CvssVersionEnum(self.cvss_version).value - - issuer: Union[Unset, Tuple[None, str, str]] = UNSET + issuer: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.issuer, Unset): + issuer = (None, str(self.issuer.value).encode(), "text/plain") + # CHANGE END (3) #} - issuer = IssuerEnum(self.issuer).value + score = (None, str(self.score).encode(), "text/plain") - score = ( - self.score if self.score is UNSET else (None, str(self.score), "text/plain") - ) - uuid = self.uuid if self.uuid is UNSET else (None, str(self.uuid), "text/plain") - vector = ( - self.vector - if self.vector is UNSET - else (None, str(self.vector), "text/plain") - ) - embargoed = ( - self.embargoed - if self.embargoed is UNSET - else (None, str(self.embargoed), "text/plain") - ) - alerts: Union[Unset, Tuple[None, str, str]] = UNSET + uuid: bytes = UNSET + if not isinstance(self.uuid, Unset): + uuid = str(self.uuid) + + vector = (None, str(self.vector).encode(), "text/plain") + + embargoed = (None, str(self.embargoed).encode(), "text/plain") + + alerts: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.alerts, Unset): _temp_alerts = [] for alerts_item_data in self.alerts: - alerts_item: Dict[str, Any] = UNSET + alerts_item: dict[str, Any] = UNSET if not isinstance(alerts_item_data, 
Unset): alerts_item = alerts_item_data.to_dict() _temp_alerts.append(alerts_item) - alerts = (None, json.dumps(_temp_alerts), "application/json") + alerts = (None, json.dumps(_temp_alerts).encode(), "application/json") - created_dt: str = UNSET + created_dt: bytes = UNSET if not isinstance(self.created_dt, Unset): - created_dt = self.created_dt.isoformat() + created_dt = self.created_dt.isoformat().encode() - updated_dt: str = UNSET + updated_dt: bytes = UNSET if not isinstance(self.updated_dt, Unset): - updated_dt = self.updated_dt.isoformat() + updated_dt = self.updated_dt.isoformat().encode() - comment = ( - self.comment - if self.comment is UNSET - else (None, str(self.comment), "text/plain") - ) + comment: Union[Unset, tuple[None, bytes, str]] + + if isinstance(self.comment, Unset): + comment = UNSET + elif isinstance(self.comment, str): + comment = (None, str(self.comment).encode(), "text/plain") + else: + comment = (None, str(self.comment).encode(), "text/plain") + + field_dict: dict[str, Any] = {} + for prop_name, prop in self.additional_properties.items(): + field_dict[prop_name] = (None, str(prop).encode(), "text/plain") - field_dict: Dict[str, Any] = {} - field_dict.update( - { - key: (None, str(value), "text/plain") - for key, value in self.additional_properties.items() - } - ) if not isinstance(cvss_version, Unset): field_dict["cvss_version"] = cvss_version if not isinstance(issuer, Unset): @@ -170,8 +194,11 @@ def to_multipart(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.alert import Alert + d = src_dict.copy() + # } _cvss_version = d.pop("cvss_version", UNSET) cvss_version: CvssVersionEnum if isinstance(_cvss_version, Unset): @@ -179,6 +206,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: cvss_version = CvssVersionEnum(_cvss_version) + # } _issuer = d.pop("issuer", UNSET) issuer: IssuerEnum if isinstance(_issuer, Unset): @@ -188,7 +216,13 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: score = d.pop("score", UNSET) - uuid = d.pop("uuid", UNSET) + # } + _uuid = d.pop("uuid", UNSET) + uuid: UUID + if isinstance(_uuid, Unset): + uuid = UNSET + else: + uuid = UUID(_uuid) vector = d.pop("vector", UNSET) @@ -196,19 +230,18 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: alerts = [] _alerts = d.pop("alerts", UNSET) - if _alerts is UNSET: - alerts = UNSET - else: - for alerts_item_data in _alerts or []: - _alerts_item = alerts_item_data - alerts_item: Alert - if isinstance(_alerts_item, Unset): - alerts_item = UNSET - else: - alerts_item = Alert.from_dict(_alerts_item) - - alerts.append(alerts_item) - + for alerts_item_data in _alerts or []: + # } + _alerts_item = alerts_item_data + alerts_item: Alert + if isinstance(_alerts_item, Unset): + alerts_item = UNSET + else: + alerts_item = Alert.from_dict(_alerts_item) + + alerts.append(alerts_item) + + # } _created_dt = d.pop("created_dt", UNSET) created_dt: datetime.datetime if isinstance(_created_dt, Unset): @@ -216,6 +249,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: created_dt = isoparse(_created_dt) + # } _updated_dt = d.pop("updated_dt", UNSET) updated_dt: datetime.datetime if isinstance(_updated_dt, Unset): @@ -223,7 +257,14 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: updated_dt = isoparse(_updated_dt) - comment = d.pop("comment", UNSET) + def _parse_comment(data: object) -> 
Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + comment = _parse_comment(d.pop("comment", UNSET)) flaw_cvss_put = cls( cvss_version=cvss_version, @@ -247,17 +288,17 @@ def get_fields(): "cvss_version": CvssVersionEnum, "issuer": IssuerEnum, "score": float, - "uuid": str, + "uuid": UUID, "vector": str, "embargoed": bool, - "alerts": List[Alert], + "alerts": list["Alert"], "created_dt": datetime.datetime, "updated_dt": datetime.datetime, - "comment": str, + "comment": Union[None, str], } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/flaw_package_version.py b/osidb_bindings/bindings/python_client/models/flaw_package_version.py index 0cf1e35..5a5ec94 100644 --- a/osidb_bindings/bindings/python_client/models/flaw_package_version.py +++ b/osidb_bindings/bindings/python_client/models/flaw_package_version.py @@ -1,43 +1,68 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar +from typing import TYPE_CHECKING, Any, TypeVar +from uuid import UUID -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.flaw_version import FlawVersion from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.flaw_version import FlawVersion + + T = TypeVar("T", bound="FlawPackageVersion") -@attr.s(auto_attribs=True) +@_attrs_define class FlawPackageVersion(OSIDBModel): - """Package model serializer""" + """Package model serializer + + Attributes: + package (str): + versions (list['FlawVersion']): + flaw (UUID): + uuid (UUID): + embargoed (bool): The embargoed boolean attribute is technically read-only as it just indirectly modifies the + ACLs but is mandatory as it controls the access to the resource. + created_dt (datetime.datetime): + updated_dt (datetime.datetime): The updated_dt timestamp attribute is mandatory on update as it is used to + detect mid-air collisions.
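For illustration, a minimal sketch of the same UUID handling on FlawPackageVersion: from_dict() now yields uuid.UUID objects for "flaw" and "uuid", while to_dict() renders them back as strings. This is not part of the generated patch; the import path and values are assumptions.

from uuid import UUID

from osidb_bindings.bindings.python_client.models.flaw_package_version import FlawPackageVersion

pv = FlawPackageVersion.from_dict(
    {
        "package": "kernel",
        "versions": [],
        "flaw": "1f6ecd9b-2f35-4b0d-8b54-5a1d8e0b3c21",
        "uuid": "7a3c2e41-9b8d-4f6a-a0c3-2d1e5b6f7a89",
        "embargoed": False,
        "created_dt": "2024-12-18T18:15:17Z",
        "updated_dt": "2024-12-18T18:15:17Z",
    }
)

assert isinstance(pv.flaw, UUID) and isinstance(pv.uuid, UUID)
assert pv.to_dict()["flaw"] == "1f6ecd9b-2f35-4b0d-8b54-5a1d8e0b3c21"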
+ """ package: str - versions: List[FlawVersion] - flaw: str - uuid: str + versions: list["FlawVersion"] + flaw: UUID + uuid: UUID embargoed: bool created_dt: datetime.datetime updated_dt: datetime.datetime - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: package = self.package - versions: List[Dict[str, Any]] = UNSET + + versions: list[dict[str, Any]] = UNSET if not isinstance(self.versions, Unset): versions = [] for versions_item_data in self.versions: - versions_item: Dict[str, Any] = UNSET + versions_item: dict[str, Any] = UNSET if not isinstance(versions_item_data, Unset): versions_item = versions_item_data.to_dict() versions.append(versions_item) - flaw = self.flaw - uuid = self.uuid + flaw: str = UNSET + if not isinstance(self.flaw, Unset): + flaw = str(self.flaw) + + uuid: str = UNSET + if not isinstance(self.uuid, Unset): + uuid = str(self.uuid) + embargoed = self.embargoed + created_dt: str = UNSET if not isinstance(self.created_dt, Unset): created_dt = self.created_dt.isoformat() @@ -46,7 +71,7 @@ def to_dict(self) -> Dict[str, Any]: if not isinstance(self.updated_dt, Unset): updated_dt = self.updated_dt.isoformat() - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(package, Unset): field_dict["package"] = package @@ -66,31 +91,44 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.flaw_version import FlawVersion + d = src_dict.copy() package = d.pop("package", UNSET) versions = [] _versions = d.pop("versions", UNSET) - if _versions is UNSET: - versions = UNSET + for versions_item_data in _versions or []: + # } + _versions_item = versions_item_data + versions_item: FlawVersion + if isinstance(_versions_item, Unset): + versions_item = UNSET + else: + versions_item = FlawVersion.from_dict(_versions_item) + + versions.append(versions_item) + + # } + _flaw = d.pop("flaw", UNSET) + flaw: UUID + if isinstance(_flaw, Unset): + flaw = UNSET else: - for versions_item_data in _versions or []: - _versions_item = versions_item_data - versions_item: FlawVersion - if isinstance(_versions_item, Unset): - versions_item = UNSET - else: - versions_item = FlawVersion.from_dict(_versions_item) - - versions.append(versions_item) + flaw = UUID(_flaw) - flaw = d.pop("flaw", UNSET) - - uuid = d.pop("uuid", UNSET) + # } + _uuid = d.pop("uuid", UNSET) + uuid: UUID + if isinstance(_uuid, Unset): + uuid = UNSET + else: + uuid = UUID(_uuid) embargoed = d.pop("embargoed", UNSET) + # } _created_dt = d.pop("created_dt", UNSET) created_dt: datetime.datetime if isinstance(_created_dt, Unset): @@ -98,6 +136,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: created_dt = isoparse(_created_dt) + # } _updated_dt = d.pop("updated_dt", UNSET) updated_dt: datetime.datetime if isinstance(_updated_dt, Unset): @@ -122,16 +161,16 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: def get_fields(): return { "package": str, - "versions": List[FlawVersion], - "flaw": str, - "uuid": str, + "versions": list["FlawVersion"], + "flaw": UUID, + "uuid": UUID, "embargoed": bool, "created_dt": datetime.datetime, "updated_dt": datetime.datetime, } @property - def additional_keys(self) 
-> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/flaw_package_version_post.py b/osidb_bindings/bindings/python_client/models/flaw_package_version_post.py index b2f3255..6dc5289 100644 --- a/osidb_bindings/bindings/python_client/models/flaw_package_version_post.py +++ b/osidb_bindings/bindings/python_client/models/flaw_package_version_post.py @@ -1,46 +1,65 @@ import datetime import json -from typing import Any, Dict, List, Tuple, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union +from uuid import UUID -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.flaw_version import FlawVersion from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.flaw_version import FlawVersion + + T = TypeVar("T", bound="FlawPackageVersionPost") -@attr.s(auto_attribs=True) +@_attrs_define class FlawPackageVersionPost(OSIDBModel): - """Package model serializer""" + """Package model serializer + + Attributes: + package (str): + versions (list['FlawVersion']): + uuid (UUID): + embargoed (bool): The embargoed boolean attribute is technically read-only as it just indirectly modifies the + ACLs but is mandatory as it controls the access to the resource. + created_dt (datetime.datetime): + """ package: str - versions: List[FlawVersion] - uuid: str + versions: list["FlawVersion"] + uuid: UUID embargoed: bool created_dt: datetime.datetime - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: package = self.package - versions: List[Dict[str, Any]] = UNSET + + versions: list[dict[str, Any]] = UNSET if not isinstance(self.versions, Unset): versions = [] for versions_item_data in self.versions: - versions_item: Dict[str, Any] = UNSET + versions_item: dict[str, Any] = UNSET if not isinstance(versions_item_data, Unset): versions_item = versions_item_data.to_dict() versions.append(versions_item) - uuid = self.uuid + uuid: str = UNSET + if not isinstance(self.uuid, Unset): + uuid = str(self.uuid) + embargoed = self.embargoed + created_dt: str = UNSET if not isinstance(self.created_dt, Unset): created_dt = self.created_dt.isoformat() - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(package, Unset): field_dict["package"] = package @@ -55,40 +74,34 @@ def to_dict(self) -> Dict[str, Any]: return field_dict - def to_multipart(self) -> Dict[str, Any]: - package = ( - self.package - if self.package is UNSET - else (None, str(self.package), "text/plain") - ) - versions: Union[Unset, Tuple[None, str, str]] = UNSET + def to_multipart(self) -> dict[str, Any]: + package = (None, str(self.package).encode(), "text/plain") + + versions: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.versions, Unset): _temp_versions = [] for versions_item_data in self.versions: - versions_item: Dict[str, Any] = UNSET + versions_item: dict[str, Any] = UNSET if not isinstance(versions_item_data, Unset): versions_item = versions_item_data.to_dict() _temp_versions.append(versions_item) - versions = (None, json.dumps(_temp_versions), "application/json") + versions = (None, 
json.dumps(_temp_versions).encode(), "application/json") - uuid = self.uuid if self.uuid is UNSET else (None, str(self.uuid), "text/plain") - embargoed = ( - self.embargoed - if self.embargoed is UNSET - else (None, str(self.embargoed), "text/plain") - ) - created_dt: str = UNSET + uuid: bytes = UNSET + if not isinstance(self.uuid, Unset): + uuid = str(self.uuid) + + embargoed = (None, str(self.embargoed).encode(), "text/plain") + + created_dt: bytes = UNSET if not isinstance(self.created_dt, Unset): - created_dt = self.created_dt.isoformat() + created_dt = self.created_dt.isoformat().encode() + + field_dict: dict[str, Any] = {} + for prop_name, prop in self.additional_properties.items(): + field_dict[prop_name] = (None, str(prop).encode(), "text/plain") - field_dict: Dict[str, Any] = {} - field_dict.update( - { - key: (None, str(value), "text/plain") - for key, value in self.additional_properties.items() - } - ) if not isinstance(package, Unset): field_dict["package"] = package if not isinstance(versions, Unset): @@ -103,29 +116,36 @@ def to_multipart(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.flaw_version import FlawVersion + d = src_dict.copy() package = d.pop("package", UNSET) versions = [] _versions = d.pop("versions", UNSET) - if _versions is UNSET: - versions = UNSET + for versions_item_data in _versions or []: + # } + _versions_item = versions_item_data + versions_item: FlawVersion + if isinstance(_versions_item, Unset): + versions_item = UNSET + else: + versions_item = FlawVersion.from_dict(_versions_item) + + versions.append(versions_item) + + # } + _uuid = d.pop("uuid", UNSET) + uuid: UUID + if isinstance(_uuid, Unset): + uuid = UNSET else: - for versions_item_data in _versions or []: - _versions_item = versions_item_data - versions_item: FlawVersion - if isinstance(_versions_item, Unset): - versions_item = UNSET - else: - versions_item = FlawVersion.from_dict(_versions_item) - - versions.append(versions_item) - - uuid = d.pop("uuid", UNSET) + uuid = UUID(_uuid) embargoed = d.pop("embargoed", UNSET) + # } _created_dt = d.pop("created_dt", UNSET) created_dt: datetime.datetime if isinstance(_created_dt, Unset): @@ -148,14 +168,14 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: def get_fields(): return { "package": str, - "versions": List[FlawVersion], - "uuid": str, + "versions": list["FlawVersion"], + "uuid": UUID, "embargoed": bool, "created_dt": datetime.datetime, } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/flaw_package_version_put.py b/osidb_bindings/bindings/python_client/models/flaw_package_version_put.py index faf4cbf..ca18700 100644 --- a/osidb_bindings/bindings/python_client/models/flaw_package_version_put.py +++ b/osidb_bindings/bindings/python_client/models/flaw_package_version_put.py @@ -1,42 +1,63 @@ import datetime import json -from typing import Any, Dict, List, Tuple, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union +from uuid import UUID -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.flaw_version import FlawVersion from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + 
from ..models.flaw_version import FlawVersion + + T = TypeVar("T", bound="FlawPackageVersionPut") -@attr.s(auto_attribs=True) +@_attrs_define class FlawPackageVersionPut(OSIDBModel): - """Package model serializer""" + """Package model serializer + + Attributes: + package (str): + versions (list['FlawVersion']): + uuid (UUID): + embargoed (bool): The embargoed boolean attribute is technically read-only as it just indirectly modifies the + ACLs but is mandatory as it controls the access to the resource. + created_dt (datetime.datetime): + updated_dt (datetime.datetime): The updated_dt timestamp attribute is mandatory on update as it is used to + detect mid-air collisions. + """ package: str - versions: List[FlawVersion] - uuid: str + versions: list["FlawVersion"] + uuid: UUID embargoed: bool created_dt: datetime.datetime updated_dt: datetime.datetime - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: package = self.package - versions: List[Dict[str, Any]] = UNSET + + versions: list[dict[str, Any]] = UNSET if not isinstance(self.versions, Unset): versions = [] for versions_item_data in self.versions: - versions_item: Dict[str, Any] = UNSET + versions_item: dict[str, Any] = UNSET if not isinstance(versions_item_data, Unset): versions_item = versions_item_data.to_dict() versions.append(versions_item) - uuid = self.uuid + uuid: str = UNSET + if not isinstance(self.uuid, Unset): + uuid = str(self.uuid) + embargoed = self.embargoed + created_dt: str = UNSET if not isinstance(self.created_dt, Unset): created_dt = self.created_dt.isoformat() @@ -45,7 +66,7 @@ def to_dict(self) -> Dict[str, Any]: if not isinstance(self.updated_dt, Unset): updated_dt = self.updated_dt.isoformat() - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(package, Unset): field_dict["package"] = package @@ -62,44 +83,38 @@ def to_dict(self) -> Dict[str, Any]: return field_dict - def to_multipart(self) -> Dict[str, Any]: - package = ( - self.package - if self.package is UNSET - else (None, str(self.package), "text/plain") - ) - versions: Union[Unset, Tuple[None, str, str]] = UNSET + def to_multipart(self) -> dict[str, Any]: + package = (None, str(self.package).encode(), "text/plain") + + versions: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.versions, Unset): _temp_versions = [] for versions_item_data in self.versions: - versions_item: Dict[str, Any] = UNSET + versions_item: dict[str, Any] = UNSET if not isinstance(versions_item_data, Unset): versions_item = versions_item_data.to_dict() _temp_versions.append(versions_item) - versions = (None, json.dumps(_temp_versions), "application/json") + versions = (None, json.dumps(_temp_versions).encode(), "application/json") - uuid = self.uuid if self.uuid is UNSET else (None, str(self.uuid), "text/plain") - embargoed = ( - self.embargoed - if self.embargoed is UNSET - else (None, str(self.embargoed), "text/plain") - ) - created_dt: str = UNSET + uuid: bytes = UNSET + if not isinstance(self.uuid, Unset): + uuid = str(self.uuid) + + embargoed = (None, str(self.embargoed).encode(), "text/plain") + + created_dt: bytes = UNSET if not isinstance(self.created_dt, Unset): - created_dt = self.created_dt.isoformat() + created_dt = self.created_dt.isoformat().encode() - updated_dt: str = UNSET + updated_dt: bytes
= UNSET if not isinstance(self.updated_dt, Unset): - updated_dt = self.updated_dt.isoformat() + updated_dt = self.updated_dt.isoformat().encode() + + field_dict: dict[str, Any] = {} + for prop_name, prop in self.additional_properties.items(): + field_dict[prop_name] = (None, str(prop).encode(), "text/plain") - field_dict: Dict[str, Any] = {} - field_dict.update( - { - key: (None, str(value), "text/plain") - for key, value in self.additional_properties.items() - } - ) if not isinstance(package, Unset): field_dict["package"] = package if not isinstance(versions, Unset): @@ -116,29 +131,36 @@ def to_multipart(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.flaw_version import FlawVersion + d = src_dict.copy() package = d.pop("package", UNSET) versions = [] _versions = d.pop("versions", UNSET) - if _versions is UNSET: - versions = UNSET + for versions_item_data in _versions or []: + # } + _versions_item = versions_item_data + versions_item: FlawVersion + if isinstance(_versions_item, Unset): + versions_item = UNSET + else: + versions_item = FlawVersion.from_dict(_versions_item) + + versions.append(versions_item) + + # } + _uuid = d.pop("uuid", UNSET) + uuid: UUID + if isinstance(_uuid, Unset): + uuid = UNSET else: - for versions_item_data in _versions or []: - _versions_item = versions_item_data - versions_item: FlawVersion - if isinstance(_versions_item, Unset): - versions_item = UNSET - else: - versions_item = FlawVersion.from_dict(_versions_item) - - versions.append(versions_item) - - uuid = d.pop("uuid", UNSET) + uuid = UUID(_uuid) embargoed = d.pop("embargoed", UNSET) + # } _created_dt = d.pop("created_dt", UNSET) created_dt: datetime.datetime if isinstance(_created_dt, Unset): @@ -146,6 +168,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: created_dt = isoparse(_created_dt) + # } _updated_dt = d.pop("updated_dt", UNSET) updated_dt: datetime.datetime if isinstance(_updated_dt, Unset): @@ -169,15 +192,15 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: def get_fields(): return { "package": str, - "versions": List[FlawVersion], - "uuid": str, + "versions": list["FlawVersion"], + "uuid": UUID, "embargoed": bool, "created_dt": datetime.datetime, "updated_dt": datetime.datetime, } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/flaw_post.py b/osidb_bindings/bindings/python_client/models/flaw_post.py index ec9d766..31f8bf9 100644 --- a/osidb_bindings/bindings/python_client/models/flaw_post.py +++ b/osidb_bindings/bindings/python_client/models/flaw_post.py @@ -1,201 +1,253 @@ import datetime import json -from typing import Any, Dict, List, Tuple, Type, TypeVar, Union, cast +from typing import TYPE_CHECKING, Any, TypeVar, Union, cast +from uuid import UUID -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.affect import Affect -from ..models.alert import Alert from ..models.blank_enum import BlankEnum -from ..models.comment import Comment -from ..models.flaw_acknowledgment import FlawAcknowledgment -from ..models.flaw_cvss import FlawCVSS -from ..models.flaw_post_classification import FlawPostClassification -from 
..models.flaw_reference import FlawReference from ..models.impact_enum import ImpactEnum from ..models.major_incident_state_enum import MajorIncidentStateEnum from ..models.nist_cvss_validation_enum import NistCvssValidationEnum -from ..models.package import Package from ..models.requires_cve_description_enum import RequiresCveDescriptionEnum from ..models.source_be_0_enum import SourceBe0Enum from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.affect import Affect + from ..models.alert import Alert + from ..models.comment import Comment + from ..models.flaw_acknowledgment import FlawAcknowledgment + from ..models.flaw_cvss import FlawCVSS + from ..models.flaw_post_classification import FlawPostClassification + from ..models.flaw_reference import FlawReference + from ..models.package import Package + + T = TypeVar("T", bound="FlawPost") -@attr.s(auto_attribs=True) +@_attrs_define class FlawPost(OSIDBModel): - """serialize flaw model""" - - uuid: str + """serialize flaw model + + Attributes: + uuid (UUID): + title (str): + trackers (list[str]): + comment_zero (str): + affects (list['Affect']): + comments (list['Comment']): + package_versions (list['Package']): + acknowledgments (list['FlawAcknowledgment']): + references (list['FlawReference']): + cvss_scores (list['FlawCVSS']): + embargoed (bool): The embargoed boolean attribute is technically read-only as it just indirectly modifies the + ACLs but is mandatory as it controls the access to the resource. + created_dt (datetime.datetime): + classification (FlawPostClassification): + alerts (list['Alert']): + cve_id (Union[None, Unset, str]): + impact (Union[BlankEnum, ImpactEnum, Unset]): + components (Union[Unset, list[str]]): + cve_description (Union[Unset, str]): + requires_cve_description (Union[BlankEnum, RequiresCveDescriptionEnum, Unset]): + statement (Union[Unset, str]): + cwe_id (Union[Unset, str]): + unembargo_dt (Union[None, Unset, datetime.datetime]): + source (Union[BlankEnum, SourceBe0Enum, Unset]): + reported_dt (Union[None, Unset, datetime.datetime]): + mitigation (Union[Unset, str]): + major_incident_state (Union[BlankEnum, MajorIncidentStateEnum, Unset]): + major_incident_start_dt (Union[None, Unset, datetime.datetime]): + nist_cvss_validation (Union[BlankEnum, NistCvssValidationEnum, Unset]): + group_key (Union[Unset, str]): + owner (Union[Unset, str]): + task_key (Union[Unset, str]): + team_id (Union[Unset, str]): + """ + + uuid: UUID title: str - trackers: List[str] + trackers: list[str] comment_zero: str - affects: List[Affect] - comments: List[Comment] - package_versions: List[Package] - acknowledgments: List[FlawAcknowledgment] - references: List[FlawReference] - cvss_scores: List[FlawCVSS] + affects: list["Affect"] + comments: list["Comment"] + package_versions: list["Package"] + acknowledgments: list["FlawAcknowledgment"] + references: list["FlawReference"] + cvss_scores: list["FlawCVSS"] embargoed: bool created_dt: datetime.datetime - classification: FlawPostClassification - alerts: List[Alert] - cve_id: Union[Unset, None, str] = UNSET + classification: "FlawPostClassification" + alerts: list["Alert"] + cve_id: Union[None, Unset, str] = UNSET impact: Union[BlankEnum, ImpactEnum, Unset] = UNSET - components: Union[Unset, List[str]] = UNSET + components: Union[Unset, list[str]] = UNSET cve_description: Union[Unset, str] = UNSET - requires_cve_description: Union[ - BlankEnum, RequiresCveDescriptionEnum, Unset - ] = UNSET + requires_cve_description: Union[BlankEnum, 
RequiresCveDescriptionEnum, Unset] = UNSET statement: Union[Unset, str] = UNSET cwe_id: Union[Unset, str] = UNSET - unembargo_dt: Union[Unset, None, datetime.datetime] = UNSET + unembargo_dt: Union[None, Unset, datetime.datetime] = UNSET source: Union[BlankEnum, SourceBe0Enum, Unset] = UNSET - reported_dt: Union[Unset, None, datetime.datetime] = UNSET + reported_dt: Union[None, Unset, datetime.datetime] = UNSET mitigation: Union[Unset, str] = UNSET major_incident_state: Union[BlankEnum, MajorIncidentStateEnum, Unset] = UNSET - major_incident_start_dt: Union[Unset, None, datetime.datetime] = UNSET + major_incident_start_dt: Union[None, Unset, datetime.datetime] = UNSET nist_cvss_validation: Union[BlankEnum, NistCvssValidationEnum, Unset] = UNSET group_key: Union[Unset, str] = UNSET owner: Union[Unset, str] = UNSET task_key: Union[Unset, str] = UNSET team_id: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + uuid: str = UNSET + if not isinstance(self.uuid, Unset): + uuid = str(self.uuid) - def to_dict(self) -> Dict[str, Any]: - uuid = self.uuid title = self.title - trackers: List[str] = UNSET + + trackers: list[str] = UNSET if not isinstance(self.trackers, Unset): trackers = self.trackers comment_zero = self.comment_zero - affects: List[Dict[str, Any]] = UNSET + + affects: list[dict[str, Any]] = UNSET if not isinstance(self.affects, Unset): affects = [] for affects_item_data in self.affects: - affects_item: Dict[str, Any] = UNSET + affects_item: dict[str, Any] = UNSET if not isinstance(affects_item_data, Unset): affects_item = affects_item_data.to_dict() affects.append(affects_item) - comments: List[Dict[str, Any]] = UNSET + comments: list[dict[str, Any]] = UNSET if not isinstance(self.comments, Unset): comments = [] for comments_item_data in self.comments: - comments_item: Dict[str, Any] = UNSET + comments_item: dict[str, Any] = UNSET if not isinstance(comments_item_data, Unset): comments_item = comments_item_data.to_dict() comments.append(comments_item) - package_versions: List[Dict[str, Any]] = UNSET + package_versions: list[dict[str, Any]] = UNSET if not isinstance(self.package_versions, Unset): package_versions = [] for package_versions_item_data in self.package_versions: - package_versions_item: Dict[str, Any] = UNSET + package_versions_item: dict[str, Any] = UNSET if not isinstance(package_versions_item_data, Unset): package_versions_item = package_versions_item_data.to_dict() package_versions.append(package_versions_item) - acknowledgments: List[Dict[str, Any]] = UNSET + acknowledgments: list[dict[str, Any]] = UNSET if not isinstance(self.acknowledgments, Unset): acknowledgments = [] for acknowledgments_item_data in self.acknowledgments: - acknowledgments_item: Dict[str, Any] = UNSET + acknowledgments_item: dict[str, Any] = UNSET if not isinstance(acknowledgments_item_data, Unset): acknowledgments_item = acknowledgments_item_data.to_dict() acknowledgments.append(acknowledgments_item) - references: List[Dict[str, Any]] = UNSET + references: list[dict[str, Any]] = UNSET if not isinstance(self.references, Unset): references = [] for references_item_data in self.references: - references_item: Dict[str, Any] = UNSET + references_item: dict[str, Any] = UNSET if not isinstance(references_item_data, Unset): references_item = references_item_data.to_dict() references.append(references_item) - cvss_scores: List[Dict[str, 
Any]] = UNSET + cvss_scores: list[dict[str, Any]] = UNSET if not isinstance(self.cvss_scores, Unset): cvss_scores = [] for cvss_scores_item_data in self.cvss_scores: - cvss_scores_item: Dict[str, Any] = UNSET + cvss_scores_item: dict[str, Any] = UNSET if not isinstance(cvss_scores_item_data, Unset): cvss_scores_item = cvss_scores_item_data.to_dict() cvss_scores.append(cvss_scores_item) embargoed = self.embargoed + created_dt: str = UNSET if not isinstance(self.created_dt, Unset): created_dt = self.created_dt.isoformat() - classification: Dict[str, Any] = UNSET + classification: dict[str, Any] = UNSET if not isinstance(self.classification, Unset): classification = self.classification.to_dict() - alerts: List[Dict[str, Any]] = UNSET + alerts: list[dict[str, Any]] = UNSET if not isinstance(self.alerts, Unset): alerts = [] for alerts_item_data in self.alerts: - alerts_item: Dict[str, Any] = UNSET + alerts_item: dict[str, Any] = UNSET if not isinstance(alerts_item_data, Unset): alerts_item = alerts_item_data.to_dict() alerts.append(alerts_item) - cve_id = self.cve_id + cve_id: Union[None, Unset, str] + if isinstance(self.cve_id, Unset): + cve_id = UNSET + else: + cve_id = self.cve_id + impact: Union[Unset, str] if isinstance(self.impact, Unset): impact = UNSET elif isinstance(self.impact, ImpactEnum): impact = UNSET if not isinstance(self.impact, Unset): - impact = ImpactEnum(self.impact).value else: impact = UNSET if not isinstance(self.impact, Unset): - impact = BlankEnum(self.impact).value - components: Union[Unset, List[str]] = UNSET + components: Union[Unset, list[str]] = UNSET if not isinstance(self.components, Unset): components = self.components cve_description = self.cve_description + requires_cve_description: Union[Unset, str] if isinstance(self.requires_cve_description, Unset): requires_cve_description = UNSET elif isinstance(self.requires_cve_description, RequiresCveDescriptionEnum): requires_cve_description = UNSET if not isinstance(self.requires_cve_description, Unset): - - requires_cve_description = RequiresCveDescriptionEnum( - self.requires_cve_description - ).value + requires_cve_description = RequiresCveDescriptionEnum(self.requires_cve_description).value else: requires_cve_description = UNSET if not isinstance(self.requires_cve_description, Unset): - - requires_cve_description = BlankEnum( - self.requires_cve_description - ).value + requires_cve_description = BlankEnum(self.requires_cve_description).value statement = self.statement + cwe_id = self.cwe_id - unembargo_dt: Union[Unset, None, str] = UNSET - if not isinstance(self.unembargo_dt, Unset): - unembargo_dt = self.unembargo_dt.isoformat() if self.unembargo_dt else None + + unembargo_dt: Union[None, Unset, str] + if isinstance(self.unembargo_dt, Unset): + unembargo_dt = UNSET + elif isinstance(self.unembargo_dt, datetime.datetime): + unembargo_dt = UNSET + if not isinstance(self.unembargo_dt, Unset): + unembargo_dt = self.unembargo_dt.isoformat() + + else: + unembargo_dt = self.unembargo_dt source: Union[Unset, str] if isinstance(self.source, Unset): @@ -203,44 +255,49 @@ def to_dict(self) -> Dict[str, Any]: elif isinstance(self.source, SourceBe0Enum): source = UNSET if not isinstance(self.source, Unset): - source = SourceBe0Enum(self.source).value else: source = UNSET if not isinstance(self.source, Unset): - source = BlankEnum(self.source).value - reported_dt: Union[Unset, None, str] = UNSET - if not isinstance(self.reported_dt, Unset): - reported_dt = self.reported_dt.isoformat() if self.reported_dt else None + 
reported_dt: Union[None, Unset, str] + if isinstance(self.reported_dt, Unset): + reported_dt = UNSET + elif isinstance(self.reported_dt, datetime.datetime): + reported_dt = UNSET + if not isinstance(self.reported_dt, Unset): + reported_dt = self.reported_dt.isoformat() + + else: + reported_dt = self.reported_dt mitigation = self.mitigation + major_incident_state: Union[Unset, str] if isinstance(self.major_incident_state, Unset): major_incident_state = UNSET elif isinstance(self.major_incident_state, MajorIncidentStateEnum): major_incident_state = UNSET if not isinstance(self.major_incident_state, Unset): - - major_incident_state = MajorIncidentStateEnum( - self.major_incident_state - ).value + major_incident_state = MajorIncidentStateEnum(self.major_incident_state).value else: major_incident_state = UNSET if not isinstance(self.major_incident_state, Unset): - major_incident_state = BlankEnum(self.major_incident_state).value - major_incident_start_dt: Union[Unset, None, str] = UNSET - if not isinstance(self.major_incident_start_dt, Unset): - major_incident_start_dt = ( - self.major_incident_start_dt.isoformat() - if self.major_incident_start_dt - else None - ) + major_incident_start_dt: Union[None, Unset, str] + if isinstance(self.major_incident_start_dt, Unset): + major_incident_start_dt = UNSET + elif isinstance(self.major_incident_start_dt, datetime.datetime): + major_incident_start_dt = UNSET + if not isinstance(self.major_incident_start_dt, Unset): + major_incident_start_dt = self.major_incident_start_dt.isoformat() + + else: + major_incident_start_dt = self.major_incident_start_dt nist_cvss_validation: Union[Unset, str] if isinstance(self.nist_cvss_validation, Unset): @@ -248,23 +305,22 @@ def to_dict(self) -> Dict[str, Any]: elif isinstance(self.nist_cvss_validation, NistCvssValidationEnum): nist_cvss_validation = UNSET if not isinstance(self.nist_cvss_validation, Unset): - - nist_cvss_validation = NistCvssValidationEnum( - self.nist_cvss_validation - ).value + nist_cvss_validation = NistCvssValidationEnum(self.nist_cvss_validation).value else: nist_cvss_validation = UNSET if not isinstance(self.nist_cvss_validation, Unset): - nist_cvss_validation = BlankEnum(self.nist_cvss_validation).value group_key = self.group_key + owner = self.owner + task_key = self.task_key + team_id = self.team_id - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(uuid, Unset): field_dict["uuid"] = uuid @@ -333,278 +389,263 @@ def to_dict(self) -> Dict[str, Any]: return field_dict - def to_multipart(self) -> Dict[str, Any]: - uuid = self.uuid if self.uuid is UNSET else (None, str(self.uuid), "text/plain") - title = ( - self.title if self.title is UNSET else (None, str(self.title), "text/plain") - ) - trackers: Union[Unset, Tuple[None, str, str]] = UNSET + def to_multipart(self) -> dict[str, Any]: + uuid: bytes = UNSET + if not isinstance(self.uuid, Unset): + uuid = str(self.uuid) + + title = (None, str(self.title).encode(), "text/plain") + + trackers: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.trackers, Unset): _temp_trackers = self.trackers - trackers = (None, json.dumps(_temp_trackers), "application/json") + trackers = (None, json.dumps(_temp_trackers).encode(), "application/json") - comment_zero = ( - self.comment_zero - if self.comment_zero is UNSET - else (None, str(self.comment_zero), "text/plain") - ) - affects: Union[Unset, Tuple[None, str, str]] = UNSET + comment_zero = (None, 
str(self.comment_zero).encode(), "text/plain") + + affects: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.affects, Unset): _temp_affects = [] for affects_item_data in self.affects: - affects_item: Dict[str, Any] = UNSET + affects_item: dict[str, Any] = UNSET if not isinstance(affects_item_data, Unset): affects_item = affects_item_data.to_dict() _temp_affects.append(affects_item) - affects = (None, json.dumps(_temp_affects), "application/json") + affects = (None, json.dumps(_temp_affects).encode(), "application/json") - comments: Union[Unset, Tuple[None, str, str]] = UNSET + comments: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.comments, Unset): _temp_comments = [] for comments_item_data in self.comments: - comments_item: Dict[str, Any] = UNSET + comments_item: dict[str, Any] = UNSET if not isinstance(comments_item_data, Unset): comments_item = comments_item_data.to_dict() _temp_comments.append(comments_item) - comments = (None, json.dumps(_temp_comments), "application/json") + comments = (None, json.dumps(_temp_comments).encode(), "application/json") - package_versions: Union[Unset, Tuple[None, str, str]] = UNSET + package_versions: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.package_versions, Unset): _temp_package_versions = [] for package_versions_item_data in self.package_versions: - package_versions_item: Dict[str, Any] = UNSET + package_versions_item: dict[str, Any] = UNSET if not isinstance(package_versions_item_data, Unset): package_versions_item = package_versions_item_data.to_dict() _temp_package_versions.append(package_versions_item) - package_versions = ( - None, - json.dumps(_temp_package_versions), - "application/json", - ) + package_versions = (None, json.dumps(_temp_package_versions).encode(), "application/json") - acknowledgments: Union[Unset, Tuple[None, str, str]] = UNSET + acknowledgments: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.acknowledgments, Unset): _temp_acknowledgments = [] for acknowledgments_item_data in self.acknowledgments: - acknowledgments_item: Dict[str, Any] = UNSET + acknowledgments_item: dict[str, Any] = UNSET if not isinstance(acknowledgments_item_data, Unset): acknowledgments_item = acknowledgments_item_data.to_dict() _temp_acknowledgments.append(acknowledgments_item) - acknowledgments = ( - None, - json.dumps(_temp_acknowledgments), - "application/json", - ) + acknowledgments = (None, json.dumps(_temp_acknowledgments).encode(), "application/json") - references: Union[Unset, Tuple[None, str, str]] = UNSET + references: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.references, Unset): _temp_references = [] for references_item_data in self.references: - references_item: Dict[str, Any] = UNSET + references_item: dict[str, Any] = UNSET if not isinstance(references_item_data, Unset): references_item = references_item_data.to_dict() _temp_references.append(references_item) - references = (None, json.dumps(_temp_references), "application/json") + references = (None, json.dumps(_temp_references).encode(), "application/json") - cvss_scores: Union[Unset, Tuple[None, str, str]] = UNSET + cvss_scores: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.cvss_scores, Unset): _temp_cvss_scores = [] for cvss_scores_item_data in self.cvss_scores: - cvss_scores_item: Dict[str, Any] = UNSET + cvss_scores_item: dict[str, Any] = UNSET if not isinstance(cvss_scores_item_data, Unset): cvss_scores_item = cvss_scores_item_data.to_dict() 
_temp_cvss_scores.append(cvss_scores_item) - cvss_scores = (None, json.dumps(_temp_cvss_scores), "application/json") + cvss_scores = (None, json.dumps(_temp_cvss_scores).encode(), "application/json") - embargoed = ( - self.embargoed - if self.embargoed is UNSET - else (None, str(self.embargoed), "text/plain") - ) - created_dt: str = UNSET + embargoed = (None, str(self.embargoed).encode(), "text/plain") + + created_dt: bytes = UNSET if not isinstance(self.created_dt, Unset): - created_dt = self.created_dt.isoformat() + created_dt = self.created_dt.isoformat().encode() - classification: Union[Unset, Tuple[None, str, str]] = UNSET + classification: tuple[None, bytes, str] = UNSET if not isinstance(self.classification, Unset): - classification = ( - None, - json.dumps(self.classification.to_dict()), - "application/json", - ) + classification = (None, json.dumps(self.classification.to_dict()).encode(), "application/json") - alerts: Union[Unset, Tuple[None, str, str]] = UNSET + alerts: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.alerts, Unset): _temp_alerts = [] for alerts_item_data in self.alerts: - alerts_item: Dict[str, Any] = UNSET + alerts_item: dict[str, Any] = UNSET if not isinstance(alerts_item_data, Unset): alerts_item = alerts_item_data.to_dict() _temp_alerts.append(alerts_item) - alerts = (None, json.dumps(_temp_alerts), "application/json") + alerts = (None, json.dumps(_temp_alerts).encode(), "application/json") + + cve_id: Union[Unset, tuple[None, bytes, str]] + + if isinstance(self.cve_id, Unset): + cve_id = UNSET + elif isinstance(self.cve_id, str): + cve_id = (None, str(self.cve_id).encode(), "text/plain") + else: + cve_id = (None, str(self.cve_id).encode(), "text/plain") + + impact: Union[Unset, tuple[None, bytes, str]] - cve_id = ( - self.cve_id - if self.cve_id is UNSET - else (None, str(self.cve_id), "text/plain") - ) - impact: Union[Unset, str] if isinstance(self.impact, Unset): impact = UNSET elif isinstance(self.impact, ImpactEnum): - impact = UNSET + impact: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.impact, Unset): - - impact = ImpactEnum(self.impact).value - + impact = (None, str(self.impact.value).encode(), "text/plain") + # CHANGE END (3) #} else: - impact = UNSET + impact: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.impact, Unset): + impact = (None, str(self.impact.value).encode(), "text/plain") + # CHANGE END (3) #} - impact = BlankEnum(self.impact).value - - components: Union[Unset, Tuple[None, str, str]] = UNSET + components: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.components, Unset): _temp_components = self.components - components = (None, json.dumps(_temp_components), "application/json") + components = (None, json.dumps(_temp_components).encode(), "application/json") cve_description = ( self.cve_description - if self.cve_description is UNSET - else (None, str(self.cve_description), "text/plain") + if isinstance(self.cve_description, Unset) + else (None, str(self.cve_description).encode(), "text/plain") ) - requires_cve_description: Union[Unset, str] + + requires_cve_description: Union[Unset, tuple[None, bytes, str]] + if isinstance(self.requires_cve_description, Unset): requires_cve_description = UNSET elif isinstance(self.requires_cve_description, RequiresCveDescriptionEnum): - requires_cve_description = UNSET + requires_cve_description: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.requires_cve_description, Unset): - - 
requires_cve_description = RequiresCveDescriptionEnum( - self.requires_cve_description - ).value - + requires_cve_description = (None, str(self.requires_cve_description.value).encode(), "text/plain") + # CHANGE END (3) #} else: - requires_cve_description = UNSET + requires_cve_description: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.requires_cve_description, Unset): - - requires_cve_description = BlankEnum( - self.requires_cve_description - ).value + requires_cve_description = (None, str(self.requires_cve_description.value).encode(), "text/plain") + # CHANGE END (3) #} statement = ( - self.statement - if self.statement is UNSET - else (None, str(self.statement), "text/plain") + self.statement if isinstance(self.statement, Unset) else (None, str(self.statement).encode(), "text/plain") ) - cwe_id = ( - self.cwe_id - if self.cwe_id is UNSET - else (None, str(self.cwe_id), "text/plain") - ) - unembargo_dt: Union[Unset, None, str] = UNSET - if not isinstance(self.unembargo_dt, Unset): - unembargo_dt = self.unembargo_dt.isoformat() if self.unembargo_dt else None - source: Union[Unset, str] + cwe_id = self.cwe_id if isinstance(self.cwe_id, Unset) else (None, str(self.cwe_id).encode(), "text/plain") + + unembargo_dt: Union[Unset, tuple[None, bytes, str]] + + if isinstance(self.unembargo_dt, Unset): + unembargo_dt = UNSET + elif isinstance(self.unembargo_dt, datetime.datetime): + unembargo_dt: bytes = UNSET + if not isinstance(self.unembargo_dt, Unset): + unembargo_dt = self.unembargo_dt.isoformat().encode() + else: + unembargo_dt = (None, str(self.unembargo_dt).encode(), "text/plain") + + source: Union[Unset, tuple[None, bytes, str]] + if isinstance(self.source, Unset): source = UNSET elif isinstance(self.source, SourceBe0Enum): - source = UNSET + source: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.source, Unset): - - source = SourceBe0Enum(self.source).value - + source = (None, str(self.source.value).encode(), "text/plain") + # CHANGE END (3) #} else: - source = UNSET + source: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.source, Unset): + source = (None, str(self.source.value).encode(), "text/plain") + # CHANGE END (3) #} - source = BlankEnum(self.source).value + reported_dt: Union[Unset, tuple[None, bytes, str]] - reported_dt: Union[Unset, None, str] = UNSET - if not isinstance(self.reported_dt, Unset): - reported_dt = self.reported_dt.isoformat() if self.reported_dt else None + if isinstance(self.reported_dt, Unset): + reported_dt = UNSET + elif isinstance(self.reported_dt, datetime.datetime): + reported_dt: bytes = UNSET + if not isinstance(self.reported_dt, Unset): + reported_dt = self.reported_dt.isoformat().encode() + else: + reported_dt = (None, str(self.reported_dt).encode(), "text/plain") mitigation = ( self.mitigation - if self.mitigation is UNSET - else (None, str(self.mitigation), "text/plain") + if isinstance(self.mitigation, Unset) + else (None, str(self.mitigation).encode(), "text/plain") ) - major_incident_state: Union[Unset, str] + + major_incident_state: Union[Unset, tuple[None, bytes, str]] + if isinstance(self.major_incident_state, Unset): major_incident_state = UNSET elif isinstance(self.major_incident_state, MajorIncidentStateEnum): - major_incident_state = UNSET + major_incident_state: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.major_incident_state, Unset): - - major_incident_state = MajorIncidentStateEnum( - self.major_incident_state - ).value - + major_incident_state = 
(None, str(self.major_incident_state.value).encode(), "text/plain") + # CHANGE END (3) #} else: - major_incident_state = UNSET + major_incident_state: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.major_incident_state, Unset): + major_incident_state = (None, str(self.major_incident_state.value).encode(), "text/plain") + # CHANGE END (3) #} - major_incident_state = BlankEnum(self.major_incident_state).value + major_incident_start_dt: Union[Unset, tuple[None, bytes, str]] - major_incident_start_dt: Union[Unset, None, str] = UNSET - if not isinstance(self.major_incident_start_dt, Unset): - major_incident_start_dt = ( - self.major_incident_start_dt.isoformat() - if self.major_incident_start_dt - else None - ) + if isinstance(self.major_incident_start_dt, Unset): + major_incident_start_dt = UNSET + elif isinstance(self.major_incident_start_dt, datetime.datetime): + major_incident_start_dt: bytes = UNSET + if not isinstance(self.major_incident_start_dt, Unset): + major_incident_start_dt = self.major_incident_start_dt.isoformat().encode() + else: + major_incident_start_dt = (None, str(self.major_incident_start_dt).encode(), "text/plain") + + nist_cvss_validation: Union[Unset, tuple[None, bytes, str]] - nist_cvss_validation: Union[Unset, str] if isinstance(self.nist_cvss_validation, Unset): nist_cvss_validation = UNSET elif isinstance(self.nist_cvss_validation, NistCvssValidationEnum): - nist_cvss_validation = UNSET + nist_cvss_validation: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.nist_cvss_validation, Unset): - - nist_cvss_validation = NistCvssValidationEnum( - self.nist_cvss_validation - ).value - + nist_cvss_validation = (None, str(self.nist_cvss_validation.value).encode(), "text/plain") + # CHANGE END (3) #} else: - nist_cvss_validation = UNSET + nist_cvss_validation: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.nist_cvss_validation, Unset): - - nist_cvss_validation = BlankEnum(self.nist_cvss_validation).value + nist_cvss_validation = (None, str(self.nist_cvss_validation.value).encode(), "text/plain") + # CHANGE END (3) #} group_key = ( - self.group_key - if self.group_key is UNSET - else (None, str(self.group_key), "text/plain") - ) - owner = ( - self.owner if self.owner is UNSET else (None, str(self.owner), "text/plain") + self.group_key if isinstance(self.group_key, Unset) else (None, str(self.group_key).encode(), "text/plain") ) + + owner = self.owner if isinstance(self.owner, Unset) else (None, str(self.owner).encode(), "text/plain") + task_key = ( - self.task_key - if self.task_key is UNSET - else (None, str(self.task_key), "text/plain") - ) - team_id = ( - self.team_id - if self.team_id is UNSET - else (None, str(self.team_id), "text/plain") + self.task_key if isinstance(self.task_key, Unset) else (None, str(self.task_key).encode(), "text/plain") ) - field_dict: Dict[str, Any] = {} - field_dict.update( - { - key: (None, str(value), "text/plain") - for key, value in self.additional_properties.items() - } - ) + team_id = self.team_id if isinstance(self.team_id, Unset) else (None, str(self.team_id).encode(), "text/plain") + + field_dict: dict[str, Any] = {} + for prop_name, prop in self.additional_properties.items(): + field_dict[prop_name] = (None, str(prop).encode(), "text/plain") + if not isinstance(uuid, Unset): field_dict["uuid"] = uuid if not isinstance(title, Unset): @@ -673,110 +714,112 @@ def to_multipart(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: 
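# Note (not part of the generated diff): the regenerated to_multipart() methods
# now emit byte payloads, i.e. (None, <bytes>, "text/plain") for scalar fields
# and (None, json.dumps(...).encode(), "application/json") for lists/objects.
# The sketch below only mirrors that tuple shape; the helper names and the
# httpx-style `files=` usage are assumptions for illustration, not part of the
# generated code.
import json


def as_text_part(value) -> tuple:
    # Scalar fields: stringified, encoded to bytes, sent as text/plain.
    return (None, str(value).encode(), "text/plain")


def as_json_part(value) -> tuple:
    # List/dict fields: JSON-dumped, encoded to bytes, sent as application/json.
    return (None, json.dumps(value).encode(), "application/json")


multipart_body = {
    "title": as_text_part("Example flaw title"),   # hypothetical field value
    "components": as_json_part(["kernel"]),        # hypothetical field value
}
# A multipart-capable HTTP client (e.g. httpx) can send this as files=multipart_body.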
Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.affect import Affect + from ..models.alert import Alert + from ..models.comment import Comment + from ..models.flaw_acknowledgment import FlawAcknowledgment + from ..models.flaw_cvss import FlawCVSS + from ..models.flaw_post_classification import FlawPostClassification + from ..models.flaw_reference import FlawReference + from ..models.package import Package + d = src_dict.copy() - uuid = d.pop("uuid", UNSET) + # } + _uuid = d.pop("uuid", UNSET) + uuid: UUID + if isinstance(_uuid, Unset): + uuid = UNSET + else: + uuid = UUID(_uuid) title = d.pop("title", UNSET) - trackers = cast(List[str], d.pop("trackers", UNSET)) + trackers = cast(list[str], d.pop("trackers", UNSET)) comment_zero = d.pop("comment_zero", UNSET) affects = [] _affects = d.pop("affects", UNSET) - if _affects is UNSET: - affects = UNSET - else: - for affects_item_data in _affects or []: - _affects_item = affects_item_data - affects_item: Affect - if isinstance(_affects_item, Unset): - affects_item = UNSET - else: - affects_item = Affect.from_dict(_affects_item) + for affects_item_data in _affects or []: + # } + _affects_item = affects_item_data + affects_item: Affect + if isinstance(_affects_item, Unset): + affects_item = UNSET + else: + affects_item = Affect.from_dict(_affects_item) - affects.append(affects_item) + affects.append(affects_item) comments = [] _comments = d.pop("comments", UNSET) - if _comments is UNSET: - comments = UNSET - else: - for comments_item_data in _comments or []: - _comments_item = comments_item_data - comments_item: Comment - if isinstance(_comments_item, Unset): - comments_item = UNSET - else: - comments_item = Comment.from_dict(_comments_item) + for comments_item_data in _comments or []: + # } + _comments_item = comments_item_data + comments_item: Comment + if isinstance(_comments_item, Unset): + comments_item = UNSET + else: + comments_item = Comment.from_dict(_comments_item) - comments.append(comments_item) + comments.append(comments_item) package_versions = [] _package_versions = d.pop("package_versions", UNSET) - if _package_versions is UNSET: - package_versions = UNSET - else: - for package_versions_item_data in _package_versions or []: - _package_versions_item = package_versions_item_data - package_versions_item: Package - if isinstance(_package_versions_item, Unset): - package_versions_item = UNSET - else: - package_versions_item = Package.from_dict(_package_versions_item) + for package_versions_item_data in _package_versions or []: + # } + _package_versions_item = package_versions_item_data + package_versions_item: Package + if isinstance(_package_versions_item, Unset): + package_versions_item = UNSET + else: + package_versions_item = Package.from_dict(_package_versions_item) - package_versions.append(package_versions_item) + package_versions.append(package_versions_item) acknowledgments = [] _acknowledgments = d.pop("acknowledgments", UNSET) - if _acknowledgments is UNSET: - acknowledgments = UNSET - else: - for acknowledgments_item_data in _acknowledgments or []: - _acknowledgments_item = acknowledgments_item_data - acknowledgments_item: FlawAcknowledgment - if isinstance(_acknowledgments_item, Unset): - acknowledgments_item = UNSET - else: - acknowledgments_item = FlawAcknowledgment.from_dict( - _acknowledgments_item - ) + for acknowledgments_item_data in _acknowledgments or []: + # } + _acknowledgments_item = acknowledgments_item_data + acknowledgments_item: FlawAcknowledgment + if 
isinstance(_acknowledgments_item, Unset): + acknowledgments_item = UNSET + else: + acknowledgments_item = FlawAcknowledgment.from_dict(_acknowledgments_item) - acknowledgments.append(acknowledgments_item) + acknowledgments.append(acknowledgments_item) references = [] _references = d.pop("references", UNSET) - if _references is UNSET: - references = UNSET - else: - for references_item_data in _references or []: - _references_item = references_item_data - references_item: FlawReference - if isinstance(_references_item, Unset): - references_item = UNSET - else: - references_item = FlawReference.from_dict(_references_item) + for references_item_data in _references or []: + # } + _references_item = references_item_data + references_item: FlawReference + if isinstance(_references_item, Unset): + references_item = UNSET + else: + references_item = FlawReference.from_dict(_references_item) - references.append(references_item) + references.append(references_item) cvss_scores = [] _cvss_scores = d.pop("cvss_scores", UNSET) - if _cvss_scores is UNSET: - cvss_scores = UNSET - else: - for cvss_scores_item_data in _cvss_scores or []: - _cvss_scores_item = cvss_scores_item_data - cvss_scores_item: FlawCVSS - if isinstance(_cvss_scores_item, Unset): - cvss_scores_item = UNSET - else: - cvss_scores_item = FlawCVSS.from_dict(_cvss_scores_item) + for cvss_scores_item_data in _cvss_scores or []: + # } + _cvss_scores_item = cvss_scores_item_data + cvss_scores_item: FlawCVSS + if isinstance(_cvss_scores_item, Unset): + cvss_scores_item = UNSET + else: + cvss_scores_item = FlawCVSS.from_dict(_cvss_scores_item) - cvss_scores.append(cvss_scores_item) + cvss_scores.append(cvss_scores_item) embargoed = d.pop("embargoed", UNSET) + # } _created_dt = d.pop("created_dt", UNSET) created_dt: datetime.datetime if isinstance(_created_dt, Unset): @@ -784,6 +827,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: created_dt = isoparse(_created_dt) + # } _classification = d.pop("classification", UNSET) classification: FlawPostClassification if isinstance(_classification, Unset): @@ -793,20 +837,25 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: alerts = [] _alerts = d.pop("alerts", UNSET) - if _alerts is UNSET: - alerts = UNSET - else: - for alerts_item_data in _alerts or []: - _alerts_item = alerts_item_data - alerts_item: Alert - if isinstance(_alerts_item, Unset): - alerts_item = UNSET - else: - alerts_item = Alert.from_dict(_alerts_item) + for alerts_item_data in _alerts or []: + # } + _alerts_item = alerts_item_data + alerts_item: Alert + if isinstance(_alerts_item, Unset): + alerts_item = UNSET + else: + alerts_item = Alert.from_dict(_alerts_item) - alerts.append(alerts_item) + alerts.append(alerts_item) - cve_id = d.pop("cve_id", UNSET) + def _parse_cve_id(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + cve_id = _parse_cve_id(d.pop("cve_id", UNSET)) def _parse_impact(data: object) -> Union[BlankEnum, ImpactEnum, Unset]: if isinstance(data, Unset): @@ -814,8 +863,9 @@ def _parse_impact(data: object) -> Union[BlankEnum, ImpactEnum, Unset]: try: if not isinstance(data, str): raise TypeError() + # } _impact_type_0 = data - impact_type_0: Union[Unset, ImpactEnum] + impact_type_0: ImpactEnum if isinstance(_impact_type_0, Unset): impact_type_0 = UNSET else: @@ -826,8 +876,9 @@ def _parse_impact(data: object) -> Union[BlankEnum, ImpactEnum, Unset]: pass if not isinstance(data, 
str): raise TypeError() + # } _impact_type_1 = data - impact_type_1: Union[Unset, BlankEnum] + impact_type_1: BlankEnum if isinstance(_impact_type_1, Unset): impact_type_1 = UNSET else: @@ -837,61 +888,67 @@ def _parse_impact(data: object) -> Union[BlankEnum, ImpactEnum, Unset]: impact = _parse_impact(d.pop("impact", UNSET)) - components = cast(List[str], d.pop("components", UNSET)) + components = cast(list[str], d.pop("components", UNSET)) cve_description = d.pop("cve_description", UNSET) - def _parse_requires_cve_description( - data: object, - ) -> Union[BlankEnum, RequiresCveDescriptionEnum, Unset]: + def _parse_requires_cve_description(data: object) -> Union[BlankEnum, RequiresCveDescriptionEnum, Unset]: if isinstance(data, Unset): return data try: if not isinstance(data, str): raise TypeError() + # } _requires_cve_description_type_0 = data - requires_cve_description_type_0: Union[ - Unset, RequiresCveDescriptionEnum - ] + requires_cve_description_type_0: RequiresCveDescriptionEnum if isinstance(_requires_cve_description_type_0, Unset): requires_cve_description_type_0 = UNSET else: - requires_cve_description_type_0 = RequiresCveDescriptionEnum( - _requires_cve_description_type_0 - ) + requires_cve_description_type_0 = RequiresCveDescriptionEnum(_requires_cve_description_type_0) return requires_cve_description_type_0 except: # noqa: E722 pass if not isinstance(data, str): raise TypeError() + # } _requires_cve_description_type_1 = data - requires_cve_description_type_1: Union[Unset, BlankEnum] + requires_cve_description_type_1: BlankEnum if isinstance(_requires_cve_description_type_1, Unset): requires_cve_description_type_1 = UNSET else: - requires_cve_description_type_1 = BlankEnum( - _requires_cve_description_type_1 - ) + requires_cve_description_type_1 = BlankEnum(_requires_cve_description_type_1) return requires_cve_description_type_1 - requires_cve_description = _parse_requires_cve_description( - d.pop("requires_cve_description", UNSET) - ) + requires_cve_description = _parse_requires_cve_description(d.pop("requires_cve_description", UNSET)) statement = d.pop("statement", UNSET) cwe_id = d.pop("cwe_id", UNSET) - _unembargo_dt = d.pop("unembargo_dt", UNSET) - unembargo_dt: Union[Unset, None, datetime.datetime] - if _unembargo_dt is None: - unembargo_dt = None - elif isinstance(_unembargo_dt, Unset): - unembargo_dt = UNSET - else: - unembargo_dt = isoparse(_unembargo_dt) + def _parse_unembargo_dt(data: object) -> Union[None, Unset, datetime.datetime]: + if data is None: + return data + if isinstance(data, Unset): + return data + try: + if not isinstance(data, str): + raise TypeError() + # } + _unembargo_dt_type_0 = data + unembargo_dt_type_0: datetime.datetime + if isinstance(_unembargo_dt_type_0, Unset): + unembargo_dt_type_0 = UNSET + else: + unembargo_dt_type_0 = isoparse(_unembargo_dt_type_0) + + return unembargo_dt_type_0 + except: # noqa: E722 + pass + return cast(Union[None, Unset, datetime.datetime], data) + + unembargo_dt = _parse_unembargo_dt(d.pop("unembargo_dt", UNSET)) def _parse_source(data: object) -> Union[BlankEnum, SourceBe0Enum, Unset]: if isinstance(data, Unset): @@ -899,8 +956,9 @@ def _parse_source(data: object) -> Union[BlankEnum, SourceBe0Enum, Unset]: try: if not isinstance(data, str): raise TypeError() + # } _source_type_0 = data - source_type_0: Union[Unset, SourceBe0Enum] + source_type_0: SourceBe0Enum if isinstance(_source_type_0, Unset): source_type_0 = UNSET else: @@ -911,8 +969,9 @@ def _parse_source(data: object) -> Union[BlankEnum, SourceBe0Enum, 
Unset]: pass if not isinstance(data, str): raise TypeError() + # } _source_type_1 = data - source_type_1: Union[Unset, BlankEnum] + source_type_1: BlankEnum if isinstance(_source_type_1, Unset): source_type_1 = UNSET else: @@ -922,41 +981,53 @@ def _parse_source(data: object) -> Union[BlankEnum, SourceBe0Enum, Unset]: source = _parse_source(d.pop("source", UNSET)) - _reported_dt = d.pop("reported_dt", UNSET) - reported_dt: Union[Unset, None, datetime.datetime] - if _reported_dt is None: - reported_dt = None - elif isinstance(_reported_dt, Unset): - reported_dt = UNSET - else: - reported_dt = isoparse(_reported_dt) + def _parse_reported_dt(data: object) -> Union[None, Unset, datetime.datetime]: + if data is None: + return data + if isinstance(data, Unset): + return data + try: + if not isinstance(data, str): + raise TypeError() + # } + _reported_dt_type_0 = data + reported_dt_type_0: datetime.datetime + if isinstance(_reported_dt_type_0, Unset): + reported_dt_type_0 = UNSET + else: + reported_dt_type_0 = isoparse(_reported_dt_type_0) + + return reported_dt_type_0 + except: # noqa: E722 + pass + return cast(Union[None, Unset, datetime.datetime], data) + + reported_dt = _parse_reported_dt(d.pop("reported_dt", UNSET)) mitigation = d.pop("mitigation", UNSET) - def _parse_major_incident_state( - data: object, - ) -> Union[BlankEnum, MajorIncidentStateEnum, Unset]: + def _parse_major_incident_state(data: object) -> Union[BlankEnum, MajorIncidentStateEnum, Unset]: if isinstance(data, Unset): return data try: if not isinstance(data, str): raise TypeError() + # } _major_incident_state_type_0 = data - major_incident_state_type_0: Union[Unset, MajorIncidentStateEnum] + major_incident_state_type_0: MajorIncidentStateEnum if isinstance(_major_incident_state_type_0, Unset): major_incident_state_type_0 = UNSET else: - major_incident_state_type_0 = MajorIncidentStateEnum( - _major_incident_state_type_0 - ) + major_incident_state_type_0 = MajorIncidentStateEnum(_major_incident_state_type_0) return major_incident_state_type_0 except: # noqa: E722 pass if not isinstance(data, str): raise TypeError() + # } _major_incident_state_type_1 = data - major_incident_state_type_1: Union[Unset, BlankEnum] + major_incident_state_type_1: BlankEnum if isinstance(_major_incident_state_type_1, Unset): major_incident_state_type_1 = UNSET else: @@ -964,43 +1035,53 @@ def _parse_major_incident_state( return major_incident_state_type_1 - major_incident_state = _parse_major_incident_state( - d.pop("major_incident_state", UNSET) - ) + major_incident_state = _parse_major_incident_state(d.pop("major_incident_state", UNSET)) - _major_incident_start_dt = d.pop("major_incident_start_dt", UNSET) - major_incident_start_dt: Union[Unset, None, datetime.datetime] - if _major_incident_start_dt is None: - major_incident_start_dt = None - elif isinstance(_major_incident_start_dt, Unset): - major_incident_start_dt = UNSET - else: - major_incident_start_dt = isoparse(_major_incident_start_dt) + def _parse_major_incident_start_dt(data: object) -> Union[None, Unset, datetime.datetime]: + if data is None: + return data + if isinstance(data, Unset): + return data + try: + if not isinstance(data, str): + raise TypeError() + # } + _major_incident_start_dt_type_0 = data + major_incident_start_dt_type_0: datetime.datetime + if isinstance(_major_incident_start_dt_type_0, Unset): + major_incident_start_dt_type_0 = UNSET + else: + major_incident_start_dt_type_0 = isoparse(_major_incident_start_dt_type_0) - def _parse_nist_cvss_validation( - data: object, - 
) -> Union[BlankEnum, NistCvssValidationEnum, Unset]: + return major_incident_start_dt_type_0 + except: # noqa: E722 + pass + return cast(Union[None, Unset, datetime.datetime], data) + + major_incident_start_dt = _parse_major_incident_start_dt(d.pop("major_incident_start_dt", UNSET)) + + def _parse_nist_cvss_validation(data: object) -> Union[BlankEnum, NistCvssValidationEnum, Unset]: if isinstance(data, Unset): return data try: if not isinstance(data, str): raise TypeError() + # } _nist_cvss_validation_type_0 = data - nist_cvss_validation_type_0: Union[Unset, NistCvssValidationEnum] + nist_cvss_validation_type_0: NistCvssValidationEnum if isinstance(_nist_cvss_validation_type_0, Unset): nist_cvss_validation_type_0 = UNSET else: - nist_cvss_validation_type_0 = NistCvssValidationEnum( - _nist_cvss_validation_type_0 - ) + nist_cvss_validation_type_0 = NistCvssValidationEnum(_nist_cvss_validation_type_0) return nist_cvss_validation_type_0 except: # noqa: E722 pass if not isinstance(data, str): raise TypeError() + # } _nist_cvss_validation_type_1 = data - nist_cvss_validation_type_1: Union[Unset, BlankEnum] + nist_cvss_validation_type_1: BlankEnum if isinstance(_nist_cvss_validation_type_1, Unset): nist_cvss_validation_type_1 = UNSET else: @@ -1008,9 +1089,7 @@ def _parse_nist_cvss_validation( return nist_cvss_validation_type_1 - nist_cvss_validation = _parse_nist_cvss_validation( - d.pop("nist_cvss_validation", UNSET) - ) + nist_cvss_validation = _parse_nist_cvss_validation(d.pop("nist_cvss_validation", UNSET)) group_key = d.pop("group_key", UNSET) @@ -1061,33 +1140,33 @@ def _parse_nist_cvss_validation( @staticmethod def get_fields(): return { - "uuid": str, + "uuid": UUID, "title": str, - "trackers": List[str], + "trackers": list[str], "comment_zero": str, - "affects": List[Affect], - "comments": List[Comment], - "package_versions": List[Package], - "acknowledgments": List[FlawAcknowledgment], - "references": List[FlawReference], - "cvss_scores": List[FlawCVSS], + "affects": list["Affect"], + "comments": list["Comment"], + "package_versions": list["Package"], + "acknowledgments": list["FlawAcknowledgment"], + "references": list["FlawReference"], + "cvss_scores": list["FlawCVSS"], "embargoed": bool, "created_dt": datetime.datetime, "classification": FlawPostClassification, - "alerts": List[Alert], - "cve_id": str, + "alerts": list["Alert"], + "cve_id": Union[None, str], "impact": Union[BlankEnum, ImpactEnum], - "components": List[str], + "components": list[str], "cve_description": str, "requires_cve_description": Union[BlankEnum, RequiresCveDescriptionEnum], "statement": str, "cwe_id": str, - "unembargo_dt": datetime.datetime, + "unembargo_dt": Union[None, datetime.datetime], "source": Union[BlankEnum, SourceBe0Enum], - "reported_dt": datetime.datetime, + "reported_dt": Union[None, datetime.datetime], "mitigation": str, "major_incident_state": Union[BlankEnum, MajorIncidentStateEnum], - "major_incident_start_dt": datetime.datetime, + "major_incident_start_dt": Union[None, datetime.datetime], "nist_cvss_validation": Union[BlankEnum, NistCvssValidationEnum], "group_key": str, "owner": str, @@ -1096,7 +1175,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/flaw_post_classification.py b/osidb_bindings/bindings/python_client/models/flaw_post_classification.py index 30d87c0..0505249 
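# Note (not part of the generated diff): nullable timestamp fields are now typed
# Union[None, Unset, datetime.datetime] and parsed through per-field helpers
# such as _parse_reported_dt(). The simplified sketch below shows the intent;
# the real helpers additionally fall back to returning the raw value when
# parsing fails, and use the bindings' own Unset sentinel rather than the
# stand-in defined here.
import datetime
from typing import Union

from dateutil.parser import isoparse


class Unset:  # stand-in for ..types.Unset
    pass


UNSET = Unset()


def parse_nullable_dt(data: object) -> Union[None, Unset, datetime.datetime]:
    if data is None:              # key present with an explicit null
        return None
    if isinstance(data, Unset):   # key missing from the payload
        return data
    return isoparse(data)         # ISO-8601 string


parse_nullable_dt("2024-12-18T18:15:17+01:00")  # -> datetime.datetime(...)
parse_nullable_dt(None)                         # -> None (explicit null)
parse_nullable_dt(UNSET)                        # -> UNSET (field absent)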
100644 --- a/osidb_bindings/bindings/python_client/models/flaw_post_classification.py +++ b/osidb_bindings/bindings/python_client/models/flaw_post_classification.py @@ -1,6 +1,7 @@ -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import Any, TypeVar, Union -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from ..models.flaw_post_classification_state import FlawPostClassificationState from ..types import UNSET, OSIDBModel, Unset @@ -8,22 +9,26 @@ T = TypeVar("T", bound="FlawPostClassification") -@attr.s(auto_attribs=True) +@_attrs_define class FlawPostClassification(OSIDBModel): - """ """ + """ + Attributes: + workflow (Union[Unset, str]): + state (Union[Unset, FlawPostClassificationState]): + """ workflow: Union[Unset, str] = UNSET state: Union[Unset, FlawPostClassificationState] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: workflow = self.workflow + state: Union[Unset, str] = UNSET if not isinstance(self.state, Unset): - state = FlawPostClassificationState(self.state).value - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(workflow, Unset): field_dict["workflow"] = workflow @@ -33,10 +38,11 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: d = src_dict.copy() workflow = d.pop("workflow", UNSET) + # } _state = d.pop("state", UNSET) state: Union[Unset, FlawPostClassificationState] if isinstance(_state, Unset): @@ -60,7 +66,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/flaw_post_classification_state.py b/osidb_bindings/bindings/python_client/models/flaw_post_classification_state.py index 6549611..2d2140e 100644 --- a/osidb_bindings/bindings/python_client/models/flaw_post_classification_state.py +++ b/osidb_bindings/bindings/python_client/models/flaw_post_classification_state.py @@ -2,13 +2,13 @@ class FlawPostClassificationState(str, Enum): - VALUE_0 = "" + DONE = "DONE" NEW = "NEW" - TRIAGE = "TRIAGE" PRE_SECONDARY_ASSESSMENT = "PRE_SECONDARY_ASSESSMENT" - SECONDARY_ASSESSMENT = "SECONDARY_ASSESSMENT" - DONE = "DONE" REJECTED = "REJECTED" + SECONDARY_ASSESSMENT = "SECONDARY_ASSESSMENT" + TRIAGE = "TRIAGE" + VALUE_0 = "" def __str__(self) -> str: return str(self.value) diff --git a/osidb_bindings/bindings/python_client/models/flaw_reference.py b/osidb_bindings/bindings/python_client/models/flaw_reference.py index ea481d8..80c8973 100644 --- a/osidb_bindings/bindings/python_client/models/flaw_reference.py +++ b/osidb_bindings/bindings/python_client/models/flaw_reference.py @@ -1,41 +1,68 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union +from uuid import UUID -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.alert import Alert from ..models.flaw_reference_type import FlawReferenceType from ..types import UNSET, 
OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.alert import Alert + + T = TypeVar("T", bound="FlawReference") -@attr.s(auto_attribs=True) +@_attrs_define class FlawReference(OSIDBModel): - """FlawReference serializer""" - - flaw: str + """FlawReference serializer + + Attributes: + flaw (UUID): + url (str): + uuid (UUID): + embargoed (bool): The embargoed boolean attribute is technically read-only as it just indirectly modifies the + ACLs but is mandatory as it controls the access to the resource. + alerts (list['Alert']): + created_dt (datetime.datetime): + updated_dt (datetime.datetime): The updated_dt timestamp attribute is mandatory on update as it is used to + detect mit-air collisions. + description (Union[Unset, str]): + type_ (Union[Unset, FlawReferenceType]): + """ + + flaw: UUID url: str - uuid: str + uuid: UUID embargoed: bool - alerts: List[Alert] + alerts: list["Alert"] created_dt: datetime.datetime updated_dt: datetime.datetime description: Union[Unset, str] = UNSET - type: Union[Unset, FlawReferenceType] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + type_: Union[Unset, FlawReferenceType] = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + flaw: str = UNSET + if not isinstance(self.flaw, Unset): + flaw = str(self.flaw) - def to_dict(self) -> Dict[str, Any]: - flaw = self.flaw url = self.url - uuid = self.uuid + + uuid: str = UNSET + if not isinstance(self.uuid, Unset): + uuid = str(self.uuid) + embargoed = self.embargoed - alerts: List[Dict[str, Any]] = UNSET + + alerts: list[dict[str, Any]] = UNSET if not isinstance(self.alerts, Unset): alerts = [] for alerts_item_data in self.alerts: - alerts_item: Dict[str, Any] = UNSET + alerts_item: dict[str, Any] = UNSET if not isinstance(alerts_item_data, Unset): alerts_item = alerts_item_data.to_dict() @@ -50,12 +77,12 @@ def to_dict(self) -> Dict[str, Any]: updated_dt = self.updated_dt.isoformat() description = self.description - type: Union[Unset, str] = UNSET - if not isinstance(self.type, Unset): - type = FlawReferenceType(self.type).value + type_: Union[Unset, str] = UNSET + if not isinstance(self.type_, Unset): + type_ = FlawReferenceType(self.type_).value - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(flaw, Unset): field_dict["flaw"] = flaw @@ -73,37 +100,50 @@ def to_dict(self) -> Dict[str, Any]: field_dict["updated_dt"] = updated_dt if not isinstance(description, Unset): field_dict["description"] = description - if not isinstance(type, Unset): - field_dict["type"] = type + if not isinstance(type_, Unset): + field_dict["type"] = type_ return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.alert import Alert + d = src_dict.copy() - flaw = d.pop("flaw", UNSET) + # } + _flaw = d.pop("flaw", UNSET) + flaw: UUID + if isinstance(_flaw, Unset): + flaw = UNSET + else: + flaw = UUID(_flaw) url = d.pop("url", UNSET) - uuid = d.pop("uuid", UNSET) + # } + _uuid = d.pop("uuid", UNSET) + uuid: UUID + if isinstance(_uuid, Unset): + uuid = UNSET + else: + uuid = UUID(_uuid) embargoed = d.pop("embargoed", UNSET) alerts = [] _alerts = d.pop("alerts", UNSET) - if _alerts is UNSET: - alerts = UNSET - else: - for alerts_item_data in _alerts or []: - _alerts_item = alerts_item_data - alerts_item: Alert - if 
isinstance(_alerts_item, Unset): - alerts_item = UNSET - else: - alerts_item = Alert.from_dict(_alerts_item) - - alerts.append(alerts_item) - + for alerts_item_data in _alerts or []: + # } + _alerts_item = alerts_item_data + alerts_item: Alert + if isinstance(_alerts_item, Unset): + alerts_item = UNSET + else: + alerts_item = Alert.from_dict(_alerts_item) + + alerts.append(alerts_item) + + # } _created_dt = d.pop("created_dt", UNSET) created_dt: datetime.datetime if isinstance(_created_dt, Unset): @@ -111,6 +151,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: created_dt = isoparse(_created_dt) + # } _updated_dt = d.pop("updated_dt", UNSET) updated_dt: datetime.datetime if isinstance(_updated_dt, Unset): @@ -120,12 +161,13 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: description = d.pop("description", UNSET) - _type = d.pop("type", UNSET) - type: Union[Unset, FlawReferenceType] - if isinstance(_type, Unset): - type = UNSET + # } + _type_ = d.pop("type", UNSET) + type_: Union[Unset, FlawReferenceType] + if isinstance(_type_, Unset): + type_ = UNSET else: - type = FlawReferenceType(_type) + type_ = FlawReferenceType(_type_) flaw_reference = cls( flaw=flaw, @@ -136,7 +178,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: created_dt=created_dt, updated_dt=updated_dt, description=description, - type=type, + type_=type_, ) flaw_reference.additional_properties = d @@ -145,11 +187,11 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: @staticmethod def get_fields(): return { - "flaw": str, + "flaw": UUID, "url": str, - "uuid": str, + "uuid": UUID, "embargoed": bool, - "alerts": List[Alert], + "alerts": list["Alert"], "created_dt": datetime.datetime, "updated_dt": datetime.datetime, "description": str, @@ -157,7 +199,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/flaw_reference_post.py b/osidb_bindings/bindings/python_client/models/flaw_reference_post.py index 2f61d82..8e23d1b 100644 --- a/osidb_bindings/bindings/python_client/models/flaw_reference_post.py +++ b/osidb_bindings/bindings/python_client/models/flaw_reference_post.py @@ -1,39 +1,60 @@ import datetime import json -from typing import Any, Dict, List, Tuple, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union +from uuid import UUID -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.alert import Alert from ..models.flaw_reference_type import FlawReferenceType from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.alert import Alert + + T = TypeVar("T", bound="FlawReferencePost") -@attr.s(auto_attribs=True) +@_attrs_define class FlawReferencePost(OSIDBModel): - """FlawReference serializer""" + """FlawReference serializer + + Attributes: + url (str): + uuid (UUID): + embargoed (bool): The embargoed boolean attribute is technically read-only as it just indirectly modifies the + ACLs but is mandatory as it controls the access to the resource. 
+ alerts (list['Alert']): + created_dt (datetime.datetime): + description (Union[Unset, str]): + type_ (Union[Unset, FlawReferenceType]): + """ url: str - uuid: str + uuid: UUID embargoed: bool - alerts: List[Alert] + alerts: list["Alert"] created_dt: datetime.datetime description: Union[Unset, str] = UNSET - type: Union[Unset, FlawReferenceType] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + type_: Union[Unset, FlawReferenceType] = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: url = self.url - uuid = self.uuid + + uuid: str = UNSET + if not isinstance(self.uuid, Unset): + uuid = str(self.uuid) + embargoed = self.embargoed - alerts: List[Dict[str, Any]] = UNSET + + alerts: list[dict[str, Any]] = UNSET if not isinstance(self.alerts, Unset): alerts = [] for alerts_item_data in self.alerts: - alerts_item: Dict[str, Any] = UNSET + alerts_item: dict[str, Any] = UNSET if not isinstance(alerts_item_data, Unset): alerts_item = alerts_item_data.to_dict() @@ -44,12 +65,12 @@ def to_dict(self) -> Dict[str, Any]: created_dt = self.created_dt.isoformat() description = self.description - type: Union[Unset, str] = UNSET - if not isinstance(self.type, Unset): - type = FlawReferenceType(self.type).value + type_: Union[Unset, str] = UNSET + if not isinstance(self.type_, Unset): + type_ = FlawReferenceType(self.type_).value - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(url, Unset): field_dict["url"] = url @@ -63,51 +84,50 @@ def to_dict(self) -> Dict[str, Any]: field_dict["created_dt"] = created_dt if not isinstance(description, Unset): field_dict["description"] = description - if not isinstance(type, Unset): - field_dict["type"] = type + if not isinstance(type_, Unset): + field_dict["type"] = type_ return field_dict - def to_multipart(self) -> Dict[str, Any]: - url = self.url if self.url is UNSET else (None, str(self.url), "text/plain") - uuid = self.uuid if self.uuid is UNSET else (None, str(self.uuid), "text/plain") - embargoed = ( - self.embargoed - if self.embargoed is UNSET - else (None, str(self.embargoed), "text/plain") - ) - alerts: Union[Unset, Tuple[None, str, str]] = UNSET + def to_multipart(self) -> dict[str, Any]: + url = (None, str(self.url).encode(), "text/plain") + + uuid: bytes = UNSET + if not isinstance(self.uuid, Unset): + uuid = str(self.uuid) + + embargoed = (None, str(self.embargoed).encode(), "text/plain") + + alerts: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.alerts, Unset): _temp_alerts = [] for alerts_item_data in self.alerts: - alerts_item: Dict[str, Any] = UNSET + alerts_item: dict[str, Any] = UNSET if not isinstance(alerts_item_data, Unset): alerts_item = alerts_item_data.to_dict() _temp_alerts.append(alerts_item) - alerts = (None, json.dumps(_temp_alerts), "application/json") + alerts = (None, json.dumps(_temp_alerts).encode(), "application/json") - created_dt: str = UNSET + created_dt: bytes = UNSET if not isinstance(self.created_dt, Unset): - created_dt = self.created_dt.isoformat() + created_dt = self.created_dt.isoformat().encode() description = ( self.description - if self.description is UNSET - else (None, str(self.description), "text/plain") + if isinstance(self.description, Unset) + else (None, str(self.description).encode(), "text/plain") ) - type: Union[Unset, Tuple[None, str, str]] = UNSET - if not 
isinstance(self.type, Unset): - type = FlawReferenceType(self.type).value + type_: Union[Unset, tuple[None, bytes, str]] = UNSET + if not isinstance(self.type_, Unset): + type_ = (None, str(self.type_.value).encode(), "text/plain") + # CHANGE END (3) #} + + field_dict: dict[str, Any] = {} + for prop_name, prop in self.additional_properties.items(): + field_dict[prop_name] = (None, str(prop).encode(), "text/plain") - field_dict: Dict[str, Any] = {} - field_dict.update( - { - key: (None, str(value), "text/plain") - for key, value in self.additional_properties.items() - } - ) if not isinstance(url, Unset): field_dict["url"] = url if not isinstance(uuid, Unset): @@ -120,35 +140,42 @@ def to_multipart(self) -> Dict[str, Any]: field_dict["created_dt"] = created_dt if not isinstance(description, Unset): field_dict["description"] = description - if not isinstance(type, Unset): - field_dict["type"] = type + if not isinstance(type_, Unset): + field_dict["type"] = type_ return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.alert import Alert + d = src_dict.copy() url = d.pop("url", UNSET) - uuid = d.pop("uuid", UNSET) + # } + _uuid = d.pop("uuid", UNSET) + uuid: UUID + if isinstance(_uuid, Unset): + uuid = UNSET + else: + uuid = UUID(_uuid) embargoed = d.pop("embargoed", UNSET) alerts = [] _alerts = d.pop("alerts", UNSET) - if _alerts is UNSET: - alerts = UNSET - else: - for alerts_item_data in _alerts or []: - _alerts_item = alerts_item_data - alerts_item: Alert - if isinstance(_alerts_item, Unset): - alerts_item = UNSET - else: - alerts_item = Alert.from_dict(_alerts_item) - - alerts.append(alerts_item) - + for alerts_item_data in _alerts or []: + # } + _alerts_item = alerts_item_data + alerts_item: Alert + if isinstance(_alerts_item, Unset): + alerts_item = UNSET + else: + alerts_item = Alert.from_dict(_alerts_item) + + alerts.append(alerts_item) + + # } _created_dt = d.pop("created_dt", UNSET) created_dt: datetime.datetime if isinstance(_created_dt, Unset): @@ -158,12 +185,13 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: description = d.pop("description", UNSET) - _type = d.pop("type", UNSET) - type: Union[Unset, FlawReferenceType] - if isinstance(_type, Unset): - type = UNSET + # } + _type_ = d.pop("type", UNSET) + type_: Union[Unset, FlawReferenceType] + if isinstance(_type_, Unset): + type_ = UNSET else: - type = FlawReferenceType(_type) + type_ = FlawReferenceType(_type_) flaw_reference_post = cls( url=url, @@ -172,7 +200,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: alerts=alerts, created_dt=created_dt, description=description, - type=type, + type_=type_, ) flaw_reference_post.additional_properties = d @@ -182,16 +210,16 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: def get_fields(): return { "url": str, - "uuid": str, + "uuid": UUID, "embargoed": bool, - "alerts": List[Alert], + "alerts": list["Alert"], "created_dt": datetime.datetime, "description": str, "type": FlawReferenceType, } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/flaw_reference_put.py b/osidb_bindings/bindings/python_client/models/flaw_reference_put.py index 9fafd75..626f3fe 100644 --- a/osidb_bindings/bindings/python_client/models/flaw_reference_put.py +++ 
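# Note (not part of the generated diff): the former `type` attribute is renamed
# to `type_` on the regenerated models, while the serialized key stays "type",
# and UUID-valued fields are now parsed into uuid.UUID. The sketch below is
# illustrative only: the import path assumes the package layout shown in this
# patch, the UUIDs are made up, and "ARTICLE" is an assumed FlawReferenceType
# member, so substitute a value that exists in your enum.
from osidb_bindings.bindings.python_client.models import FlawReference

ref = FlawReference.from_dict(
    {
        "flaw": "6d8e1c3a-1111-4222-8333-444455556666",
        "uuid": "7f9a2b4c-1111-4222-8333-444455556666",
        "url": "https://example.com/advisory",
        "embargoed": False,
        "alerts": [],
        "created_dt": "2024-12-18T18:15:17+01:00",
        "updated_dt": "2024-12-18T18:15:17+01:00",
        "type": "ARTICLE",
    }
)
print(ref.type_)               # was ref.type before this regeneration
print(ref.to_dict()["type"])   # the wire-format key is unchanged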
b/osidb_bindings/bindings/python_client/models/flaw_reference_put.py @@ -1,40 +1,63 @@ import datetime import json -from typing import Any, Dict, List, Tuple, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union +from uuid import UUID -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.alert import Alert from ..models.flaw_reference_type import FlawReferenceType from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.alert import Alert + + T = TypeVar("T", bound="FlawReferencePut") -@attr.s(auto_attribs=True) +@_attrs_define class FlawReferencePut(OSIDBModel): - """FlawReference serializer""" + """FlawReference serializer + + Attributes: + url (str): + uuid (UUID): + embargoed (bool): The embargoed boolean attribute is technically read-only as it just indirectly modifies the + ACLs but is mandatory as it controls the access to the resource. + alerts (list['Alert']): + created_dt (datetime.datetime): + updated_dt (datetime.datetime): The updated_dt timestamp attribute is mandatory on update as it is used to + detect mit-air collisions. + description (Union[Unset, str]): + type_ (Union[Unset, FlawReferenceType]): + """ url: str - uuid: str + uuid: UUID embargoed: bool - alerts: List[Alert] + alerts: list["Alert"] created_dt: datetime.datetime updated_dt: datetime.datetime description: Union[Unset, str] = UNSET - type: Union[Unset, FlawReferenceType] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + type_: Union[Unset, FlawReferenceType] = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: url = self.url - uuid = self.uuid + + uuid: str = UNSET + if not isinstance(self.uuid, Unset): + uuid = str(self.uuid) + embargoed = self.embargoed - alerts: List[Dict[str, Any]] = UNSET + + alerts: list[dict[str, Any]] = UNSET if not isinstance(self.alerts, Unset): alerts = [] for alerts_item_data in self.alerts: - alerts_item: Dict[str, Any] = UNSET + alerts_item: dict[str, Any] = UNSET if not isinstance(alerts_item_data, Unset): alerts_item = alerts_item_data.to_dict() @@ -49,12 +72,12 @@ def to_dict(self) -> Dict[str, Any]: updated_dt = self.updated_dt.isoformat() description = self.description - type: Union[Unset, str] = UNSET - if not isinstance(self.type, Unset): - type = FlawReferenceType(self.type).value + type_: Union[Unset, str] = UNSET + if not isinstance(self.type_, Unset): + type_ = FlawReferenceType(self.type_).value - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(url, Unset): field_dict["url"] = url @@ -70,55 +93,54 @@ def to_dict(self) -> Dict[str, Any]: field_dict["updated_dt"] = updated_dt if not isinstance(description, Unset): field_dict["description"] = description - if not isinstance(type, Unset): - field_dict["type"] = type + if not isinstance(type_, Unset): + field_dict["type"] = type_ return field_dict - def to_multipart(self) -> Dict[str, Any]: - url = self.url if self.url is UNSET else (None, str(self.url), "text/plain") - uuid = self.uuid if self.uuid is UNSET else (None, str(self.uuid), "text/plain") - embargoed = ( - self.embargoed - if self.embargoed is UNSET - else (None, str(self.embargoed), "text/plain") - ) - alerts: Union[Unset, Tuple[None, str, str]] = UNSET + def to_multipart(self) -> dict[str, 
Any]: + url = (None, str(self.url).encode(), "text/plain") + + uuid: bytes = UNSET + if not isinstance(self.uuid, Unset): + uuid = str(self.uuid) + + embargoed = (None, str(self.embargoed).encode(), "text/plain") + + alerts: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.alerts, Unset): _temp_alerts = [] for alerts_item_data in self.alerts: - alerts_item: Dict[str, Any] = UNSET + alerts_item: dict[str, Any] = UNSET if not isinstance(alerts_item_data, Unset): alerts_item = alerts_item_data.to_dict() _temp_alerts.append(alerts_item) - alerts = (None, json.dumps(_temp_alerts), "application/json") + alerts = (None, json.dumps(_temp_alerts).encode(), "application/json") - created_dt: str = UNSET + created_dt: bytes = UNSET if not isinstance(self.created_dt, Unset): - created_dt = self.created_dt.isoformat() + created_dt = self.created_dt.isoformat().encode() - updated_dt: str = UNSET + updated_dt: bytes = UNSET if not isinstance(self.updated_dt, Unset): - updated_dt = self.updated_dt.isoformat() + updated_dt = self.updated_dt.isoformat().encode() description = ( self.description - if self.description is UNSET - else (None, str(self.description), "text/plain") + if isinstance(self.description, Unset) + else (None, str(self.description).encode(), "text/plain") ) - type: Union[Unset, Tuple[None, str, str]] = UNSET - if not isinstance(self.type, Unset): - type = FlawReferenceType(self.type).value + type_: Union[Unset, tuple[None, bytes, str]] = UNSET + if not isinstance(self.type_, Unset): + type_ = (None, str(self.type_.value).encode(), "text/plain") + # CHANGE END (3) #} + + field_dict: dict[str, Any] = {} + for prop_name, prop in self.additional_properties.items(): + field_dict[prop_name] = (None, str(prop).encode(), "text/plain") - field_dict: Dict[str, Any] = {} - field_dict.update( - { - key: (None, str(value), "text/plain") - for key, value in self.additional_properties.items() - } - ) if not isinstance(url, Unset): field_dict["url"] = url if not isinstance(uuid, Unset): @@ -133,35 +155,42 @@ def to_multipart(self) -> Dict[str, Any]: field_dict["updated_dt"] = updated_dt if not isinstance(description, Unset): field_dict["description"] = description - if not isinstance(type, Unset): - field_dict["type"] = type + if not isinstance(type_, Unset): + field_dict["type"] = type_ return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.alert import Alert + d = src_dict.copy() url = d.pop("url", UNSET) - uuid = d.pop("uuid", UNSET) + # } + _uuid = d.pop("uuid", UNSET) + uuid: UUID + if isinstance(_uuid, Unset): + uuid = UNSET + else: + uuid = UUID(_uuid) embargoed = d.pop("embargoed", UNSET) alerts = [] _alerts = d.pop("alerts", UNSET) - if _alerts is UNSET: - alerts = UNSET - else: - for alerts_item_data in _alerts or []: - _alerts_item = alerts_item_data - alerts_item: Alert - if isinstance(_alerts_item, Unset): - alerts_item = UNSET - else: - alerts_item = Alert.from_dict(_alerts_item) - - alerts.append(alerts_item) - + for alerts_item_data in _alerts or []: + # } + _alerts_item = alerts_item_data + alerts_item: Alert + if isinstance(_alerts_item, Unset): + alerts_item = UNSET + else: + alerts_item = Alert.from_dict(_alerts_item) + + alerts.append(alerts_item) + + # } _created_dt = d.pop("created_dt", UNSET) created_dt: datetime.datetime if isinstance(_created_dt, Unset): @@ -169,6 +198,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: 
created_dt = isoparse(_created_dt) + # } _updated_dt = d.pop("updated_dt", UNSET) updated_dt: datetime.datetime if isinstance(_updated_dt, Unset): @@ -178,12 +208,13 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: description = d.pop("description", UNSET) - _type = d.pop("type", UNSET) - type: Union[Unset, FlawReferenceType] - if isinstance(_type, Unset): - type = UNSET + # } + _type_ = d.pop("type", UNSET) + type_: Union[Unset, FlawReferenceType] + if isinstance(_type_, Unset): + type_ = UNSET else: - type = FlawReferenceType(_type) + type_ = FlawReferenceType(_type_) flaw_reference_put = cls( url=url, @@ -193,7 +224,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: created_dt=created_dt, updated_dt=updated_dt, description=description, - type=type, + type_=type_, ) flaw_reference_put.additional_properties = d @@ -203,9 +234,9 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: def get_fields(): return { "url": str, - "uuid": str, + "uuid": UUID, "embargoed": bool, - "alerts": List[Alert], + "alerts": list["Alert"], "created_dt": datetime.datetime, "updated_dt": datetime.datetime, "description": str, @@ -213,7 +244,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/flaw_report_data.py b/osidb_bindings/bindings/python_client/models/flaw_report_data.py index ce0fa82..1792b3f 100644 --- a/osidb_bindings/bindings/python_client/models/flaw_report_data.py +++ b/osidb_bindings/bindings/python_client/models/flaw_report_data.py @@ -1,34 +1,47 @@ -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union, cast -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field -from ..models.affect_report_data import AffectReportData from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.affect_report_data import AffectReportData + + T = TypeVar("T", bound="FlawReportData") -@attr.s(auto_attribs=True) +@_attrs_define class FlawReportData(OSIDBModel): - """ """ - - cve_id: Union[Unset, None, str] = UNSET - affects: Union[Unset, List[AffectReportData]] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + """ + Attributes: + cve_id (Union[None, Unset, str]): + affects (Union[Unset, list['AffectReportData']]): + """ + + cve_id: Union[None, Unset, str] = UNSET + affects: Union[Unset, list["AffectReportData"]] = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + cve_id: Union[None, Unset, str] + if isinstance(self.cve_id, Unset): + cve_id = UNSET + else: + cve_id = self.cve_id - def to_dict(self) -> Dict[str, Any]: - cve_id = self.cve_id - affects: Union[Unset, List[Dict[str, Any]]] = UNSET + affects: Union[Unset, list[dict[str, Any]]] = UNSET if not isinstance(self.affects, Unset): affects = [] for affects_item_data in self.affects: - affects_item: Dict[str, Any] = UNSET + affects_item: dict[str, Any] = UNSET if not isinstance(affects_item_data, Unset): affects_item = affects_item_data.to_dict() affects.append(affects_item) - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(cve_id, Unset): field_dict["cve_id"] = cve_id @@ -38,24 +51,32 @@ def to_dict(self) -> 
Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.affect_report_data import AffectReportData + d = src_dict.copy() - cve_id = d.pop("cve_id", UNSET) + + def _parse_cve_id(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + cve_id = _parse_cve_id(d.pop("cve_id", UNSET)) affects = [] _affects = d.pop("affects", UNSET) - if _affects is UNSET: - affects = UNSET - else: - for affects_item_data in _affects or []: - _affects_item = affects_item_data - affects_item: AffectReportData - if isinstance(_affects_item, Unset): - affects_item = UNSET - else: - affects_item = AffectReportData.from_dict(_affects_item) + for affects_item_data in _affects or []: + # } + _affects_item = affects_item_data + affects_item: AffectReportData + if isinstance(_affects_item, Unset): + affects_item = UNSET + else: + affects_item = AffectReportData.from_dict(_affects_item) - affects.append(affects_item) + affects.append(affects_item) flaw_report_data = cls( cve_id=cve_id, @@ -68,12 +89,12 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: @staticmethod def get_fields(): return { - "cve_id": str, - "affects": List[AffectReportData], + "cve_id": Union[None, str], + "affects": list["AffectReportData"], } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/flaw_uuid_list.py b/osidb_bindings/bindings/python_client/models/flaw_uuid_list.py index 85d8933..c79cd3c 100644 --- a/osidb_bindings/bindings/python_client/models/flaw_uuid_list.py +++ b/osidb_bindings/bindings/python_client/models/flaw_uuid_list.py @@ -1,54 +1,79 @@ import json -from typing import Any, Dict, List, Tuple, Type, TypeVar, Union, cast +from typing import Any, TypeVar, Union +from uuid import UUID -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from ..types import UNSET, OSIDBModel, Unset T = TypeVar("T", bound="FlawUUIDList") -@attr.s(auto_attribs=True) +@_attrs_define class FlawUUIDList(OSIDBModel): - """ """ + """ + Attributes: + flaw_uuids (list[UUID]): + """ - flaw_uuids: List[str] - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + flaw_uuids: list[UUID] + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: - flaw_uuids: List[str] = UNSET + def to_dict(self) -> dict[str, Any]: + flaw_uuids: list[str] = UNSET if not isinstance(self.flaw_uuids, Unset): - flaw_uuids = self.flaw_uuids + flaw_uuids = [] + for flaw_uuids_item_data in self.flaw_uuids: + flaw_uuids_item: str = UNSET + if not isinstance(flaw_uuids_item_data, Unset): + flaw_uuids_item = str(flaw_uuids_item_data) - field_dict: Dict[str, Any] = {} + flaw_uuids.append(flaw_uuids_item) + + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(flaw_uuids, Unset): field_dict["flaw_uuids"] = flaw_uuids return field_dict - def to_multipart(self) -> Dict[str, Any]: - flaw_uuids: Union[Unset, Tuple[None, str, str]] = UNSET + def to_multipart(self) -> dict[str, Any]: + flaw_uuids: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.flaw_uuids, Unset): - 
_temp_flaw_uuids = self.flaw_uuids - flaw_uuids = (None, json.dumps(_temp_flaw_uuids), "application/json") - - field_dict: Dict[str, Any] = {} - field_dict.update( - { - key: (None, str(value), "text/plain") - for key, value in self.additional_properties.items() - } - ) + _temp_flaw_uuids = [] + for flaw_uuids_item_data in self.flaw_uuids: + flaw_uuids_item: str = UNSET + if not isinstance(flaw_uuids_item_data, Unset): + flaw_uuids_item = str(flaw_uuids_item_data) + + _temp_flaw_uuids.append(flaw_uuids_item) + flaw_uuids = (None, json.dumps(_temp_flaw_uuids).encode(), "application/json") + + field_dict: dict[str, Any] = {} + for prop_name, prop in self.additional_properties.items(): + field_dict[prop_name] = (None, str(prop).encode(), "text/plain") + if not isinstance(flaw_uuids, Unset): field_dict["flaw_uuids"] = flaw_uuids return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: d = src_dict.copy() - flaw_uuids = cast(List[str], d.pop("flaw_uuids", UNSET)) + flaw_uuids = [] + _flaw_uuids = d.pop("flaw_uuids", UNSET) + for flaw_uuids_item_data in _flaw_uuids or []: + # } + _flaw_uuids_item = flaw_uuids_item_data + flaw_uuids_item: UUID + if isinstance(_flaw_uuids_item, Unset): + flaw_uuids_item = UNSET + else: + flaw_uuids_item = UUID(_flaw_uuids_item) + + flaw_uuids.append(flaw_uuids_item) flaw_uuid_list = cls( flaw_uuids=flaw_uuids, @@ -60,11 +85,11 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: @staticmethod def get_fields(): return { - "flaw_uuids": List[str], + "flaw_uuids": list[UUID], } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/flaw_version.py b/osidb_bindings/bindings/python_client/models/flaw_version.py index 0438341..13beaa5 100644 --- a/osidb_bindings/bindings/python_client/models/flaw_version.py +++ b/osidb_bindings/bindings/python_client/models/flaw_version.py @@ -1,23 +1,28 @@ -from typing import Any, Dict, List, Type, TypeVar +from typing import Any, TypeVar -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from ..types import UNSET, OSIDBModel, Unset T = TypeVar("T", bound="FlawVersion") -@attr.s(auto_attribs=True) +@_attrs_define class FlawVersion(OSIDBModel): - """PackageVer serializer used by FlawPackageVersionSerializer.""" + """PackageVer serializer used by FlawPackageVersionSerializer. 
+ + Attributes: + version (str): + """ version: str - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(version, Unset): field_dict["version"] = version @@ -25,7 +30,7 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: d = src_dict.copy() version = d.pop("version", UNSET) @@ -43,7 +48,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/impact_enum.py b/osidb_bindings/bindings/python_client/models/impact_enum.py index bcfb004..61c9b9a 100644 --- a/osidb_bindings/bindings/python_client/models/impact_enum.py +++ b/osidb_bindings/bindings/python_client/models/impact_enum.py @@ -2,10 +2,10 @@ class ImpactEnum(str, Enum): + CRITICAL = "CRITICAL" + IMPORTANT = "IMPORTANT" LOW = "LOW" MODERATE = "MODERATE" - IMPORTANT = "IMPORTANT" - CRITICAL = "CRITICAL" def __str__(self) -> str: return str(self.value) diff --git a/osidb_bindings/bindings/python_client/models/issuer_enum.py b/osidb_bindings/bindings/python_client/models/issuer_enum.py index ef01f1a..9ace496 100644 --- a/osidb_bindings/bindings/python_client/models/issuer_enum.py +++ b/osidb_bindings/bindings/python_client/models/issuer_enum.py @@ -3,9 +3,9 @@ class IssuerEnum(str, Enum): CVEORG = "CVEORG" - RH = "RH" NIST = "NIST" OSV = "OSV" + RH = "RH" def __str__(self) -> str: return str(self.value) diff --git a/osidb_bindings/bindings/python_client/models/major_incident_state_enum.py b/osidb_bindings/bindings/python_client/models/major_incident_state_enum.py index f548bcd..f00a1ad 100644 --- a/osidb_bindings/bindings/python_client/models/major_incident_state_enum.py +++ b/osidb_bindings/bindings/python_client/models/major_incident_state_enum.py @@ -2,13 +2,13 @@ class MajorIncidentStateEnum(str, Enum): - REQUESTED = "REQUESTED" - REJECTED = "REJECTED" APPROVED = "APPROVED" CISA_APPROVED = "CISA_APPROVED" + INVALID = "INVALID" MINOR = "MINOR" + REJECTED = "REJECTED" + REQUESTED = "REQUESTED" ZERO_DAY = "ZERO_DAY" - INVALID = "INVALID" def __str__(self) -> str: return str(self.value) diff --git a/osidb_bindings/bindings/python_client/models/module_component.py b/osidb_bindings/bindings/python_client/models/module_component.py index d8958cc..75df7c2 100644 --- a/osidb_bindings/bindings/python_client/models/module_component.py +++ b/osidb_bindings/bindings/python_client/models/module_component.py @@ -1,44 +1,58 @@ -from typing import Any, Dict, List, Type, TypeVar +from typing import TYPE_CHECKING, Any, TypeVar -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field -from ..models.affect import Affect -from ..models.ps_stream_selection import PsStreamSelection from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.affect import Affect + from ..models.ps_stream_selection import PsStreamSelection + + T = TypeVar("T", bound="ModuleComponent") -@attr.s(auto_attribs=True) +@_attrs_define class 
ModuleComponent(OSIDBModel): - """ """ + """ + Attributes: + ps_module (str): + ps_component (str): + streams (list['PsStreamSelection']): + selected (bool): + affect (Affect): Affect serializer + """ ps_module: str ps_component: str - streams: List[PsStreamSelection] + streams: list["PsStreamSelection"] selected: bool - affect: Affect - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + affect: "Affect" + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: ps_module = self.ps_module + ps_component = self.ps_component - streams: List[Dict[str, Any]] = UNSET + + streams: list[dict[str, Any]] = UNSET if not isinstance(self.streams, Unset): streams = [] for streams_item_data in self.streams: - streams_item: Dict[str, Any] = UNSET + streams_item: dict[str, Any] = UNSET if not isinstance(streams_item_data, Unset): streams_item = streams_item_data.to_dict() streams.append(streams_item) selected = self.selected - affect: Dict[str, Any] = UNSET + + affect: dict[str, Any] = UNSET if not isinstance(self.affect, Unset): affect = self.affect.to_dict() - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(ps_module, Unset): field_dict["ps_module"] = ps_module @@ -54,7 +68,10 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.affect import Affect + from ..models.ps_stream_selection import PsStreamSelection + d = src_dict.copy() ps_module = d.pop("ps_module", UNSET) @@ -62,21 +79,20 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: streams = [] _streams = d.pop("streams", UNSET) - if _streams is UNSET: - streams = UNSET - else: - for streams_item_data in _streams or []: - _streams_item = streams_item_data - streams_item: PsStreamSelection - if isinstance(_streams_item, Unset): - streams_item = UNSET - else: - streams_item = PsStreamSelection.from_dict(_streams_item) + for streams_item_data in _streams or []: + # } + _streams_item = streams_item_data + streams_item: PsStreamSelection + if isinstance(_streams_item, Unset): + streams_item = UNSET + else: + streams_item = PsStreamSelection.from_dict(_streams_item) - streams.append(streams_item) + streams.append(streams_item) selected = d.pop("selected", UNSET) + # } _affect = d.pop("affect", UNSET) affect: Affect if isinstance(_affect, Unset): @@ -100,13 +116,13 @@ def get_fields(): return { "ps_module": str, "ps_component": str, - "streams": List[PsStreamSelection], + "streams": list["PsStreamSelection"], "selected": bool, "affect": Affect, } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/nist_cvss_validation_enum.py b/osidb_bindings/bindings/python_client/models/nist_cvss_validation_enum.py index e12cfda..c51009c 100644 --- a/osidb_bindings/bindings/python_client/models/nist_cvss_validation_enum.py +++ b/osidb_bindings/bindings/python_client/models/nist_cvss_validation_enum.py @@ -2,9 +2,9 @@ class NistCvssValidationEnum(str, Enum): - REQUESTED = "REQUESTED" APPROVED = "APPROVED" REJECTED = "REJECTED" + REQUESTED = "REQUESTED" def __str__(self) -> str: return str(self.value) diff --git 
a/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_bulk_create_response_200.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_bulk_create_response_200.py index 0c19cb4..a322185 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_bulk_create_response_200.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_bulk_create_response_200.py @@ -1,32 +1,43 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.affect import Affect from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.affect import Affect + + T = TypeVar("T", bound="OsidbApiV1AffectsBulkCreateResponse200") -@attr.s(auto_attribs=True) +@_attrs_define class OsidbApiV1AffectsBulkCreateResponse200(OSIDBModel): - """ """ - - results: List[Affect] + """ + Attributes: + results (list['Affect']): + dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ + + results: list["Affect"] dt: Union[Unset, datetime.datetime] = UNSET env: Union[Unset, str] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: - results: List[Dict[str, Any]] = UNSET + def to_dict(self) -> dict[str, Any]: + results: list[dict[str, Any]] = UNSET if not isinstance(self.results, Unset): results = [] for results_item_data in self.results: - results_item: Dict[str, Any] = UNSET + results_item: dict[str, Any] = UNSET if not isinstance(results_item_data, Unset): results_item = results_item_data.to_dict() @@ -37,10 +48,12 @@ def to_dict(self) -> Dict[str, Any]: dt = self.dt.isoformat() env = self.env + revision = self.revision + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(results, Unset): field_dict["results"] = results @@ -56,23 +69,24 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.affect import Affect + d = src_dict.copy() results = [] _results = d.pop("results", UNSET) - if _results is UNSET: - results = UNSET - else: - for results_item_data in _results or []: - _results_item = results_item_data - results_item: Affect - if isinstance(_results_item, Unset): - results_item = UNSET - else: - results_item = Affect.from_dict(_results_item) - - results.append(results_item) - + for results_item_data in _results or []: + # } + _results_item = results_item_data + results_item: Affect + if isinstance(_results_item, Unset): + results_item = UNSET + else: + results_item = Affect.from_dict(_results_item) + + results.append(results_item) + + # } _dt = d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -100,7 +114,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: @staticmethod def get_fields(): return { - "results": List[Affect], + "results": list["Affect"], "dt": datetime.datetime, "env": str, "revision": str, @@ -108,7 +122,7 @@ def 
get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_bulk_destroy_response_200.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_bulk_destroy_response_200.py index 550ece8..ed4bd80 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_bulk_destroy_response_200.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_bulk_destroy_response_200.py @@ -1,7 +1,8 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import Any, TypeVar, Union -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse from ..types import UNSET, OSIDBModel, Unset @@ -9,26 +10,34 @@ T = TypeVar("T", bound="OsidbApiV1AffectsBulkDestroyResponse200") -@attr.s(auto_attribs=True) +@_attrs_define class OsidbApiV1AffectsBulkDestroyResponse200(OSIDBModel): - """ """ + """ + Attributes: + dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ dt: Union[Unset, datetime.datetime] = UNSET env: Union[Unset, str] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: dt: Union[Unset, str] = UNSET if not isinstance(self.dt, Unset): dt = self.dt.isoformat() env = self.env + revision = self.revision + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(dt, Unset): field_dict["dt"] = dt @@ -42,8 +51,9 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: d = src_dict.copy() + # } _dt = d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -77,7 +87,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_bulk_update_response_200.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_bulk_update_response_200.py index 54aa093..912ec80 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_bulk_update_response_200.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_bulk_update_response_200.py @@ -1,32 +1,43 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.affect import Affect from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.affect import Affect + + T = TypeVar("T", bound="OsidbApiV1AffectsBulkUpdateResponse200") -@attr.s(auto_attribs=True) +@_attrs_define class 
OsidbApiV1AffectsBulkUpdateResponse200(OSIDBModel): - """ """ - - results: List[Affect] + """ + Attributes: + results (list['Affect']): + dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ + + results: list["Affect"] dt: Union[Unset, datetime.datetime] = UNSET env: Union[Unset, str] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: - results: List[Dict[str, Any]] = UNSET + def to_dict(self) -> dict[str, Any]: + results: list[dict[str, Any]] = UNSET if not isinstance(self.results, Unset): results = [] for results_item_data in self.results: - results_item: Dict[str, Any] = UNSET + results_item: dict[str, Any] = UNSET if not isinstance(results_item_data, Unset): results_item = results_item_data.to_dict() @@ -37,10 +48,12 @@ def to_dict(self) -> Dict[str, Any]: dt = self.dt.isoformat() env = self.env + revision = self.revision + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(results, Unset): field_dict["results"] = results @@ -56,23 +69,24 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.affect import Affect + d = src_dict.copy() results = [] _results = d.pop("results", UNSET) - if _results is UNSET: - results = UNSET - else: - for results_item_data in _results or []: - _results_item = results_item_data - results_item: Affect - if isinstance(_results_item, Unset): - results_item = UNSET - else: - results_item = Affect.from_dict(_results_item) - - results.append(results_item) - + for results_item_data in _results or []: + # } + _results_item = results_item_data + results_item: Affect + if isinstance(_results_item, Unset): + results_item = UNSET + else: + results_item = Affect.from_dict(_results_item) + + results.append(results_item) + + # } _dt = d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -100,7 +114,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: @staticmethod def get_fields(): return { - "results": List[Affect], + "results": list["Affect"], "dt": datetime.datetime, "env": str, "revision": str, @@ -108,7 +122,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_create_response_201.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_create_response_201.py index 065e332..fc566e0 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_create_response_201.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_create_response_201.py @@ -1,78 +1,123 @@ import datetime -from typing import Any, Dict, List, Optional, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union, cast +from uuid import UUID -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.affect_cvss import 
AffectCVSS from ..models.affectedness_enum import AffectednessEnum -from ..models.alert import Alert from ..models.blank_enum import BlankEnum from ..models.impact_enum import ImpactEnum from ..models.resolution_enum import ResolutionEnum -from ..models.tracker import Tracker from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.affect_cvss import AffectCVSS + from ..models.alert import Alert + from ..models.tracker import Tracker + + T = TypeVar("T", bound="OsidbApiV1AffectsCreateResponse201") -@attr.s(auto_attribs=True) +@_attrs_define class OsidbApiV1AffectsCreateResponse201(OSIDBModel): - """ """ - - uuid: str + """ + Attributes: + uuid (UUID): + flaw (Union[None, UUID]): + ps_module (str): + ps_product (str): + trackers (list['Tracker']): + delegated_resolution (str): + cvss_scores (list['AffectCVSS']): + embargoed (bool): The embargoed boolean attribute is technically read-only as it just indirectly modifies the + ACLs but is mandatory as it controls the access to the resource. + alerts (list['Alert']): + created_dt (datetime.datetime): + updated_dt (datetime.datetime): The updated_dt timestamp attribute is mandatory on update as it is used to + detect mit-air collisions. + affectedness (Union[AffectednessEnum, BlankEnum, Unset]): + resolution (Union[BlankEnum, ResolutionEnum, Unset]): + ps_component (Union[None, Unset, str]): + impact (Union[BlankEnum, ImpactEnum, Unset]): + purl (Union[None, Unset, str]): + dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ + + uuid: UUID + flaw: Union[None, UUID] ps_module: str ps_product: str - trackers: List[Tracker] + trackers: list["Tracker"] delegated_resolution: str - cvss_scores: List[AffectCVSS] + cvss_scores: list["AffectCVSS"] embargoed: bool - alerts: List[Alert] + alerts: list["Alert"] created_dt: datetime.datetime updated_dt: datetime.datetime - flaw: Optional[str] affectedness: Union[AffectednessEnum, BlankEnum, Unset] = UNSET resolution: Union[BlankEnum, ResolutionEnum, Unset] = UNSET - ps_component: Union[Unset, None, str] = UNSET + ps_component: Union[None, Unset, str] = UNSET impact: Union[BlankEnum, ImpactEnum, Unset] = UNSET - purl: Union[Unset, None, str] = UNSET + purl: Union[None, Unset, str] = UNSET dt: Union[Unset, datetime.datetime] = UNSET env: Union[Unset, str] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + uuid: str = UNSET + if not isinstance(self.uuid, Unset): + uuid = str(self.uuid) + + flaw: Union[None, str] + if isinstance(self.flaw, UUID): + flaw = UNSET + if not isinstance(self.flaw, Unset): + flaw = str(self.flaw) + + else: + flaw = self.flaw - def to_dict(self) -> Dict[str, Any]: - uuid = self.uuid ps_module = self.ps_module + ps_product = self.ps_product - trackers: List[Dict[str, Any]] = UNSET + + trackers: list[dict[str, Any]] = UNSET if not isinstance(self.trackers, Unset): trackers = [] for trackers_item_data in self.trackers: - trackers_item: Dict[str, Any] = UNSET + trackers_item: dict[str, Any] = UNSET if not isinstance(trackers_item_data, Unset): trackers_item = trackers_item_data.to_dict() trackers.append(trackers_item) delegated_resolution = self.delegated_resolution - cvss_scores: List[Dict[str, Any]] = UNSET + + cvss_scores: list[dict[str, Any]] = UNSET if not 
isinstance(self.cvss_scores, Unset): cvss_scores = [] for cvss_scores_item_data in self.cvss_scores: - cvss_scores_item: Dict[str, Any] = UNSET + cvss_scores_item: dict[str, Any] = UNSET if not isinstance(cvss_scores_item_data, Unset): cvss_scores_item = cvss_scores_item_data.to_dict() cvss_scores.append(cvss_scores_item) embargoed = self.embargoed - alerts: List[Dict[str, Any]] = UNSET + + alerts: list[dict[str, Any]] = UNSET if not isinstance(self.alerts, Unset): alerts = [] for alerts_item_data in self.alerts: - alerts_item: Dict[str, Any] = UNSET + alerts_item: dict[str, Any] = UNSET if not isinstance(alerts_item_data, Unset): alerts_item = alerts_item_data.to_dict() @@ -86,20 +131,17 @@ def to_dict(self) -> Dict[str, Any]: if not isinstance(self.updated_dt, Unset): updated_dt = self.updated_dt.isoformat() - flaw = self.flaw affectedness: Union[Unset, str] if isinstance(self.affectedness, Unset): affectedness = UNSET elif isinstance(self.affectedness, AffectednessEnum): affectedness = UNSET if not isinstance(self.affectedness, Unset): - affectedness = AffectednessEnum(self.affectedness).value else: affectedness = UNSET if not isinstance(self.affectedness, Unset): - affectedness = BlankEnum(self.affectedness).value resolution: Union[Unset, str] @@ -108,44 +150,54 @@ def to_dict(self) -> Dict[str, Any]: elif isinstance(self.resolution, ResolutionEnum): resolution = UNSET if not isinstance(self.resolution, Unset): - resolution = ResolutionEnum(self.resolution).value else: resolution = UNSET if not isinstance(self.resolution, Unset): - resolution = BlankEnum(self.resolution).value - ps_component = self.ps_component + ps_component: Union[None, Unset, str] + if isinstance(self.ps_component, Unset): + ps_component = UNSET + else: + ps_component = self.ps_component + impact: Union[Unset, str] if isinstance(self.impact, Unset): impact = UNSET elif isinstance(self.impact, ImpactEnum): impact = UNSET if not isinstance(self.impact, Unset): - impact = ImpactEnum(self.impact).value else: impact = UNSET if not isinstance(self.impact, Unset): - impact = BlankEnum(self.impact).value - purl = self.purl + purl: Union[None, Unset, str] + if isinstance(self.purl, Unset): + purl = UNSET + else: + purl = self.purl + dt: Union[Unset, str] = UNSET if not isinstance(self.dt, Unset): dt = self.dt.isoformat() env = self.env + revision = self.revision + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(uuid, Unset): field_dict["uuid"] = uuid + if not isinstance(flaw, Unset): + field_dict["flaw"] = flaw if not isinstance(ps_module, Unset): field_dict["ps_module"] = ps_module if not isinstance(ps_product, Unset): @@ -164,8 +216,6 @@ def to_dict(self) -> Dict[str, Any]: field_dict["created_dt"] = created_dt if not isinstance(updated_dt, Unset): field_dict["updated_dt"] = updated_dt - if not isinstance(flaw, Unset): - field_dict["flaw"] = flaw if not isinstance(affectedness, Unset): field_dict["affectedness"] = affectedness if not isinstance(resolution, Unset): @@ -188,9 +238,40 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.affect_cvss import AffectCVSS + from ..models.alert import Alert + from ..models.tracker import Tracker + d = src_dict.copy() - uuid = d.pop("uuid", UNSET) + # } + _uuid = d.pop("uuid", UNSET) + uuid: UUID + if isinstance(_uuid, 
Unset): + uuid = UNSET + else: + uuid = UUID(_uuid) + + def _parse_flaw(data: object) -> Union[None, UUID]: + if data is None: + return data + try: + if not isinstance(data, str): + raise TypeError() + # } + _flaw_type_0 = data + flaw_type_0: UUID + if isinstance(_flaw_type_0, Unset): + flaw_type_0 = UNSET + else: + flaw_type_0 = UUID(_flaw_type_0) + + return flaw_type_0 + except: # noqa: E722 + pass + return cast(Union[None, UUID], data) + + flaw = _parse_flaw(d.pop("flaw", UNSET)) ps_module = d.pop("ps_module", UNSET) @@ -198,53 +279,48 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: trackers = [] _trackers = d.pop("trackers", UNSET) - if _trackers is UNSET: - trackers = UNSET - else: - for trackers_item_data in _trackers or []: - _trackers_item = trackers_item_data - trackers_item: Tracker - if isinstance(_trackers_item, Unset): - trackers_item = UNSET - else: - trackers_item = Tracker.from_dict(_trackers_item) + for trackers_item_data in _trackers or []: + # } + _trackers_item = trackers_item_data + trackers_item: Tracker + if isinstance(_trackers_item, Unset): + trackers_item = UNSET + else: + trackers_item = Tracker.from_dict(_trackers_item) - trackers.append(trackers_item) + trackers.append(trackers_item) delegated_resolution = d.pop("delegated_resolution", UNSET) cvss_scores = [] _cvss_scores = d.pop("cvss_scores", UNSET) - if _cvss_scores is UNSET: - cvss_scores = UNSET - else: - for cvss_scores_item_data in _cvss_scores or []: - _cvss_scores_item = cvss_scores_item_data - cvss_scores_item: AffectCVSS - if isinstance(_cvss_scores_item, Unset): - cvss_scores_item = UNSET - else: - cvss_scores_item = AffectCVSS.from_dict(_cvss_scores_item) + for cvss_scores_item_data in _cvss_scores or []: + # } + _cvss_scores_item = cvss_scores_item_data + cvss_scores_item: AffectCVSS + if isinstance(_cvss_scores_item, Unset): + cvss_scores_item = UNSET + else: + cvss_scores_item = AffectCVSS.from_dict(_cvss_scores_item) - cvss_scores.append(cvss_scores_item) + cvss_scores.append(cvss_scores_item) embargoed = d.pop("embargoed", UNSET) alerts = [] _alerts = d.pop("alerts", UNSET) - if _alerts is UNSET: - alerts = UNSET - else: - for alerts_item_data in _alerts or []: - _alerts_item = alerts_item_data - alerts_item: Alert - if isinstance(_alerts_item, Unset): - alerts_item = UNSET - else: - alerts_item = Alert.from_dict(_alerts_item) + for alerts_item_data in _alerts or []: + # } + _alerts_item = alerts_item_data + alerts_item: Alert + if isinstance(_alerts_item, Unset): + alerts_item = UNSET + else: + alerts_item = Alert.from_dict(_alerts_item) - alerts.append(alerts_item) + alerts.append(alerts_item) + # } _created_dt = d.pop("created_dt", UNSET) created_dt: datetime.datetime if isinstance(_created_dt, Unset): @@ -252,6 +328,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: created_dt = isoparse(_created_dt) + # } _updated_dt = d.pop("updated_dt", UNSET) updated_dt: datetime.datetime if isinstance(_updated_dt, Unset): @@ -259,18 +336,15 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: updated_dt = isoparse(_updated_dt) - flaw = d.pop("flaw", UNSET) - - def _parse_affectedness( - data: object, - ) -> Union[AffectednessEnum, BlankEnum, Unset]: + def _parse_affectedness(data: object) -> Union[AffectednessEnum, BlankEnum, Unset]: if isinstance(data, Unset): return data try: if not isinstance(data, str): raise TypeError() + # } _affectedness_type_0 = data - affectedness_type_0: Union[Unset, AffectednessEnum] + affectedness_type_0: AffectednessEnum 
if isinstance(_affectedness_type_0, Unset): affectedness_type_0 = UNSET else: @@ -281,8 +355,9 @@ def _parse_affectedness( pass if not isinstance(data, str): raise TypeError() + # } _affectedness_type_1 = data - affectedness_type_1: Union[Unset, BlankEnum] + affectedness_type_1: BlankEnum if isinstance(_affectedness_type_1, Unset): affectedness_type_1 = UNSET else: @@ -298,8 +373,9 @@ def _parse_resolution(data: object) -> Union[BlankEnum, ResolutionEnum, Unset]: try: if not isinstance(data, str): raise TypeError() + # } _resolution_type_0 = data - resolution_type_0: Union[Unset, ResolutionEnum] + resolution_type_0: ResolutionEnum if isinstance(_resolution_type_0, Unset): resolution_type_0 = UNSET else: @@ -310,8 +386,9 @@ def _parse_resolution(data: object) -> Union[BlankEnum, ResolutionEnum, Unset]: pass if not isinstance(data, str): raise TypeError() + # } _resolution_type_1 = data - resolution_type_1: Union[Unset, BlankEnum] + resolution_type_1: BlankEnum if isinstance(_resolution_type_1, Unset): resolution_type_1 = UNSET else: @@ -321,7 +398,14 @@ def _parse_resolution(data: object) -> Union[BlankEnum, ResolutionEnum, Unset]: resolution = _parse_resolution(d.pop("resolution", UNSET)) - ps_component = d.pop("ps_component", UNSET) + def _parse_ps_component(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + ps_component = _parse_ps_component(d.pop("ps_component", UNSET)) def _parse_impact(data: object) -> Union[BlankEnum, ImpactEnum, Unset]: if isinstance(data, Unset): @@ -329,8 +413,9 @@ def _parse_impact(data: object) -> Union[BlankEnum, ImpactEnum, Unset]: try: if not isinstance(data, str): raise TypeError() + # } _impact_type_0 = data - impact_type_0: Union[Unset, ImpactEnum] + impact_type_0: ImpactEnum if isinstance(_impact_type_0, Unset): impact_type_0 = UNSET else: @@ -341,8 +426,9 @@ def _parse_impact(data: object) -> Union[BlankEnum, ImpactEnum, Unset]: pass if not isinstance(data, str): raise TypeError() + # } _impact_type_1 = data - impact_type_1: Union[Unset, BlankEnum] + impact_type_1: BlankEnum if isinstance(_impact_type_1, Unset): impact_type_1 = UNSET else: @@ -352,8 +438,16 @@ def _parse_impact(data: object) -> Union[BlankEnum, ImpactEnum, Unset]: impact = _parse_impact(d.pop("impact", UNSET)) - purl = d.pop("purl", UNSET) + def _parse_purl(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + purl = _parse_purl(d.pop("purl", UNSET)) + + # } _dt = d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -369,6 +463,7 @@ def _parse_impact(data: object) -> Union[BlankEnum, ImpactEnum, Unset]: osidb_api_v1_affects_create_response_201 = cls( uuid=uuid, + flaw=flaw, ps_module=ps_module, ps_product=ps_product, trackers=trackers, @@ -378,7 +473,6 @@ def _parse_impact(data: object) -> Union[BlankEnum, ImpactEnum, Unset]: alerts=alerts, created_dt=created_dt, updated_dt=updated_dt, - flaw=flaw, affectedness=affectedness, resolution=resolution, ps_component=ps_component, @@ -396,22 +490,22 @@ def _parse_impact(data: object) -> Union[BlankEnum, ImpactEnum, Unset]: @staticmethod def get_fields(): return { - "uuid": str, + "uuid": UUID, + "flaw": Union[None, UUID], "ps_module": str, "ps_product": str, - "trackers": List[Tracker], + "trackers": list["Tracker"], "delegated_resolution": str, - "cvss_scores": List[AffectCVSS], + 
"cvss_scores": list["AffectCVSS"], "embargoed": bool, - "alerts": List[Alert], + "alerts": list["Alert"], "created_dt": datetime.datetime, "updated_dt": datetime.datetime, - "flaw": str, "affectedness": Union[AffectednessEnum, BlankEnum], "resolution": Union[BlankEnum, ResolutionEnum], - "ps_component": str, + "ps_component": Union[None, str], "impact": Union[BlankEnum, ImpactEnum], - "purl": str, + "purl": Union[None, str], "dt": datetime.datetime, "env": str, "revision": str, @@ -419,7 +513,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_cvss_scores_create_response_201.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_cvss_scores_create_response_201.py index c177ee7..2b3764c 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_cvss_scores_create_response_201.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_cvss_scores_create_response_201.py @@ -1,58 +1,86 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union, cast +from uuid import UUID -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.alert import Alert from ..models.cvss_version_enum import CvssVersionEnum from ..models.issuer_enum import IssuerEnum from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.alert import Alert + + T = TypeVar("T", bound="OsidbApiV1AffectsCvssScoresCreateResponse201") -@attr.s(auto_attribs=True) +@_attrs_define class OsidbApiV1AffectsCvssScoresCreateResponse201(OSIDBModel): - """ """ + """ + Attributes: + cvss_version (CvssVersionEnum): + issuer (IssuerEnum): + score (float): + uuid (UUID): + vector (str): + embargoed (bool): The embargoed boolean attribute is technically read-only as it just indirectly modifies the + ACLs but is mandatory as it controls the access to the resource. + alerts (list['Alert']): + created_dt (datetime.datetime): + updated_dt (datetime.datetime): The updated_dt timestamp attribute is mandatory on update as it is used to + detect mit-air collisions. 
+ affect (Union[Unset, UUID]): + comment (Union[None, Unset, str]): + dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ cvss_version: CvssVersionEnum issuer: IssuerEnum score: float - uuid: str + uuid: UUID vector: str embargoed: bool - alerts: List[Alert] + alerts: list["Alert"] created_dt: datetime.datetime updated_dt: datetime.datetime - affect: Union[Unset, str] = UNSET - comment: Union[Unset, None, str] = UNSET + affect: Union[Unset, UUID] = UNSET + comment: Union[None, Unset, str] = UNSET dt: Union[Unset, datetime.datetime] = UNSET env: Union[Unset, str] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: cvss_version: str = UNSET if not isinstance(self.cvss_version, Unset): - cvss_version = CvssVersionEnum(self.cvss_version).value issuer: str = UNSET if not isinstance(self.issuer, Unset): - issuer = IssuerEnum(self.issuer).value score = self.score - uuid = self.uuid + + uuid: str = UNSET + if not isinstance(self.uuid, Unset): + uuid = str(self.uuid) + vector = self.vector + embargoed = self.embargoed - alerts: List[Dict[str, Any]] = UNSET + + alerts: list[dict[str, Any]] = UNSET if not isinstance(self.alerts, Unset): alerts = [] for alerts_item_data in self.alerts: - alerts_item: Dict[str, Any] = UNSET + alerts_item: dict[str, Any] = UNSET if not isinstance(alerts_item_data, Unset): alerts_item = alerts_item_data.to_dict() @@ -66,17 +94,27 @@ def to_dict(self) -> Dict[str, Any]: if not isinstance(self.updated_dt, Unset): updated_dt = self.updated_dt.isoformat() - affect = self.affect - comment = self.comment + affect: Union[Unset, str] = UNSET + if not isinstance(self.affect, Unset): + affect = str(self.affect) + + comment: Union[None, Unset, str] + if isinstance(self.comment, Unset): + comment = UNSET + else: + comment = self.comment + dt: Union[Unset, str] = UNSET if not isinstance(self.dt, Unset): dt = self.dt.isoformat() env = self.env + revision = self.revision + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(cvss_version, Unset): field_dict["cvss_version"] = cvss_version @@ -112,8 +150,11 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.alert import Alert + d = src_dict.copy() + # } _cvss_version = d.pop("cvss_version", UNSET) cvss_version: CvssVersionEnum if isinstance(_cvss_version, Unset): @@ -121,6 +162,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: cvss_version = CvssVersionEnum(_cvss_version) + # } _issuer = d.pop("issuer", UNSET) issuer: IssuerEnum if isinstance(_issuer, Unset): @@ -130,7 +172,13 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: score = d.pop("score", UNSET) - uuid = d.pop("uuid", UNSET) + # } + _uuid = d.pop("uuid", UNSET) + uuid: UUID + if isinstance(_uuid, Unset): + uuid = UNSET + else: + uuid = UUID(_uuid) vector = d.pop("vector", UNSET) @@ -138,19 +186,18 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: alerts = [] _alerts = d.pop("alerts", UNSET) - if _alerts is UNSET: - alerts = UNSET - else: 
- for alerts_item_data in _alerts or []: - _alerts_item = alerts_item_data - alerts_item: Alert - if isinstance(_alerts_item, Unset): - alerts_item = UNSET - else: - alerts_item = Alert.from_dict(_alerts_item) - - alerts.append(alerts_item) - + for alerts_item_data in _alerts or []: + # } + _alerts_item = alerts_item_data + alerts_item: Alert + if isinstance(_alerts_item, Unset): + alerts_item = UNSET + else: + alerts_item = Alert.from_dict(_alerts_item) + + alerts.append(alerts_item) + + # } _created_dt = d.pop("created_dt", UNSET) created_dt: datetime.datetime if isinstance(_created_dt, Unset): @@ -158,6 +205,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: created_dt = isoparse(_created_dt) + # } _updated_dt = d.pop("updated_dt", UNSET) updated_dt: datetime.datetime if isinstance(_updated_dt, Unset): @@ -165,10 +213,24 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: updated_dt = isoparse(_updated_dt) - affect = d.pop("affect", UNSET) + # } + _affect = d.pop("affect", UNSET) + affect: Union[Unset, UUID] + if isinstance(_affect, Unset): + affect = UNSET + else: + affect = UUID(_affect) + + def _parse_comment(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) - comment = d.pop("comment", UNSET) + comment = _parse_comment(d.pop("comment", UNSET)) + # } _dt = d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -209,14 +271,14 @@ def get_fields(): "cvss_version": CvssVersionEnum, "issuer": IssuerEnum, "score": float, - "uuid": str, + "uuid": UUID, "vector": str, "embargoed": bool, - "alerts": List[Alert], + "alerts": list["Alert"], "created_dt": datetime.datetime, "updated_dt": datetime.datetime, - "affect": str, - "comment": str, + "affect": UUID, + "comment": Union[None, str], "dt": datetime.datetime, "env": str, "revision": str, @@ -224,7 +286,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_cvss_scores_destroy_response_200.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_cvss_scores_destroy_response_200.py index 489d815..0bca607 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_cvss_scores_destroy_response_200.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_cvss_scores_destroy_response_200.py @@ -1,7 +1,8 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import Any, TypeVar, Union -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse from ..types import UNSET, OSIDBModel, Unset @@ -9,26 +10,34 @@ T = TypeVar("T", bound="OsidbApiV1AffectsCvssScoresDestroyResponse200") -@attr.s(auto_attribs=True) +@_attrs_define class OsidbApiV1AffectsCvssScoresDestroyResponse200(OSIDBModel): - """ """ + """ + Attributes: + dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ dt: Union[Unset, datetime.datetime] = UNSET env: Union[Unset, str] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + 
additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: dt: Union[Unset, str] = UNSET if not isinstance(self.dt, Unset): dt = self.dt.isoformat() env = self.env + revision = self.revision + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(dt, Unset): field_dict["dt"] = dt @@ -42,8 +51,9 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: d = src_dict.copy() + # } _dt = d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -77,7 +87,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_cvss_scores_list_response_200.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_cvss_scores_list_response_200.py index 4ed052b..cc1a300 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_cvss_scores_list_response_200.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_cvss_scores_list_response_200.py @@ -1,52 +1,79 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union, cast -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.affect_cvss import AffectCVSS from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.affect_cvss import AffectCVSS + + T = TypeVar("T", bound="OsidbApiV1AffectsCvssScoresListResponse200") -@attr.s(auto_attribs=True) +@_attrs_define class OsidbApiV1AffectsCvssScoresListResponse200(OSIDBModel): - """ """ + """ + Attributes: + count (int): Example: 123. + results (list['AffectCVSS']): + next_ (Union[None, Unset, str]): Example: http://api.example.org/accounts/?offset=400&limit=100. + previous (Union[None, Unset, str]): Example: http://api.example.org/accounts/?offset=200&limit=100. 
+ dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ count: int - results: List[AffectCVSS] - next_: Union[Unset, None, str] = UNSET - previous: Union[Unset, None, str] = UNSET + results: list["AffectCVSS"] + next_: Union[None, Unset, str] = UNSET + previous: Union[None, Unset, str] = UNSET dt: Union[Unset, datetime.datetime] = UNSET env: Union[Unset, str] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: count = self.count - results: List[Dict[str, Any]] = UNSET + + results: list[dict[str, Any]] = UNSET if not isinstance(self.results, Unset): results = [] for results_item_data in self.results: - results_item: Dict[str, Any] = UNSET + results_item: dict[str, Any] = UNSET if not isinstance(results_item_data, Unset): results_item = results_item_data.to_dict() results.append(results_item) - next_ = self.next_ - previous = self.previous + next_: Union[None, Unset, str] + if isinstance(self.next_, Unset): + next_ = UNSET + else: + next_ = self.next_ + + previous: Union[None, Unset, str] + if isinstance(self.previous, Unset): + previous = UNSET + else: + previous = self.previous + dt: Union[Unset, str] = UNSET if not isinstance(self.dt, Unset): dt = self.dt.isoformat() env = self.env + revision = self.revision + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(count, Unset): field_dict["count"] = count @@ -68,29 +95,44 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.affect_cvss import AffectCVSS + d = src_dict.copy() count = d.pop("count", UNSET) results = [] _results = d.pop("results", UNSET) - if _results is UNSET: - results = UNSET - else: - for results_item_data in _results or []: - _results_item = results_item_data - results_item: AffectCVSS - if isinstance(_results_item, Unset): - results_item = UNSET - else: - results_item = AffectCVSS.from_dict(_results_item) - - results.append(results_item) - - next_ = d.pop("next", UNSET) - - previous = d.pop("previous", UNSET) - + for results_item_data in _results or []: + # } + _results_item = results_item_data + results_item: AffectCVSS + if isinstance(_results_item, Unset): + results_item = UNSET + else: + results_item = AffectCVSS.from_dict(_results_item) + + results.append(results_item) + + def _parse_next_(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + next_ = _parse_next_(d.pop("next", UNSET)) + + def _parse_previous(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + previous = _parse_previous(d.pop("previous", UNSET)) + + # } _dt = d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -122,9 +164,9 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: def get_fields(): return { "count": int, - "results": List[AffectCVSS], - "next": str, - "previous": str, + 
"results": list["AffectCVSS"], + "next": Union[None, str], + "previous": Union[None, str], "dt": datetime.datetime, "env": str, "revision": str, @@ -132,7 +174,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_cvss_scores_retrieve_response_200.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_cvss_scores_retrieve_response_200.py index a2f597e..ee629d2 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_cvss_scores_retrieve_response_200.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_cvss_scores_retrieve_response_200.py @@ -1,58 +1,86 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union, cast +from uuid import UUID -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.alert import Alert from ..models.cvss_version_enum import CvssVersionEnum from ..models.issuer_enum import IssuerEnum from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.alert import Alert + + T = TypeVar("T", bound="OsidbApiV1AffectsCvssScoresRetrieveResponse200") -@attr.s(auto_attribs=True) +@_attrs_define class OsidbApiV1AffectsCvssScoresRetrieveResponse200(OSIDBModel): - """ """ + """ + Attributes: + cvss_version (CvssVersionEnum): + issuer (IssuerEnum): + score (float): + uuid (UUID): + vector (str): + embargoed (bool): The embargoed boolean attribute is technically read-only as it just indirectly modifies the + ACLs but is mandatory as it controls the access to the resource. + alerts (list['Alert']): + created_dt (datetime.datetime): + updated_dt (datetime.datetime): The updated_dt timestamp attribute is mandatory on update as it is used to + detect mit-air collisions. 
+ affect (Union[Unset, UUID]): + comment (Union[None, Unset, str]): + dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ cvss_version: CvssVersionEnum issuer: IssuerEnum score: float - uuid: str + uuid: UUID vector: str embargoed: bool - alerts: List[Alert] + alerts: list["Alert"] created_dt: datetime.datetime updated_dt: datetime.datetime - affect: Union[Unset, str] = UNSET - comment: Union[Unset, None, str] = UNSET + affect: Union[Unset, UUID] = UNSET + comment: Union[None, Unset, str] = UNSET dt: Union[Unset, datetime.datetime] = UNSET env: Union[Unset, str] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: cvss_version: str = UNSET if not isinstance(self.cvss_version, Unset): - cvss_version = CvssVersionEnum(self.cvss_version).value issuer: str = UNSET if not isinstance(self.issuer, Unset): - issuer = IssuerEnum(self.issuer).value score = self.score - uuid = self.uuid + + uuid: str = UNSET + if not isinstance(self.uuid, Unset): + uuid = str(self.uuid) + vector = self.vector + embargoed = self.embargoed - alerts: List[Dict[str, Any]] = UNSET + + alerts: list[dict[str, Any]] = UNSET if not isinstance(self.alerts, Unset): alerts = [] for alerts_item_data in self.alerts: - alerts_item: Dict[str, Any] = UNSET + alerts_item: dict[str, Any] = UNSET if not isinstance(alerts_item_data, Unset): alerts_item = alerts_item_data.to_dict() @@ -66,17 +94,27 @@ def to_dict(self) -> Dict[str, Any]: if not isinstance(self.updated_dt, Unset): updated_dt = self.updated_dt.isoformat() - affect = self.affect - comment = self.comment + affect: Union[Unset, str] = UNSET + if not isinstance(self.affect, Unset): + affect = str(self.affect) + + comment: Union[None, Unset, str] + if isinstance(self.comment, Unset): + comment = UNSET + else: + comment = self.comment + dt: Union[Unset, str] = UNSET if not isinstance(self.dt, Unset): dt = self.dt.isoformat() env = self.env + revision = self.revision + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(cvss_version, Unset): field_dict["cvss_version"] = cvss_version @@ -112,8 +150,11 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.alert import Alert + d = src_dict.copy() + # } _cvss_version = d.pop("cvss_version", UNSET) cvss_version: CvssVersionEnum if isinstance(_cvss_version, Unset): @@ -121,6 +162,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: cvss_version = CvssVersionEnum(_cvss_version) + # } _issuer = d.pop("issuer", UNSET) issuer: IssuerEnum if isinstance(_issuer, Unset): @@ -130,7 +172,13 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: score = d.pop("score", UNSET) - uuid = d.pop("uuid", UNSET) + # } + _uuid = d.pop("uuid", UNSET) + uuid: UUID + if isinstance(_uuid, Unset): + uuid = UNSET + else: + uuid = UUID(_uuid) vector = d.pop("vector", UNSET) @@ -138,19 +186,18 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: alerts = [] _alerts = d.pop("alerts", UNSET) - if _alerts is UNSET: - alerts = UNSET - else: 
- for alerts_item_data in _alerts or []: - _alerts_item = alerts_item_data - alerts_item: Alert - if isinstance(_alerts_item, Unset): - alerts_item = UNSET - else: - alerts_item = Alert.from_dict(_alerts_item) - - alerts.append(alerts_item) - + for alerts_item_data in _alerts or []: + # } + _alerts_item = alerts_item_data + alerts_item: Alert + if isinstance(_alerts_item, Unset): + alerts_item = UNSET + else: + alerts_item = Alert.from_dict(_alerts_item) + + alerts.append(alerts_item) + + # } _created_dt = d.pop("created_dt", UNSET) created_dt: datetime.datetime if isinstance(_created_dt, Unset): @@ -158,6 +205,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: created_dt = isoparse(_created_dt) + # } _updated_dt = d.pop("updated_dt", UNSET) updated_dt: datetime.datetime if isinstance(_updated_dt, Unset): @@ -165,10 +213,24 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: updated_dt = isoparse(_updated_dt) - affect = d.pop("affect", UNSET) + # } + _affect = d.pop("affect", UNSET) + affect: Union[Unset, UUID] + if isinstance(_affect, Unset): + affect = UNSET + else: + affect = UUID(_affect) + + def _parse_comment(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) - comment = d.pop("comment", UNSET) + comment = _parse_comment(d.pop("comment", UNSET)) + # } _dt = d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -209,14 +271,14 @@ def get_fields(): "cvss_version": CvssVersionEnum, "issuer": IssuerEnum, "score": float, - "uuid": str, + "uuid": UUID, "vector": str, "embargoed": bool, - "alerts": List[Alert], + "alerts": list["Alert"], "created_dt": datetime.datetime, "updated_dt": datetime.datetime, - "affect": str, - "comment": str, + "affect": UUID, + "comment": Union[None, str], "dt": datetime.datetime, "env": str, "revision": str, @@ -224,7 +286,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_cvss_scores_update_response_200.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_cvss_scores_update_response_200.py index 3b31999..0799c84 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_cvss_scores_update_response_200.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_cvss_scores_update_response_200.py @@ -1,58 +1,86 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union, cast +from uuid import UUID -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.alert import Alert from ..models.cvss_version_enum import CvssVersionEnum from ..models.issuer_enum import IssuerEnum from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.alert import Alert + + T = TypeVar("T", bound="OsidbApiV1AffectsCvssScoresUpdateResponse200") -@attr.s(auto_attribs=True) +@_attrs_define class OsidbApiV1AffectsCvssScoresUpdateResponse200(OSIDBModel): - """ """ + """ + Attributes: + cvss_version (CvssVersionEnum): + issuer (IssuerEnum): + score (float): + uuid (UUID): + vector (str): + embargoed (bool): The embargoed boolean attribute is 
technically read-only as it just indirectly modifies the + ACLs but is mandatory as it controls the access to the resource. + alerts (list['Alert']): + created_dt (datetime.datetime): + updated_dt (datetime.datetime): The updated_dt timestamp attribute is mandatory on update as it is used to + detect mit-air collisions. + affect (Union[Unset, UUID]): + comment (Union[None, Unset, str]): + dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ cvss_version: CvssVersionEnum issuer: IssuerEnum score: float - uuid: str + uuid: UUID vector: str embargoed: bool - alerts: List[Alert] + alerts: list["Alert"] created_dt: datetime.datetime updated_dt: datetime.datetime - affect: Union[Unset, str] = UNSET - comment: Union[Unset, None, str] = UNSET + affect: Union[Unset, UUID] = UNSET + comment: Union[None, Unset, str] = UNSET dt: Union[Unset, datetime.datetime] = UNSET env: Union[Unset, str] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: cvss_version: str = UNSET if not isinstance(self.cvss_version, Unset): - cvss_version = CvssVersionEnum(self.cvss_version).value issuer: str = UNSET if not isinstance(self.issuer, Unset): - issuer = IssuerEnum(self.issuer).value score = self.score - uuid = self.uuid + + uuid: str = UNSET + if not isinstance(self.uuid, Unset): + uuid = str(self.uuid) + vector = self.vector + embargoed = self.embargoed - alerts: List[Dict[str, Any]] = UNSET + + alerts: list[dict[str, Any]] = UNSET if not isinstance(self.alerts, Unset): alerts = [] for alerts_item_data in self.alerts: - alerts_item: Dict[str, Any] = UNSET + alerts_item: dict[str, Any] = UNSET if not isinstance(alerts_item_data, Unset): alerts_item = alerts_item_data.to_dict() @@ -66,17 +94,27 @@ def to_dict(self) -> Dict[str, Any]: if not isinstance(self.updated_dt, Unset): updated_dt = self.updated_dt.isoformat() - affect = self.affect - comment = self.comment + affect: Union[Unset, str] = UNSET + if not isinstance(self.affect, Unset): + affect = str(self.affect) + + comment: Union[None, Unset, str] + if isinstance(self.comment, Unset): + comment = UNSET + else: + comment = self.comment + dt: Union[Unset, str] = UNSET if not isinstance(self.dt, Unset): dt = self.dt.isoformat() env = self.env + revision = self.revision + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(cvss_version, Unset): field_dict["cvss_version"] = cvss_version @@ -112,8 +150,11 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.alert import Alert + d = src_dict.copy() + # } _cvss_version = d.pop("cvss_version", UNSET) cvss_version: CvssVersionEnum if isinstance(_cvss_version, Unset): @@ -121,6 +162,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: cvss_version = CvssVersionEnum(_cvss_version) + # } _issuer = d.pop("issuer", UNSET) issuer: IssuerEnum if isinstance(_issuer, Unset): @@ -130,7 +172,13 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: score = d.pop("score", UNSET) - uuid = d.pop("uuid", UNSET) + # } + 
_uuid = d.pop("uuid", UNSET) + uuid: UUID + if isinstance(_uuid, Unset): + uuid = UNSET + else: + uuid = UUID(_uuid) vector = d.pop("vector", UNSET) @@ -138,19 +186,18 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: alerts = [] _alerts = d.pop("alerts", UNSET) - if _alerts is UNSET: - alerts = UNSET - else: - for alerts_item_data in _alerts or []: - _alerts_item = alerts_item_data - alerts_item: Alert - if isinstance(_alerts_item, Unset): - alerts_item = UNSET - else: - alerts_item = Alert.from_dict(_alerts_item) - - alerts.append(alerts_item) - + for alerts_item_data in _alerts or []: + # } + _alerts_item = alerts_item_data + alerts_item: Alert + if isinstance(_alerts_item, Unset): + alerts_item = UNSET + else: + alerts_item = Alert.from_dict(_alerts_item) + + alerts.append(alerts_item) + + # } _created_dt = d.pop("created_dt", UNSET) created_dt: datetime.datetime if isinstance(_created_dt, Unset): @@ -158,6 +205,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: created_dt = isoparse(_created_dt) + # } _updated_dt = d.pop("updated_dt", UNSET) updated_dt: datetime.datetime if isinstance(_updated_dt, Unset): @@ -165,10 +213,24 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: updated_dt = isoparse(_updated_dt) - affect = d.pop("affect", UNSET) + # } + _affect = d.pop("affect", UNSET) + affect: Union[Unset, UUID] + if isinstance(_affect, Unset): + affect = UNSET + else: + affect = UUID(_affect) + + def _parse_comment(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) - comment = d.pop("comment", UNSET) + comment = _parse_comment(d.pop("comment", UNSET)) + # } _dt = d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -209,14 +271,14 @@ def get_fields(): "cvss_version": CvssVersionEnum, "issuer": IssuerEnum, "score": float, - "uuid": str, + "uuid": UUID, "vector": str, "embargoed": bool, - "alerts": List[Alert], + "alerts": list["Alert"], "created_dt": datetime.datetime, "updated_dt": datetime.datetime, - "affect": str, - "comment": str, + "affect": UUID, + "comment": Union[None, str], "dt": datetime.datetime, "env": str, "revision": str, @@ -224,7 +286,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_destroy_response_200.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_destroy_response_200.py index 47fea84..8252bc9 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_destroy_response_200.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_destroy_response_200.py @@ -1,7 +1,8 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import Any, TypeVar, Union -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse from ..types import UNSET, OSIDBModel, Unset @@ -9,26 +10,34 @@ T = TypeVar("T", bound="OsidbApiV1AffectsDestroyResponse200") -@attr.s(auto_attribs=True) +@_attrs_define class OsidbApiV1AffectsDestroyResponse200(OSIDBModel): - """ """ + """ + Attributes: + dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + revision (Union[Unset, str]): + version (Union[Unset, 
str]): + """ dt: Union[Unset, datetime.datetime] = UNSET env: Union[Unset, str] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: dt: Union[Unset, str] = UNSET if not isinstance(self.dt, Unset): dt = self.dt.isoformat() env = self.env + revision = self.revision + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(dt, Unset): field_dict["dt"] = dt @@ -42,8 +51,9 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: d = src_dict.copy() + # } _dt = d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -77,7 +87,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_list_affectedness.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_list_affectedness.py index 781ba64..c6baeda 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_list_affectedness.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_list_affectedness.py @@ -2,10 +2,10 @@ class OsidbApiV1AffectsListAffectedness(str, Enum): - VALUE_0 = "" AFFECTED = "AFFECTED" NEW = "NEW" NOTAFFECTED = "NOTAFFECTED" + VALUE_0 = "" def __str__(self) -> str: return str(self.value) diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_list_flaw_impact.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_list_flaw_impact.py index 3c9764d..a8c1be5 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_list_flaw_impact.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_list_flaw_impact.py @@ -2,11 +2,11 @@ class OsidbApiV1AffectsListFlawImpact(str, Enum): - VALUE_0 = "" CRITICAL = "CRITICAL" IMPORTANT = "IMPORTANT" LOW = "LOW" MODERATE = "MODERATE" + VALUE_0 = "" def __str__(self) -> str: return str(self.value) diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_list_flaw_source.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_list_flaw_source.py index 36f99fc..72d9d18 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_list_flaw_source.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_list_flaw_source.py @@ -2,7 +2,6 @@ class OsidbApiV1AffectsListFlawSource(str, Enum): - VALUE_0 = "" ADOBE = "ADOBE" APPLE = "APPLE" ASF = "ASF" @@ -86,6 +85,7 @@ class OsidbApiV1AffectsListFlawSource(str, Enum): TWITTER = "TWITTER" UBUNTU = "UBUNTU" UPSTREAM = "UPSTREAM" + VALUE_0 = "" VENDORSEC = "VENDORSEC" VULNWATCH = "VULNWATCH" WIRESHARK = "WIRESHARK" diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_list_impact.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_list_impact.py index d85b901..5a83e4d 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_list_impact.py +++ 
b/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_list_impact.py @@ -2,11 +2,11 @@ class OsidbApiV1AffectsListImpact(str, Enum): - VALUE_0 = "" CRITICAL = "CRITICAL" IMPORTANT = "IMPORTANT" LOW = "LOW" MODERATE = "MODERATE" + VALUE_0 = "" def __str__(self) -> str: return str(self.value) diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_list_order_item.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_list_order_item.py index b57f744..fbbb06f 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_list_order_item.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_list_order_item.py @@ -2,43 +2,6 @@ class OsidbApiV1AffectsListOrderItem(str, Enum): - VALUE_0 = "-affectedness" - VALUE_1 = "-created_dt" - VALUE_2 = "-cvss_scores__comment" - VALUE_3 = "-cvss_scores__created_dt" - VALUE_4 = "-cvss_scores__cvss_version" - VALUE_5 = "-cvss_scores__issuer" - VALUE_6 = "-cvss_scores__score" - VALUE_7 = "-cvss_scores__updated_dt" - VALUE_8 = "-cvss_scores__uuid" - VALUE_9 = "-cvss_scores__vector" - VALUE_10 = "-embargoed" - VALUE_11 = "-flaw__components" - VALUE_12 = "-flaw__created_dt" - VALUE_13 = "-flaw__cve_id" - VALUE_14 = "-flaw__cwe_id" - VALUE_15 = "-flaw__embargoed" - VALUE_16 = "-flaw__impact" - VALUE_17 = "-flaw__reported_dt" - VALUE_18 = "-flaw__source" - VALUE_19 = "-flaw__unembargo_dt" - VALUE_20 = "-flaw__updated_dt" - VALUE_21 = "-flaw__uuid" - VALUE_22 = "-impact" - VALUE_23 = "-ps_component" - VALUE_24 = "-ps_module" - VALUE_25 = "-resolution" - VALUE_26 = "-trackers__created_dt" - VALUE_27 = "-trackers__embargoed" - VALUE_28 = "-trackers__external_system_id" - VALUE_29 = "-trackers__ps_update_stream" - VALUE_30 = "-trackers__resolution" - VALUE_31 = "-trackers__status" - VALUE_32 = "-trackers__type" - VALUE_33 = "-trackers__updated_dt" - VALUE_34 = "-trackers__uuid" - VALUE_35 = "-updated_dt" - VALUE_36 = "-uuid" AFFECTEDNESS = "affectedness" CREATED_DT = "created_dt" CVSS_SCORES_COMMENT = "cvss_scores__comment" @@ -76,6 +39,43 @@ class OsidbApiV1AffectsListOrderItem(str, Enum): TRACKERS_UUID = "trackers__uuid" UPDATED_DT = "updated_dt" UUID = "uuid" + VALUE_0 = "-affectedness" + VALUE_1 = "-created_dt" + VALUE_10 = "-embargoed" + VALUE_11 = "-flaw__components" + VALUE_12 = "-flaw__created_dt" + VALUE_13 = "-flaw__cve_id" + VALUE_14 = "-flaw__cwe_id" + VALUE_15 = "-flaw__embargoed" + VALUE_16 = "-flaw__impact" + VALUE_17 = "-flaw__reported_dt" + VALUE_18 = "-flaw__source" + VALUE_19 = "-flaw__unembargo_dt" + VALUE_2 = "-cvss_scores__comment" + VALUE_20 = "-flaw__updated_dt" + VALUE_21 = "-flaw__uuid" + VALUE_22 = "-impact" + VALUE_23 = "-ps_component" + VALUE_24 = "-ps_module" + VALUE_25 = "-resolution" + VALUE_26 = "-trackers__created_dt" + VALUE_27 = "-trackers__embargoed" + VALUE_28 = "-trackers__external_system_id" + VALUE_29 = "-trackers__ps_update_stream" + VALUE_3 = "-cvss_scores__created_dt" + VALUE_30 = "-trackers__resolution" + VALUE_31 = "-trackers__status" + VALUE_32 = "-trackers__type" + VALUE_33 = "-trackers__updated_dt" + VALUE_34 = "-trackers__uuid" + VALUE_35 = "-updated_dt" + VALUE_36 = "-uuid" + VALUE_4 = "-cvss_scores__cvss_version" + VALUE_5 = "-cvss_scores__issuer" + VALUE_6 = "-cvss_scores__score" + VALUE_7 = "-cvss_scores__updated_dt" + VALUE_8 = "-cvss_scores__uuid" + VALUE_9 = "-cvss_scores__vector" def __str__(self) -> str: return str(self.value) diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_list_resolution.py 
b/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_list_resolution.py index 1843d09..0c2c042 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_list_resolution.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_list_resolution.py @@ -2,11 +2,11 @@ class OsidbApiV1AffectsListResolution(str, Enum): - VALUE_0 = "" DEFER = "DEFER" DELEGATED = "DELEGATED" FIX = "FIX" OOSS = "OOSS" + VALUE_0 = "" WONTFIX = "WONTFIX" WONTREPORT = "WONTREPORT" diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_list_response_200.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_list_response_200.py index e7ae2b6..2ad60cc 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_list_response_200.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_list_response_200.py @@ -1,52 +1,79 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union, cast -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.affect import Affect from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.affect import Affect + + T = TypeVar("T", bound="OsidbApiV1AffectsListResponse200") -@attr.s(auto_attribs=True) +@_attrs_define class OsidbApiV1AffectsListResponse200(OSIDBModel): - """ """ + """ + Attributes: + count (int): Example: 123. + results (list['Affect']): + next_ (Union[None, Unset, str]): Example: http://api.example.org/accounts/?offset=400&limit=100. + previous (Union[None, Unset, str]): Example: http://api.example.org/accounts/?offset=200&limit=100. 
+ dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ count: int - results: List[Affect] - next_: Union[Unset, None, str] = UNSET - previous: Union[Unset, None, str] = UNSET + results: list["Affect"] + next_: Union[None, Unset, str] = UNSET + previous: Union[None, Unset, str] = UNSET dt: Union[Unset, datetime.datetime] = UNSET env: Union[Unset, str] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: count = self.count - results: List[Dict[str, Any]] = UNSET + + results: list[dict[str, Any]] = UNSET if not isinstance(self.results, Unset): results = [] for results_item_data in self.results: - results_item: Dict[str, Any] = UNSET + results_item: dict[str, Any] = UNSET if not isinstance(results_item_data, Unset): results_item = results_item_data.to_dict() results.append(results_item) - next_ = self.next_ - previous = self.previous + next_: Union[None, Unset, str] + if isinstance(self.next_, Unset): + next_ = UNSET + else: + next_ = self.next_ + + previous: Union[None, Unset, str] + if isinstance(self.previous, Unset): + previous = UNSET + else: + previous = self.previous + dt: Union[Unset, str] = UNSET if not isinstance(self.dt, Unset): dt = self.dt.isoformat() env = self.env + revision = self.revision + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(count, Unset): field_dict["count"] = count @@ -68,29 +95,44 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.affect import Affect + d = src_dict.copy() count = d.pop("count", UNSET) results = [] _results = d.pop("results", UNSET) - if _results is UNSET: - results = UNSET - else: - for results_item_data in _results or []: - _results_item = results_item_data - results_item: Affect - if isinstance(_results_item, Unset): - results_item = UNSET - else: - results_item = Affect.from_dict(_results_item) - - results.append(results_item) - - next_ = d.pop("next", UNSET) - - previous = d.pop("previous", UNSET) - + for results_item_data in _results or []: + # } + _results_item = results_item_data + results_item: Affect + if isinstance(_results_item, Unset): + results_item = UNSET + else: + results_item = Affect.from_dict(_results_item) + + results.append(results_item) + + def _parse_next_(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + next_ = _parse_next_(d.pop("next", UNSET)) + + def _parse_previous(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + previous = _parse_previous(d.pop("previous", UNSET)) + + # } _dt = d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -122,9 +164,9 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: def get_fields(): return { "count": int, - "results": List[Affect], - "next": str, - "previous": str, + "results": list["Affect"], + "next": 
Union[None, str], + "previous": Union[None, str], "dt": datetime.datetime, "env": str, "revision": str, @@ -132,7 +174,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_retrieve_response_200.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_retrieve_response_200.py index d4ea99c..195d631 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_retrieve_response_200.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_retrieve_response_200.py @@ -1,78 +1,123 @@ import datetime -from typing import Any, Dict, List, Optional, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union, cast +from uuid import UUID -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.affect_cvss import AffectCVSS from ..models.affectedness_enum import AffectednessEnum -from ..models.alert import Alert from ..models.blank_enum import BlankEnum from ..models.impact_enum import ImpactEnum from ..models.resolution_enum import ResolutionEnum -from ..models.tracker import Tracker from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.affect_cvss import AffectCVSS + from ..models.alert import Alert + from ..models.tracker import Tracker + + T = TypeVar("T", bound="OsidbApiV1AffectsRetrieveResponse200") -@attr.s(auto_attribs=True) +@_attrs_define class OsidbApiV1AffectsRetrieveResponse200(OSIDBModel): - """ """ - - uuid: str + """ + Attributes: + uuid (UUID): + flaw (Union[None, UUID]): + ps_module (str): + ps_product (str): + trackers (list['Tracker']): + delegated_resolution (str): + cvss_scores (list['AffectCVSS']): + embargoed (bool): The embargoed boolean attribute is technically read-only as it just indirectly modifies the + ACLs but is mandatory as it controls the access to the resource. + alerts (list['Alert']): + created_dt (datetime.datetime): + updated_dt (datetime.datetime): The updated_dt timestamp attribute is mandatory on update as it is used to + detect mit-air collisions. 
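Nullable optional strings such as `ps_component` and `purl` on the affect responses are now typed `Union[None, Unset, str]`, so callers can tell an explicit JSON `null` apart from a key that was simply absent from the payload. A small sketch of that three-way check, assuming `UNSET`/`Unset` are importable from `python_client.types` as in the hunks above; `describe_ps_component` is a hypothetical helper, not part of the generated bindings:

from typing import Union

from osidb_bindings.bindings.python_client.types import UNSET, Unset


def describe_ps_component(value: Union[None, Unset, str]) -> str:
    # Hypothetical helper: report which of the three states a nullable
    # optional field is in after from_dict().
    if isinstance(value, Unset):
        return "ps_component was not present in the response"
    if value is None:
        return "ps_component was explicitly null"
    return f"ps_component = {value!r}"


print(describe_ps_component(UNSET))      # key missing -> Unset sentinel
print(describe_ps_component(None))       # JSON null -> None
print(describe_ps_component("kernel"))   # concrete value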
+ affectedness (Union[AffectednessEnum, BlankEnum, Unset]): + resolution (Union[BlankEnum, ResolutionEnum, Unset]): + ps_component (Union[None, Unset, str]): + impact (Union[BlankEnum, ImpactEnum, Unset]): + purl (Union[None, Unset, str]): + dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ + + uuid: UUID + flaw: Union[None, UUID] ps_module: str ps_product: str - trackers: List[Tracker] + trackers: list["Tracker"] delegated_resolution: str - cvss_scores: List[AffectCVSS] + cvss_scores: list["AffectCVSS"] embargoed: bool - alerts: List[Alert] + alerts: list["Alert"] created_dt: datetime.datetime updated_dt: datetime.datetime - flaw: Optional[str] affectedness: Union[AffectednessEnum, BlankEnum, Unset] = UNSET resolution: Union[BlankEnum, ResolutionEnum, Unset] = UNSET - ps_component: Union[Unset, None, str] = UNSET + ps_component: Union[None, Unset, str] = UNSET impact: Union[BlankEnum, ImpactEnum, Unset] = UNSET - purl: Union[Unset, None, str] = UNSET + purl: Union[None, Unset, str] = UNSET dt: Union[Unset, datetime.datetime] = UNSET env: Union[Unset, str] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + uuid: str = UNSET + if not isinstance(self.uuid, Unset): + uuid = str(self.uuid) + + flaw: Union[None, str] + if isinstance(self.flaw, UUID): + flaw = UNSET + if not isinstance(self.flaw, Unset): + flaw = str(self.flaw) + + else: + flaw = self.flaw - def to_dict(self) -> Dict[str, Any]: - uuid = self.uuid ps_module = self.ps_module + ps_product = self.ps_product - trackers: List[Dict[str, Any]] = UNSET + + trackers: list[dict[str, Any]] = UNSET if not isinstance(self.trackers, Unset): trackers = [] for trackers_item_data in self.trackers: - trackers_item: Dict[str, Any] = UNSET + trackers_item: dict[str, Any] = UNSET if not isinstance(trackers_item_data, Unset): trackers_item = trackers_item_data.to_dict() trackers.append(trackers_item) delegated_resolution = self.delegated_resolution - cvss_scores: List[Dict[str, Any]] = UNSET + + cvss_scores: list[dict[str, Any]] = UNSET if not isinstance(self.cvss_scores, Unset): cvss_scores = [] for cvss_scores_item_data in self.cvss_scores: - cvss_scores_item: Dict[str, Any] = UNSET + cvss_scores_item: dict[str, Any] = UNSET if not isinstance(cvss_scores_item_data, Unset): cvss_scores_item = cvss_scores_item_data.to_dict() cvss_scores.append(cvss_scores_item) embargoed = self.embargoed - alerts: List[Dict[str, Any]] = UNSET + + alerts: list[dict[str, Any]] = UNSET if not isinstance(self.alerts, Unset): alerts = [] for alerts_item_data in self.alerts: - alerts_item: Dict[str, Any] = UNSET + alerts_item: dict[str, Any] = UNSET if not isinstance(alerts_item_data, Unset): alerts_item = alerts_item_data.to_dict() @@ -86,20 +131,17 @@ def to_dict(self) -> Dict[str, Any]: if not isinstance(self.updated_dt, Unset): updated_dt = self.updated_dt.isoformat() - flaw = self.flaw affectedness: Union[Unset, str] if isinstance(self.affectedness, Unset): affectedness = UNSET elif isinstance(self.affectedness, AffectednessEnum): affectedness = UNSET if not isinstance(self.affectedness, Unset): - affectedness = AffectednessEnum(self.affectedness).value else: affectedness = UNSET if not isinstance(self.affectedness, Unset): - affectedness = 
BlankEnum(self.affectedness).value resolution: Union[Unset, str] @@ -108,44 +150,54 @@ def to_dict(self) -> Dict[str, Any]: elif isinstance(self.resolution, ResolutionEnum): resolution = UNSET if not isinstance(self.resolution, Unset): - resolution = ResolutionEnum(self.resolution).value else: resolution = UNSET if not isinstance(self.resolution, Unset): - resolution = BlankEnum(self.resolution).value - ps_component = self.ps_component + ps_component: Union[None, Unset, str] + if isinstance(self.ps_component, Unset): + ps_component = UNSET + else: + ps_component = self.ps_component + impact: Union[Unset, str] if isinstance(self.impact, Unset): impact = UNSET elif isinstance(self.impact, ImpactEnum): impact = UNSET if not isinstance(self.impact, Unset): - impact = ImpactEnum(self.impact).value else: impact = UNSET if not isinstance(self.impact, Unset): - impact = BlankEnum(self.impact).value - purl = self.purl + purl: Union[None, Unset, str] + if isinstance(self.purl, Unset): + purl = UNSET + else: + purl = self.purl + dt: Union[Unset, str] = UNSET if not isinstance(self.dt, Unset): dt = self.dt.isoformat() env = self.env + revision = self.revision + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(uuid, Unset): field_dict["uuid"] = uuid + if not isinstance(flaw, Unset): + field_dict["flaw"] = flaw if not isinstance(ps_module, Unset): field_dict["ps_module"] = ps_module if not isinstance(ps_product, Unset): @@ -164,8 +216,6 @@ def to_dict(self) -> Dict[str, Any]: field_dict["created_dt"] = created_dt if not isinstance(updated_dt, Unset): field_dict["updated_dt"] = updated_dt - if not isinstance(flaw, Unset): - field_dict["flaw"] = flaw if not isinstance(affectedness, Unset): field_dict["affectedness"] = affectedness if not isinstance(resolution, Unset): @@ -188,9 +238,40 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.affect_cvss import AffectCVSS + from ..models.alert import Alert + from ..models.tracker import Tracker + d = src_dict.copy() - uuid = d.pop("uuid", UNSET) + # } + _uuid = d.pop("uuid", UNSET) + uuid: UUID + if isinstance(_uuid, Unset): + uuid = UNSET + else: + uuid = UUID(_uuid) + + def _parse_flaw(data: object) -> Union[None, UUID]: + if data is None: + return data + try: + if not isinstance(data, str): + raise TypeError() + # } + _flaw_type_0 = data + flaw_type_0: UUID + if isinstance(_flaw_type_0, Unset): + flaw_type_0 = UNSET + else: + flaw_type_0 = UUID(_flaw_type_0) + + return flaw_type_0 + except: # noqa: E722 + pass + return cast(Union[None, UUID], data) + + flaw = _parse_flaw(d.pop("flaw", UNSET)) ps_module = d.pop("ps_module", UNSET) @@ -198,53 +279,48 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: trackers = [] _trackers = d.pop("trackers", UNSET) - if _trackers is UNSET: - trackers = UNSET - else: - for trackers_item_data in _trackers or []: - _trackers_item = trackers_item_data - trackers_item: Tracker - if isinstance(_trackers_item, Unset): - trackers_item = UNSET - else: - trackers_item = Tracker.from_dict(_trackers_item) + for trackers_item_data in _trackers or []: + # } + _trackers_item = trackers_item_data + trackers_item: Tracker + if isinstance(_trackers_item, Unset): + trackers_item = UNSET + else: + trackers_item = Tracker.from_dict(_trackers_item) - 
trackers.append(trackers_item) + trackers.append(trackers_item) delegated_resolution = d.pop("delegated_resolution", UNSET) cvss_scores = [] _cvss_scores = d.pop("cvss_scores", UNSET) - if _cvss_scores is UNSET: - cvss_scores = UNSET - else: - for cvss_scores_item_data in _cvss_scores or []: - _cvss_scores_item = cvss_scores_item_data - cvss_scores_item: AffectCVSS - if isinstance(_cvss_scores_item, Unset): - cvss_scores_item = UNSET - else: - cvss_scores_item = AffectCVSS.from_dict(_cvss_scores_item) + for cvss_scores_item_data in _cvss_scores or []: + # } + _cvss_scores_item = cvss_scores_item_data + cvss_scores_item: AffectCVSS + if isinstance(_cvss_scores_item, Unset): + cvss_scores_item = UNSET + else: + cvss_scores_item = AffectCVSS.from_dict(_cvss_scores_item) - cvss_scores.append(cvss_scores_item) + cvss_scores.append(cvss_scores_item) embargoed = d.pop("embargoed", UNSET) alerts = [] _alerts = d.pop("alerts", UNSET) - if _alerts is UNSET: - alerts = UNSET - else: - for alerts_item_data in _alerts or []: - _alerts_item = alerts_item_data - alerts_item: Alert - if isinstance(_alerts_item, Unset): - alerts_item = UNSET - else: - alerts_item = Alert.from_dict(_alerts_item) + for alerts_item_data in _alerts or []: + # } + _alerts_item = alerts_item_data + alerts_item: Alert + if isinstance(_alerts_item, Unset): + alerts_item = UNSET + else: + alerts_item = Alert.from_dict(_alerts_item) - alerts.append(alerts_item) + alerts.append(alerts_item) + # } _created_dt = d.pop("created_dt", UNSET) created_dt: datetime.datetime if isinstance(_created_dt, Unset): @@ -252,6 +328,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: created_dt = isoparse(_created_dt) + # } _updated_dt = d.pop("updated_dt", UNSET) updated_dt: datetime.datetime if isinstance(_updated_dt, Unset): @@ -259,18 +336,15 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: updated_dt = isoparse(_updated_dt) - flaw = d.pop("flaw", UNSET) - - def _parse_affectedness( - data: object, - ) -> Union[AffectednessEnum, BlankEnum, Unset]: + def _parse_affectedness(data: object) -> Union[AffectednessEnum, BlankEnum, Unset]: if isinstance(data, Unset): return data try: if not isinstance(data, str): raise TypeError() + # } _affectedness_type_0 = data - affectedness_type_0: Union[Unset, AffectednessEnum] + affectedness_type_0: AffectednessEnum if isinstance(_affectedness_type_0, Unset): affectedness_type_0 = UNSET else: @@ -281,8 +355,9 @@ def _parse_affectedness( pass if not isinstance(data, str): raise TypeError() + # } _affectedness_type_1 = data - affectedness_type_1: Union[Unset, BlankEnum] + affectedness_type_1: BlankEnum if isinstance(_affectedness_type_1, Unset): affectedness_type_1 = UNSET else: @@ -298,8 +373,9 @@ def _parse_resolution(data: object) -> Union[BlankEnum, ResolutionEnum, Unset]: try: if not isinstance(data, str): raise TypeError() + # } _resolution_type_0 = data - resolution_type_0: Union[Unset, ResolutionEnum] + resolution_type_0: ResolutionEnum if isinstance(_resolution_type_0, Unset): resolution_type_0 = UNSET else: @@ -310,8 +386,9 @@ def _parse_resolution(data: object) -> Union[BlankEnum, ResolutionEnum, Unset]: pass if not isinstance(data, str): raise TypeError() + # } _resolution_type_1 = data - resolution_type_1: Union[Unset, BlankEnum] + resolution_type_1: BlankEnum if isinstance(_resolution_type_1, Unset): resolution_type_1 = UNSET else: @@ -321,7 +398,14 @@ def _parse_resolution(data: object) -> Union[BlankEnum, ResolutionEnum, Unset]: resolution = 
_parse_resolution(d.pop("resolution", UNSET)) - ps_component = d.pop("ps_component", UNSET) + def _parse_ps_component(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + ps_component = _parse_ps_component(d.pop("ps_component", UNSET)) def _parse_impact(data: object) -> Union[BlankEnum, ImpactEnum, Unset]: if isinstance(data, Unset): @@ -329,8 +413,9 @@ def _parse_impact(data: object) -> Union[BlankEnum, ImpactEnum, Unset]: try: if not isinstance(data, str): raise TypeError() + # } _impact_type_0 = data - impact_type_0: Union[Unset, ImpactEnum] + impact_type_0: ImpactEnum if isinstance(_impact_type_0, Unset): impact_type_0 = UNSET else: @@ -341,8 +426,9 @@ def _parse_impact(data: object) -> Union[BlankEnum, ImpactEnum, Unset]: pass if not isinstance(data, str): raise TypeError() + # } _impact_type_1 = data - impact_type_1: Union[Unset, BlankEnum] + impact_type_1: BlankEnum if isinstance(_impact_type_1, Unset): impact_type_1 = UNSET else: @@ -352,8 +438,16 @@ def _parse_impact(data: object) -> Union[BlankEnum, ImpactEnum, Unset]: impact = _parse_impact(d.pop("impact", UNSET)) - purl = d.pop("purl", UNSET) + def _parse_purl(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + purl = _parse_purl(d.pop("purl", UNSET)) + + # } _dt = d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -369,6 +463,7 @@ def _parse_impact(data: object) -> Union[BlankEnum, ImpactEnum, Unset]: osidb_api_v1_affects_retrieve_response_200 = cls( uuid=uuid, + flaw=flaw, ps_module=ps_module, ps_product=ps_product, trackers=trackers, @@ -378,7 +473,6 @@ def _parse_impact(data: object) -> Union[BlankEnum, ImpactEnum, Unset]: alerts=alerts, created_dt=created_dt, updated_dt=updated_dt, - flaw=flaw, affectedness=affectedness, resolution=resolution, ps_component=ps_component, @@ -396,22 +490,22 @@ def _parse_impact(data: object) -> Union[BlankEnum, ImpactEnum, Unset]: @staticmethod def get_fields(): return { - "uuid": str, + "uuid": UUID, + "flaw": Union[None, UUID], "ps_module": str, "ps_product": str, - "trackers": List[Tracker], + "trackers": list["Tracker"], "delegated_resolution": str, - "cvss_scores": List[AffectCVSS], + "cvss_scores": list["AffectCVSS"], "embargoed": bool, - "alerts": List[Alert], + "alerts": list["Alert"], "created_dt": datetime.datetime, "updated_dt": datetime.datetime, - "flaw": str, "affectedness": Union[AffectednessEnum, BlankEnum], "resolution": Union[BlankEnum, ResolutionEnum], - "ps_component": str, + "ps_component": Union[None, str], "impact": Union[BlankEnum, ImpactEnum], - "purl": str, + "purl": Union[None, str], "dt": datetime.datetime, "env": str, "revision": str, @@ -419,7 +513,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_update_response_200.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_update_response_200.py index 2248473..b7ee718 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_update_response_200.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_affects_update_response_200.py @@ -1,78 +1,123 @@ import datetime -from typing 
import Any, Dict, List, Optional, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union, cast +from uuid import UUID -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.affect_cvss import AffectCVSS from ..models.affectedness_enum import AffectednessEnum -from ..models.alert import Alert from ..models.blank_enum import BlankEnum from ..models.impact_enum import ImpactEnum from ..models.resolution_enum import ResolutionEnum -from ..models.tracker import Tracker from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.affect_cvss import AffectCVSS + from ..models.alert import Alert + from ..models.tracker import Tracker + + T = TypeVar("T", bound="OsidbApiV1AffectsUpdateResponse200") -@attr.s(auto_attribs=True) +@_attrs_define class OsidbApiV1AffectsUpdateResponse200(OSIDBModel): - """ """ - - uuid: str + """ + Attributes: + uuid (UUID): + flaw (Union[None, UUID]): + ps_module (str): + ps_product (str): + trackers (list['Tracker']): + delegated_resolution (str): + cvss_scores (list['AffectCVSS']): + embargoed (bool): The embargoed boolean attribute is technically read-only as it just indirectly modifies the + ACLs but is mandatory as it controls the access to the resource. + alerts (list['Alert']): + created_dt (datetime.datetime): + updated_dt (datetime.datetime): The updated_dt timestamp attribute is mandatory on update as it is used to + detect mit-air collisions. + affectedness (Union[AffectednessEnum, BlankEnum, Unset]): + resolution (Union[BlankEnum, ResolutionEnum, Unset]): + ps_component (Union[None, Unset, str]): + impact (Union[BlankEnum, ImpactEnum, Unset]): + purl (Union[None, Unset, str]): + dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ + + uuid: UUID + flaw: Union[None, UUID] ps_module: str ps_product: str - trackers: List[Tracker] + trackers: list["Tracker"] delegated_resolution: str - cvss_scores: List[AffectCVSS] + cvss_scores: list["AffectCVSS"] embargoed: bool - alerts: List[Alert] + alerts: list["Alert"] created_dt: datetime.datetime updated_dt: datetime.datetime - flaw: Optional[str] affectedness: Union[AffectednessEnum, BlankEnum, Unset] = UNSET resolution: Union[BlankEnum, ResolutionEnum, Unset] = UNSET - ps_component: Union[Unset, None, str] = UNSET + ps_component: Union[None, Unset, str] = UNSET impact: Union[BlankEnum, ImpactEnum, Unset] = UNSET - purl: Union[Unset, None, str] = UNSET + purl: Union[None, Unset, str] = UNSET dt: Union[Unset, datetime.datetime] = UNSET env: Union[Unset, str] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + uuid: str = UNSET + if not isinstance(self.uuid, Unset): + uuid = str(self.uuid) + + flaw: Union[None, str] + if isinstance(self.flaw, UUID): + flaw = UNSET + if not isinstance(self.flaw, Unset): + flaw = str(self.flaw) + + else: + flaw = self.flaw - def to_dict(self) -> Dict[str, Any]: - uuid = self.uuid ps_module = self.ps_module + ps_product = self.ps_product - trackers: List[Dict[str, Any]] = UNSET + + trackers: list[dict[str, Any]] = UNSET if not isinstance(self.trackers, Unset): trackers = [] for trackers_item_data in self.trackers: - trackers_item: Dict[str, Any] = UNSET + 
trackers_item: dict[str, Any] = UNSET if not isinstance(trackers_item_data, Unset): trackers_item = trackers_item_data.to_dict() trackers.append(trackers_item) delegated_resolution = self.delegated_resolution - cvss_scores: List[Dict[str, Any]] = UNSET + + cvss_scores: list[dict[str, Any]] = UNSET if not isinstance(self.cvss_scores, Unset): cvss_scores = [] for cvss_scores_item_data in self.cvss_scores: - cvss_scores_item: Dict[str, Any] = UNSET + cvss_scores_item: dict[str, Any] = UNSET if not isinstance(cvss_scores_item_data, Unset): cvss_scores_item = cvss_scores_item_data.to_dict() cvss_scores.append(cvss_scores_item) embargoed = self.embargoed - alerts: List[Dict[str, Any]] = UNSET + + alerts: list[dict[str, Any]] = UNSET if not isinstance(self.alerts, Unset): alerts = [] for alerts_item_data in self.alerts: - alerts_item: Dict[str, Any] = UNSET + alerts_item: dict[str, Any] = UNSET if not isinstance(alerts_item_data, Unset): alerts_item = alerts_item_data.to_dict() @@ -86,20 +131,17 @@ def to_dict(self) -> Dict[str, Any]: if not isinstance(self.updated_dt, Unset): updated_dt = self.updated_dt.isoformat() - flaw = self.flaw affectedness: Union[Unset, str] if isinstance(self.affectedness, Unset): affectedness = UNSET elif isinstance(self.affectedness, AffectednessEnum): affectedness = UNSET if not isinstance(self.affectedness, Unset): - affectedness = AffectednessEnum(self.affectedness).value else: affectedness = UNSET if not isinstance(self.affectedness, Unset): - affectedness = BlankEnum(self.affectedness).value resolution: Union[Unset, str] @@ -108,44 +150,54 @@ def to_dict(self) -> Dict[str, Any]: elif isinstance(self.resolution, ResolutionEnum): resolution = UNSET if not isinstance(self.resolution, Unset): - resolution = ResolutionEnum(self.resolution).value else: resolution = UNSET if not isinstance(self.resolution, Unset): - resolution = BlankEnum(self.resolution).value - ps_component = self.ps_component + ps_component: Union[None, Unset, str] + if isinstance(self.ps_component, Unset): + ps_component = UNSET + else: + ps_component = self.ps_component + impact: Union[Unset, str] if isinstance(self.impact, Unset): impact = UNSET elif isinstance(self.impact, ImpactEnum): impact = UNSET if not isinstance(self.impact, Unset): - impact = ImpactEnum(self.impact).value else: impact = UNSET if not isinstance(self.impact, Unset): - impact = BlankEnum(self.impact).value - purl = self.purl + purl: Union[None, Unset, str] + if isinstance(self.purl, Unset): + purl = UNSET + else: + purl = self.purl + dt: Union[Unset, str] = UNSET if not isinstance(self.dt, Unset): dt = self.dt.isoformat() env = self.env + revision = self.revision + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(uuid, Unset): field_dict["uuid"] = uuid + if not isinstance(flaw, Unset): + field_dict["flaw"] = flaw if not isinstance(ps_module, Unset): field_dict["ps_module"] = ps_module if not isinstance(ps_product, Unset): @@ -164,8 +216,6 @@ def to_dict(self) -> Dict[str, Any]: field_dict["created_dt"] = created_dt if not isinstance(updated_dt, Unset): field_dict["updated_dt"] = updated_dt - if not isinstance(flaw, Unset): - field_dict["flaw"] = flaw if not isinstance(affectedness, Unset): field_dict["affectedness"] = affectedness if not isinstance(resolution, Unset): @@ -188,9 +238,40 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def 
from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.affect_cvss import AffectCVSS + from ..models.alert import Alert + from ..models.tracker import Tracker + d = src_dict.copy() - uuid = d.pop("uuid", UNSET) + # } + _uuid = d.pop("uuid", UNSET) + uuid: UUID + if isinstance(_uuid, Unset): + uuid = UNSET + else: + uuid = UUID(_uuid) + + def _parse_flaw(data: object) -> Union[None, UUID]: + if data is None: + return data + try: + if not isinstance(data, str): + raise TypeError() + # } + _flaw_type_0 = data + flaw_type_0: UUID + if isinstance(_flaw_type_0, Unset): + flaw_type_0 = UNSET + else: + flaw_type_0 = UUID(_flaw_type_0) + + return flaw_type_0 + except: # noqa: E722 + pass + return cast(Union[None, UUID], data) + + flaw = _parse_flaw(d.pop("flaw", UNSET)) ps_module = d.pop("ps_module", UNSET) @@ -198,53 +279,48 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: trackers = [] _trackers = d.pop("trackers", UNSET) - if _trackers is UNSET: - trackers = UNSET - else: - for trackers_item_data in _trackers or []: - _trackers_item = trackers_item_data - trackers_item: Tracker - if isinstance(_trackers_item, Unset): - trackers_item = UNSET - else: - trackers_item = Tracker.from_dict(_trackers_item) + for trackers_item_data in _trackers or []: + # } + _trackers_item = trackers_item_data + trackers_item: Tracker + if isinstance(_trackers_item, Unset): + trackers_item = UNSET + else: + trackers_item = Tracker.from_dict(_trackers_item) - trackers.append(trackers_item) + trackers.append(trackers_item) delegated_resolution = d.pop("delegated_resolution", UNSET) cvss_scores = [] _cvss_scores = d.pop("cvss_scores", UNSET) - if _cvss_scores is UNSET: - cvss_scores = UNSET - else: - for cvss_scores_item_data in _cvss_scores or []: - _cvss_scores_item = cvss_scores_item_data - cvss_scores_item: AffectCVSS - if isinstance(_cvss_scores_item, Unset): - cvss_scores_item = UNSET - else: - cvss_scores_item = AffectCVSS.from_dict(_cvss_scores_item) + for cvss_scores_item_data in _cvss_scores or []: + # } + _cvss_scores_item = cvss_scores_item_data + cvss_scores_item: AffectCVSS + if isinstance(_cvss_scores_item, Unset): + cvss_scores_item = UNSET + else: + cvss_scores_item = AffectCVSS.from_dict(_cvss_scores_item) - cvss_scores.append(cvss_scores_item) + cvss_scores.append(cvss_scores_item) embargoed = d.pop("embargoed", UNSET) alerts = [] _alerts = d.pop("alerts", UNSET) - if _alerts is UNSET: - alerts = UNSET - else: - for alerts_item_data in _alerts or []: - _alerts_item = alerts_item_data - alerts_item: Alert - if isinstance(_alerts_item, Unset): - alerts_item = UNSET - else: - alerts_item = Alert.from_dict(_alerts_item) + for alerts_item_data in _alerts or []: + # } + _alerts_item = alerts_item_data + alerts_item: Alert + if isinstance(_alerts_item, Unset): + alerts_item = UNSET + else: + alerts_item = Alert.from_dict(_alerts_item) - alerts.append(alerts_item) + alerts.append(alerts_item) + # } _created_dt = d.pop("created_dt", UNSET) created_dt: datetime.datetime if isinstance(_created_dt, Unset): @@ -252,6 +328,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: created_dt = isoparse(_created_dt) + # } _updated_dt = d.pop("updated_dt", UNSET) updated_dt: datetime.datetime if isinstance(_updated_dt, Unset): @@ -259,18 +336,15 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: updated_dt = isoparse(_updated_dt) - flaw = d.pop("flaw", UNSET) - - def _parse_affectedness( - data: object, - ) -> Union[AffectednessEnum, BlankEnum, Unset]: + def 
_parse_affectedness(data: object) -> Union[AffectednessEnum, BlankEnum, Unset]: if isinstance(data, Unset): return data try: if not isinstance(data, str): raise TypeError() + # } _affectedness_type_0 = data - affectedness_type_0: Union[Unset, AffectednessEnum] + affectedness_type_0: AffectednessEnum if isinstance(_affectedness_type_0, Unset): affectedness_type_0 = UNSET else: @@ -281,8 +355,9 @@ def _parse_affectedness( pass if not isinstance(data, str): raise TypeError() + # } _affectedness_type_1 = data - affectedness_type_1: Union[Unset, BlankEnum] + affectedness_type_1: BlankEnum if isinstance(_affectedness_type_1, Unset): affectedness_type_1 = UNSET else: @@ -298,8 +373,9 @@ def _parse_resolution(data: object) -> Union[BlankEnum, ResolutionEnum, Unset]: try: if not isinstance(data, str): raise TypeError() + # } _resolution_type_0 = data - resolution_type_0: Union[Unset, ResolutionEnum] + resolution_type_0: ResolutionEnum if isinstance(_resolution_type_0, Unset): resolution_type_0 = UNSET else: @@ -310,8 +386,9 @@ def _parse_resolution(data: object) -> Union[BlankEnum, ResolutionEnum, Unset]: pass if not isinstance(data, str): raise TypeError() + # } _resolution_type_1 = data - resolution_type_1: Union[Unset, BlankEnum] + resolution_type_1: BlankEnum if isinstance(_resolution_type_1, Unset): resolution_type_1 = UNSET else: @@ -321,7 +398,14 @@ def _parse_resolution(data: object) -> Union[BlankEnum, ResolutionEnum, Unset]: resolution = _parse_resolution(d.pop("resolution", UNSET)) - ps_component = d.pop("ps_component", UNSET) + def _parse_ps_component(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + ps_component = _parse_ps_component(d.pop("ps_component", UNSET)) def _parse_impact(data: object) -> Union[BlankEnum, ImpactEnum, Unset]: if isinstance(data, Unset): @@ -329,8 +413,9 @@ def _parse_impact(data: object) -> Union[BlankEnum, ImpactEnum, Unset]: try: if not isinstance(data, str): raise TypeError() + # } _impact_type_0 = data - impact_type_0: Union[Unset, ImpactEnum] + impact_type_0: ImpactEnum if isinstance(_impact_type_0, Unset): impact_type_0 = UNSET else: @@ -341,8 +426,9 @@ def _parse_impact(data: object) -> Union[BlankEnum, ImpactEnum, Unset]: pass if not isinstance(data, str): raise TypeError() + # } _impact_type_1 = data - impact_type_1: Union[Unset, BlankEnum] + impact_type_1: BlankEnum if isinstance(_impact_type_1, Unset): impact_type_1 = UNSET else: @@ -352,8 +438,16 @@ def _parse_impact(data: object) -> Union[BlankEnum, ImpactEnum, Unset]: impact = _parse_impact(d.pop("impact", UNSET)) - purl = d.pop("purl", UNSET) + def _parse_purl(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + purl = _parse_purl(d.pop("purl", UNSET)) + + # } _dt = d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -369,6 +463,7 @@ def _parse_impact(data: object) -> Union[BlankEnum, ImpactEnum, Unset]: osidb_api_v1_affects_update_response_200 = cls( uuid=uuid, + flaw=flaw, ps_module=ps_module, ps_product=ps_product, trackers=trackers, @@ -378,7 +473,6 @@ def _parse_impact(data: object) -> Union[BlankEnum, ImpactEnum, Unset]: alerts=alerts, created_dt=created_dt, updated_dt=updated_dt, - flaw=flaw, affectedness=affectedness, resolution=resolution, ps_component=ps_component, @@ -396,22 +490,22 @@ def _parse_impact(data: object) -> 
Union[BlankEnum, ImpactEnum, Unset]: @staticmethod def get_fields(): return { - "uuid": str, + "uuid": UUID, + "flaw": Union[None, UUID], "ps_module": str, "ps_product": str, - "trackers": List[Tracker], + "trackers": list["Tracker"], "delegated_resolution": str, - "cvss_scores": List[AffectCVSS], + "cvss_scores": list["AffectCVSS"], "embargoed": bool, - "alerts": List[Alert], + "alerts": list["Alert"], "created_dt": datetime.datetime, "updated_dt": datetime.datetime, - "flaw": str, "affectedness": Union[AffectednessEnum, BlankEnum], "resolution": Union[BlankEnum, ResolutionEnum], - "ps_component": str, + "ps_component": Union[None, str], "impact": Union[BlankEnum, ImpactEnum], - "purl": str, + "purl": Union[None, str], "dt": datetime.datetime, "env": str, "revision": str, @@ -419,7 +513,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_alerts_list_response_200.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_alerts_list_response_200.py index de937f6..a40ae97 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_alerts_list_response_200.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_alerts_list_response_200.py @@ -1,52 +1,79 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union, cast -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.alert import Alert from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.alert import Alert + + T = TypeVar("T", bound="OsidbApiV1AlertsListResponse200") -@attr.s(auto_attribs=True) +@_attrs_define class OsidbApiV1AlertsListResponse200(OSIDBModel): - """ """ + """ + Attributes: + count (int): Example: 123. + results (list['Alert']): + next_ (Union[None, Unset, str]): Example: http://api.example.org/accounts/?offset=400&limit=100. + previous (Union[None, Unset, str]): Example: http://api.example.org/accounts/?offset=200&limit=100. 
+ dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ count: int - results: List[Alert] - next_: Union[Unset, None, str] = UNSET - previous: Union[Unset, None, str] = UNSET + results: list["Alert"] + next_: Union[None, Unset, str] = UNSET + previous: Union[None, Unset, str] = UNSET dt: Union[Unset, datetime.datetime] = UNSET env: Union[Unset, str] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: count = self.count - results: List[Dict[str, Any]] = UNSET + + results: list[dict[str, Any]] = UNSET if not isinstance(self.results, Unset): results = [] for results_item_data in self.results: - results_item: Dict[str, Any] = UNSET + results_item: dict[str, Any] = UNSET if not isinstance(results_item_data, Unset): results_item = results_item_data.to_dict() results.append(results_item) - next_ = self.next_ - previous = self.previous + next_: Union[None, Unset, str] + if isinstance(self.next_, Unset): + next_ = UNSET + else: + next_ = self.next_ + + previous: Union[None, Unset, str] + if isinstance(self.previous, Unset): + previous = UNSET + else: + previous = self.previous + dt: Union[Unset, str] = UNSET if not isinstance(self.dt, Unset): dt = self.dt.isoformat() env = self.env + revision = self.revision + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(count, Unset): field_dict["count"] = count @@ -68,29 +95,44 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.alert import Alert + d = src_dict.copy() count = d.pop("count", UNSET) results = [] _results = d.pop("results", UNSET) - if _results is UNSET: - results = UNSET - else: - for results_item_data in _results or []: - _results_item = results_item_data - results_item: Alert - if isinstance(_results_item, Unset): - results_item = UNSET - else: - results_item = Alert.from_dict(_results_item) - - results.append(results_item) - - next_ = d.pop("next", UNSET) - - previous = d.pop("previous", UNSET) - + for results_item_data in _results or []: + # } + _results_item = results_item_data + results_item: Alert + if isinstance(_results_item, Unset): + results_item = UNSET + else: + results_item = Alert.from_dict(_results_item) + + results.append(results_item) + + def _parse_next_(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + next_ = _parse_next_(d.pop("next", UNSET)) + + def _parse_previous(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + previous = _parse_previous(d.pop("previous", UNSET)) + + # } _dt = d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -122,9 +164,9 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: def get_fields(): return { "count": int, - "results": List[Alert], - "next": str, - "previous": str, + "results": list["Alert"], + "next": Union[None, str], + 
"previous": Union[None, str], "dt": datetime.datetime, "env": str, "revision": str, @@ -132,7 +174,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_alerts_retrieve_response_200.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_alerts_retrieve_response_200.py index c0b02ad..f9c5a3e 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_alerts_retrieve_response_200.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_alerts_retrieve_response_200.py @@ -1,7 +1,9 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import Any, TypeVar, Union +from uuid import UUID -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse from ..models.alert_type_enum import AlertTypeEnum @@ -10,14 +12,27 @@ T = TypeVar("T", bound="OsidbApiV1AlertsRetrieveResponse200") -@attr.s(auto_attribs=True) +@_attrs_define class OsidbApiV1AlertsRetrieveResponse200(OSIDBModel): - """ """ - - uuid: str + """ + Attributes: + uuid (UUID): + name (str): + description (str): + parent_uuid (UUID): + parent_model (str): + alert_type (Union[Unset, AlertTypeEnum]): + resolution_steps (Union[Unset, str]): + dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ + + uuid: UUID name: str description: str - parent_uuid: str + parent_uuid: UUID parent_model: str alert_type: Union[Unset, AlertTypeEnum] = UNSET resolution_steps: Union[Unset, str] = UNSET @@ -25,29 +40,40 @@ class OsidbApiV1AlertsRetrieveResponse200(OSIDBModel): env: Union[Unset, str] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + uuid: str = UNSET + if not isinstance(self.uuid, Unset): + uuid = str(self.uuid) - def to_dict(self) -> Dict[str, Any]: - uuid = self.uuid name = self.name + description = self.description - parent_uuid = self.parent_uuid + + parent_uuid: str = UNSET + if not isinstance(self.parent_uuid, Unset): + parent_uuid = str(self.parent_uuid) + parent_model = self.parent_model + alert_type: Union[Unset, str] = UNSET if not isinstance(self.alert_type, Unset): - alert_type = AlertTypeEnum(self.alert_type).value resolution_steps = self.resolution_steps + dt: Union[Unset, str] = UNSET if not isinstance(self.dt, Unset): dt = self.dt.isoformat() env = self.env + revision = self.revision + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(uuid, Unset): field_dict["uuid"] = uuid @@ -75,18 +101,31 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: d = src_dict.copy() - uuid = d.pop("uuid", UNSET) + # } + _uuid = d.pop("uuid", UNSET) + uuid: UUID + if isinstance(_uuid, Unset): + uuid = UNSET + else: + uuid = UUID(_uuid) name = d.pop("name", UNSET) description = d.pop("description", UNSET) - parent_uuid = d.pop("parent_uuid", UNSET) + # } + 
_parent_uuid = d.pop("parent_uuid", UNSET) + parent_uuid: UUID + if isinstance(_parent_uuid, Unset): + parent_uuid = UNSET + else: + parent_uuid = UUID(_parent_uuid) parent_model = d.pop("parent_model", UNSET) + # } _alert_type = d.pop("alert_type", UNSET) alert_type: Union[Unset, AlertTypeEnum] if isinstance(_alert_type, Unset): @@ -96,6 +135,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: resolution_steps = d.pop("resolution_steps", UNSET) + # } _dt = d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -129,10 +169,10 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: @staticmethod def get_fields(): return { - "uuid": str, + "uuid": UUID, "name": str, "description": str, - "parent_uuid": str, + "parent_uuid": UUID, "parent_model": str, "alert_type": AlertTypeEnum, "resolution_steps": str, @@ -143,7 +183,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_audit_list_response_200.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_audit_list_response_200.py index 5832c01..af59f3d 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_audit_list_response_200.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_audit_list_response_200.py @@ -1,52 +1,79 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union, cast -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.audit import Audit from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.audit import Audit + + T = TypeVar("T", bound="OsidbApiV1AuditListResponse200") -@attr.s(auto_attribs=True) +@_attrs_define class OsidbApiV1AuditListResponse200(OSIDBModel): - """ """ + """ + Attributes: + count (int): Example: 123. + results (list['Audit']): + next_ (Union[None, Unset, str]): Example: http://api.example.org/accounts/?offset=400&limit=100. + previous (Union[None, Unset, str]): Example: http://api.example.org/accounts/?offset=200&limit=100. 
+ dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ count: int - results: List[Audit] - next_: Union[Unset, None, str] = UNSET - previous: Union[Unset, None, str] = UNSET + results: list["Audit"] + next_: Union[None, Unset, str] = UNSET + previous: Union[None, Unset, str] = UNSET dt: Union[Unset, datetime.datetime] = UNSET env: Union[Unset, str] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: count = self.count - results: List[Dict[str, Any]] = UNSET + + results: list[dict[str, Any]] = UNSET if not isinstance(self.results, Unset): results = [] for results_item_data in self.results: - results_item: Dict[str, Any] = UNSET + results_item: dict[str, Any] = UNSET if not isinstance(results_item_data, Unset): results_item = results_item_data.to_dict() results.append(results_item) - next_ = self.next_ - previous = self.previous + next_: Union[None, Unset, str] + if isinstance(self.next_, Unset): + next_ = UNSET + else: + next_ = self.next_ + + previous: Union[None, Unset, str] + if isinstance(self.previous, Unset): + previous = UNSET + else: + previous = self.previous + dt: Union[Unset, str] = UNSET if not isinstance(self.dt, Unset): dt = self.dt.isoformat() env = self.env + revision = self.revision + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(count, Unset): field_dict["count"] = count @@ -68,29 +95,44 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.audit import Audit + d = src_dict.copy() count = d.pop("count", UNSET) results = [] _results = d.pop("results", UNSET) - if _results is UNSET: - results = UNSET - else: - for results_item_data in _results or []: - _results_item = results_item_data - results_item: Audit - if isinstance(_results_item, Unset): - results_item = UNSET - else: - results_item = Audit.from_dict(_results_item) - - results.append(results_item) - - next_ = d.pop("next", UNSET) - - previous = d.pop("previous", UNSET) - + for results_item_data in _results or []: + # } + _results_item = results_item_data + results_item: Audit + if isinstance(_results_item, Unset): + results_item = UNSET + else: + results_item = Audit.from_dict(_results_item) + + results.append(results_item) + + def _parse_next_(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + next_ = _parse_next_(d.pop("next", UNSET)) + + def _parse_previous(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + previous = _parse_previous(d.pop("previous", UNSET)) + + # } _dt = d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -122,9 +164,9 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: def get_fields(): return { "count": int, - "results": List[Audit], - "next": str, - "previous": str, + "results": list["Audit"], + "next": Union[None, str], + 
"previous": Union[None, str], "dt": datetime.datetime, "env": str, "revision": str, @@ -132,7 +174,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_audit_retrieve_response_200.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_audit_retrieve_response_200.py index ffac840..ea1118e 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_audit_retrieve_response_200.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_audit_retrieve_response_200.py @@ -1,7 +1,8 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import Any, TypeVar, Union, cast -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse from ..types import UNSET, OSIDBModel, Unset @@ -9,9 +10,23 @@ T = TypeVar("T", bound="OsidbApiV1AuditRetrieveResponse200") -@attr.s(auto_attribs=True) +@_attrs_define class OsidbApiV1AuditRetrieveResponse200(OSIDBModel): - """ """ + """ + Attributes: + pgh_created_at (datetime.datetime): When the event was created. + pgh_slug (str): The unique identifier across all event tables. + pgh_obj_model (str): The object model. + pgh_label (str): The event label. + pgh_diff (Any): The diff between the previous event of the same label. + pgh_data (str): + pgh_obj_id (Union[None, Unset, str]): The primary key of the object. + pgh_context (Union[Unset, Any]): The context associated with the event. + dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ pgh_created_at: datetime.datetime pgh_slug: str @@ -19,26 +34,35 @@ class OsidbApiV1AuditRetrieveResponse200(OSIDBModel): pgh_label: str pgh_diff: Any pgh_data: str - pgh_obj_id: Union[Unset, None, str] = UNSET + pgh_obj_id: Union[None, Unset, str] = UNSET pgh_context: Union[Unset, Any] = UNSET dt: Union[Unset, datetime.datetime] = UNSET env: Union[Unset, str] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: pgh_created_at: str = UNSET if not isinstance(self.pgh_created_at, Unset): pgh_created_at = self.pgh_created_at.isoformat() pgh_slug = self.pgh_slug + pgh_obj_model = self.pgh_obj_model + pgh_label = self.pgh_label + pgh_diff = self.pgh_diff pgh_data = self.pgh_data - pgh_obj_id = self.pgh_obj_id + + pgh_obj_id: Union[None, Unset, str] + if isinstance(self.pgh_obj_id, Unset): + pgh_obj_id = UNSET + else: + pgh_obj_id = self.pgh_obj_id + pgh_context = self.pgh_context dt: Union[Unset, str] = UNSET @@ -46,10 +70,12 @@ def to_dict(self) -> Dict[str, Any]: dt = self.dt.isoformat() env = self.env + revision = self.revision + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(pgh_created_at, Unset): field_dict["pgh_created_at"] = pgh_created_at @@ -79,8 +105,9 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) 
-> T: d = src_dict.copy() + # } _pgh_created_at = d.pop("pgh_created_at", UNSET) pgh_created_at: datetime.datetime if isinstance(_pgh_created_at, Unset): @@ -98,10 +125,18 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: pgh_data = d.pop("pgh_data", UNSET) - pgh_obj_id = d.pop("pgh_obj_id", UNSET) + def _parse_pgh_obj_id(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + pgh_obj_id = _parse_pgh_obj_id(d.pop("pgh_obj_id", UNSET)) pgh_context = d.pop("pgh_context", UNSET) + # } _dt = d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -142,7 +177,7 @@ def get_fields(): "pgh_label": str, "pgh_diff": Any, "pgh_data": str, - "pgh_obj_id": str, + "pgh_obj_id": Union[None, str], "pgh_context": Any, "dt": datetime.datetime, "env": str, @@ -151,7 +186,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_audit_update_response_200.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_audit_update_response_200.py index 794dce7..1ce0cd9 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_audit_update_response_200.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_audit_update_response_200.py @@ -1,7 +1,8 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import Any, TypeVar, Union, cast -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse from ..types import UNSET, OSIDBModel, Unset @@ -9,9 +10,23 @@ T = TypeVar("T", bound="OsidbApiV1AuditUpdateResponse200") -@attr.s(auto_attribs=True) +@_attrs_define class OsidbApiV1AuditUpdateResponse200(OSIDBModel): - """ """ + """ + Attributes: + pgh_created_at (datetime.datetime): When the event was created. + pgh_slug (str): The unique identifier across all event tables. + pgh_obj_model (str): The object model. + pgh_label (str): The event label. + pgh_diff (Any): The diff between the previous event of the same label. + pgh_data (str): + pgh_obj_id (Union[None, Unset, str]): The primary key of the object. + pgh_context (Union[Unset, Any]): The context associated with the event. 
+ dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ pgh_created_at: datetime.datetime pgh_slug: str @@ -19,26 +34,35 @@ class OsidbApiV1AuditUpdateResponse200(OSIDBModel): pgh_label: str pgh_diff: Any pgh_data: str - pgh_obj_id: Union[Unset, None, str] = UNSET + pgh_obj_id: Union[None, Unset, str] = UNSET pgh_context: Union[Unset, Any] = UNSET dt: Union[Unset, datetime.datetime] = UNSET env: Union[Unset, str] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: pgh_created_at: str = UNSET if not isinstance(self.pgh_created_at, Unset): pgh_created_at = self.pgh_created_at.isoformat() pgh_slug = self.pgh_slug + pgh_obj_model = self.pgh_obj_model + pgh_label = self.pgh_label + pgh_diff = self.pgh_diff pgh_data = self.pgh_data - pgh_obj_id = self.pgh_obj_id + + pgh_obj_id: Union[None, Unset, str] + if isinstance(self.pgh_obj_id, Unset): + pgh_obj_id = UNSET + else: + pgh_obj_id = self.pgh_obj_id + pgh_context = self.pgh_context dt: Union[Unset, str] = UNSET @@ -46,10 +70,12 @@ def to_dict(self) -> Dict[str, Any]: dt = self.dt.isoformat() env = self.env + revision = self.revision + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(pgh_created_at, Unset): field_dict["pgh_created_at"] = pgh_created_at @@ -79,8 +105,9 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: d = src_dict.copy() + # } _pgh_created_at = d.pop("pgh_created_at", UNSET) pgh_created_at: datetime.datetime if isinstance(_pgh_created_at, Unset): @@ -98,10 +125,18 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: pgh_data = d.pop("pgh_data", UNSET) - pgh_obj_id = d.pop("pgh_obj_id", UNSET) + def _parse_pgh_obj_id(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + pgh_obj_id = _parse_pgh_obj_id(d.pop("pgh_obj_id", UNSET)) pgh_context = d.pop("pgh_context", UNSET) + # } _dt = d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -142,7 +177,7 @@ def get_fields(): "pgh_label": str, "pgh_diff": Any, "pgh_data": str, - "pgh_obj_id": str, + "pgh_obj_id": Union[None, str], "pgh_context": Any, "dt": datetime.datetime, "env": str, @@ -151,7 +186,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_acknowledgments_create_response_201.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_acknowledgments_create_response_201.py index 28b8a13..5d07213 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_acknowledgments_create_response_201.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_acknowledgments_create_response_201.py @@ -1,46 +1,78 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union 
+from typing import TYPE_CHECKING, Any, TypeVar, Union +from uuid import UUID -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.alert import Alert from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.alert import Alert + + T = TypeVar("T", bound="OsidbApiV1FlawsAcknowledgmentsCreateResponse201") -@attr.s(auto_attribs=True) +@_attrs_define class OsidbApiV1FlawsAcknowledgmentsCreateResponse201(OSIDBModel): - """ """ + """ + Attributes: + name (str): + affiliation (str): + from_upstream (bool): + flaw (UUID): + uuid (UUID): + embargoed (bool): The embargoed boolean attribute is technically read-only as it just indirectly modifies the + ACLs but is mandatory as it controls the access to the resource. + alerts (list['Alert']): + created_dt (datetime.datetime): + updated_dt (datetime.datetime): The updated_dt timestamp attribute is mandatory on update as it is used to + detect mit-air collisions. + dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ name: str affiliation: str from_upstream: bool - flaw: str - uuid: str + flaw: UUID + uuid: UUID embargoed: bool - alerts: List[Alert] + alerts: list["Alert"] created_dt: datetime.datetime updated_dt: datetime.datetime dt: Union[Unset, datetime.datetime] = UNSET env: Union[Unset, str] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: name = self.name + affiliation = self.affiliation + from_upstream = self.from_upstream - flaw = self.flaw - uuid = self.uuid + + flaw: str = UNSET + if not isinstance(self.flaw, Unset): + flaw = str(self.flaw) + + uuid: str = UNSET + if not isinstance(self.uuid, Unset): + uuid = str(self.uuid) + embargoed = self.embargoed - alerts: List[Dict[str, Any]] = UNSET + + alerts: list[dict[str, Any]] = UNSET if not isinstance(self.alerts, Unset): alerts = [] for alerts_item_data in self.alerts: - alerts_item: Dict[str, Any] = UNSET + alerts_item: dict[str, Any] = UNSET if not isinstance(alerts_item_data, Unset): alerts_item = alerts_item_data.to_dict() @@ -59,10 +91,12 @@ def to_dict(self) -> Dict[str, Any]: dt = self.dt.isoformat() env = self.env + revision = self.revision + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(name, Unset): field_dict["name"] = name @@ -94,7 +128,9 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.alert import Alert + d = src_dict.copy() name = d.pop("name", UNSET) @@ -102,27 +138,38 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: from_upstream = d.pop("from_upstream", UNSET) - flaw = d.pop("flaw", UNSET) + # } + _flaw = d.pop("flaw", UNSET) + flaw: UUID + if isinstance(_flaw, Unset): + flaw = UNSET + else: + flaw = UUID(_flaw) - uuid = d.pop("uuid", UNSET) + # } + _uuid = d.pop("uuid", UNSET) + uuid: UUID + if isinstance(_uuid, Unset): + uuid = UNSET + else: + uuid = UUID(_uuid) embargoed = d.pop("embargoed", UNSET) alerts = [] _alerts = d.pop("alerts", 
UNSET) - if _alerts is UNSET: - alerts = UNSET - else: - for alerts_item_data in _alerts or []: - _alerts_item = alerts_item_data - alerts_item: Alert - if isinstance(_alerts_item, Unset): - alerts_item = UNSET - else: - alerts_item = Alert.from_dict(_alerts_item) - - alerts.append(alerts_item) - + for alerts_item_data in _alerts or []: + # } + _alerts_item = alerts_item_data + alerts_item: Alert + if isinstance(_alerts_item, Unset): + alerts_item = UNSET + else: + alerts_item = Alert.from_dict(_alerts_item) + + alerts.append(alerts_item) + + # } _created_dt = d.pop("created_dt", UNSET) created_dt: datetime.datetime if isinstance(_created_dt, Unset): @@ -130,6 +177,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: created_dt = isoparse(_created_dt) + # } _updated_dt = d.pop("updated_dt", UNSET) updated_dt: datetime.datetime if isinstance(_updated_dt, Unset): @@ -137,6 +185,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: updated_dt = isoparse(_updated_dt) + # } _dt = d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -175,10 +224,10 @@ def get_fields(): "name": str, "affiliation": str, "from_upstream": bool, - "flaw": str, - "uuid": str, + "flaw": UUID, + "uuid": UUID, "embargoed": bool, - "alerts": List[Alert], + "alerts": list["Alert"], "created_dt": datetime.datetime, "updated_dt": datetime.datetime, "dt": datetime.datetime, @@ -188,7 +237,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_acknowledgments_destroy_response_200.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_acknowledgments_destroy_response_200.py index b1d77fc..ee2bd79 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_acknowledgments_destroy_response_200.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_acknowledgments_destroy_response_200.py @@ -1,7 +1,8 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import Any, TypeVar, Union -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse from ..types import UNSET, OSIDBModel, Unset @@ -9,26 +10,34 @@ T = TypeVar("T", bound="OsidbApiV1FlawsAcknowledgmentsDestroyResponse200") -@attr.s(auto_attribs=True) +@_attrs_define class OsidbApiV1FlawsAcknowledgmentsDestroyResponse200(OSIDBModel): - """ """ + """ + Attributes: + dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ dt: Union[Unset, datetime.datetime] = UNSET env: Union[Unset, str] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: dt: Union[Unset, str] = UNSET if not isinstance(self.dt, Unset): dt = self.dt.isoformat() env = self.env + revision = self.revision + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(dt, Unset): field_dict["dt"] = dt @@ -42,8 +51,9 @@ def to_dict(self) -> Dict[str, 
Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: d = src_dict.copy() + # } _dt = d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -64,9 +74,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: version=version, ) - osidb_api_v1_flaws_acknowledgments_destroy_response_200.additional_properties = ( - d - ) + osidb_api_v1_flaws_acknowledgments_destroy_response_200.additional_properties = d return osidb_api_v1_flaws_acknowledgments_destroy_response_200 @staticmethod @@ -79,7 +87,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_acknowledgments_list_response_200.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_acknowledgments_list_response_200.py index 2672dbd..ea58494 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_acknowledgments_list_response_200.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_acknowledgments_list_response_200.py @@ -1,52 +1,79 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union, cast -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.flaw_acknowledgment import FlawAcknowledgment from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.flaw_acknowledgment import FlawAcknowledgment + + T = TypeVar("T", bound="OsidbApiV1FlawsAcknowledgmentsListResponse200") -@attr.s(auto_attribs=True) +@_attrs_define class OsidbApiV1FlawsAcknowledgmentsListResponse200(OSIDBModel): - """ """ + """ + Attributes: + count (int): Example: 123. + results (list['FlawAcknowledgment']): + next_ (Union[None, Unset, str]): Example: http://api.example.org/accounts/?offset=400&limit=100. + previous (Union[None, Unset, str]): Example: http://api.example.org/accounts/?offset=200&limit=100. 
+ dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ count: int - results: List[FlawAcknowledgment] - next_: Union[Unset, None, str] = UNSET - previous: Union[Unset, None, str] = UNSET + results: list["FlawAcknowledgment"] + next_: Union[None, Unset, str] = UNSET + previous: Union[None, Unset, str] = UNSET dt: Union[Unset, datetime.datetime] = UNSET env: Union[Unset, str] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: count = self.count - results: List[Dict[str, Any]] = UNSET + + results: list[dict[str, Any]] = UNSET if not isinstance(self.results, Unset): results = [] for results_item_data in self.results: - results_item: Dict[str, Any] = UNSET + results_item: dict[str, Any] = UNSET if not isinstance(results_item_data, Unset): results_item = results_item_data.to_dict() results.append(results_item) - next_ = self.next_ - previous = self.previous + next_: Union[None, Unset, str] + if isinstance(self.next_, Unset): + next_ = UNSET + else: + next_ = self.next_ + + previous: Union[None, Unset, str] + if isinstance(self.previous, Unset): + previous = UNSET + else: + previous = self.previous + dt: Union[Unset, str] = UNSET if not isinstance(self.dt, Unset): dt = self.dt.isoformat() env = self.env + revision = self.revision + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(count, Unset): field_dict["count"] = count @@ -68,29 +95,44 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.flaw_acknowledgment import FlawAcknowledgment + d = src_dict.copy() count = d.pop("count", UNSET) results = [] _results = d.pop("results", UNSET) - if _results is UNSET: - results = UNSET - else: - for results_item_data in _results or []: - _results_item = results_item_data - results_item: FlawAcknowledgment - if isinstance(_results_item, Unset): - results_item = UNSET - else: - results_item = FlawAcknowledgment.from_dict(_results_item) - - results.append(results_item) - - next_ = d.pop("next", UNSET) - - previous = d.pop("previous", UNSET) - + for results_item_data in _results or []: + # } + _results_item = results_item_data + results_item: FlawAcknowledgment + if isinstance(_results_item, Unset): + results_item = UNSET + else: + results_item = FlawAcknowledgment.from_dict(_results_item) + + results.append(results_item) + + def _parse_next_(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + next_ = _parse_next_(d.pop("next", UNSET)) + + def _parse_previous(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + previous = _parse_previous(d.pop("previous", UNSET)) + + # } _dt = d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -122,9 +164,9 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: def get_fields(): return { "count": int, - "results": 
List[FlawAcknowledgment], - "next": str, - "previous": str, + "results": list["FlawAcknowledgment"], + "next": Union[None, str], + "previous": Union[None, str], "dt": datetime.datetime, "env": str, "revision": str, @@ -132,7 +174,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_acknowledgments_retrieve_response_200.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_acknowledgments_retrieve_response_200.py index 92b6c05..b4a33f5 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_acknowledgments_retrieve_response_200.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_acknowledgments_retrieve_response_200.py @@ -1,46 +1,78 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union +from uuid import UUID -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.alert import Alert from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.alert import Alert + + T = TypeVar("T", bound="OsidbApiV1FlawsAcknowledgmentsRetrieveResponse200") -@attr.s(auto_attribs=True) +@_attrs_define class OsidbApiV1FlawsAcknowledgmentsRetrieveResponse200(OSIDBModel): - """ """ + """ + Attributes: + name (str): + affiliation (str): + from_upstream (bool): + flaw (UUID): + uuid (UUID): + embargoed (bool): The embargoed boolean attribute is technically read-only as it just indirectly modifies the + ACLs but is mandatory as it controls the access to the resource. + alerts (list['Alert']): + created_dt (datetime.datetime): + updated_dt (datetime.datetime): The updated_dt timestamp attribute is mandatory on update as it is used to + detect mit-air collisions. 
+ dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ name: str affiliation: str from_upstream: bool - flaw: str - uuid: str + flaw: UUID + uuid: UUID embargoed: bool - alerts: List[Alert] + alerts: list["Alert"] created_dt: datetime.datetime updated_dt: datetime.datetime dt: Union[Unset, datetime.datetime] = UNSET env: Union[Unset, str] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: name = self.name + affiliation = self.affiliation + from_upstream = self.from_upstream - flaw = self.flaw - uuid = self.uuid + + flaw: str = UNSET + if not isinstance(self.flaw, Unset): + flaw = str(self.flaw) + + uuid: str = UNSET + if not isinstance(self.uuid, Unset): + uuid = str(self.uuid) + embargoed = self.embargoed - alerts: List[Dict[str, Any]] = UNSET + + alerts: list[dict[str, Any]] = UNSET if not isinstance(self.alerts, Unset): alerts = [] for alerts_item_data in self.alerts: - alerts_item: Dict[str, Any] = UNSET + alerts_item: dict[str, Any] = UNSET if not isinstance(alerts_item_data, Unset): alerts_item = alerts_item_data.to_dict() @@ -59,10 +91,12 @@ def to_dict(self) -> Dict[str, Any]: dt = self.dt.isoformat() env = self.env + revision = self.revision + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(name, Unset): field_dict["name"] = name @@ -94,7 +128,9 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.alert import Alert + d = src_dict.copy() name = d.pop("name", UNSET) @@ -102,27 +138,38 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: from_upstream = d.pop("from_upstream", UNSET) - flaw = d.pop("flaw", UNSET) + # } + _flaw = d.pop("flaw", UNSET) + flaw: UUID + if isinstance(_flaw, Unset): + flaw = UNSET + else: + flaw = UUID(_flaw) - uuid = d.pop("uuid", UNSET) + # } + _uuid = d.pop("uuid", UNSET) + uuid: UUID + if isinstance(_uuid, Unset): + uuid = UNSET + else: + uuid = UUID(_uuid) embargoed = d.pop("embargoed", UNSET) alerts = [] _alerts = d.pop("alerts", UNSET) - if _alerts is UNSET: - alerts = UNSET - else: - for alerts_item_data in _alerts or []: - _alerts_item = alerts_item_data - alerts_item: Alert - if isinstance(_alerts_item, Unset): - alerts_item = UNSET - else: - alerts_item = Alert.from_dict(_alerts_item) - - alerts.append(alerts_item) - + for alerts_item_data in _alerts or []: + # } + _alerts_item = alerts_item_data + alerts_item: Alert + if isinstance(_alerts_item, Unset): + alerts_item = UNSET + else: + alerts_item = Alert.from_dict(_alerts_item) + + alerts.append(alerts_item) + + # } _created_dt = d.pop("created_dt", UNSET) created_dt: datetime.datetime if isinstance(_created_dt, Unset): @@ -130,6 +177,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: created_dt = isoparse(_created_dt) + # } _updated_dt = d.pop("updated_dt", UNSET) updated_dt: datetime.datetime if isinstance(_updated_dt, Unset): @@ -137,6 +185,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: updated_dt = isoparse(_updated_dt) + # } _dt = 
d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -166,9 +215,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: version=version, ) - osidb_api_v1_flaws_acknowledgments_retrieve_response_200.additional_properties = ( - d - ) + osidb_api_v1_flaws_acknowledgments_retrieve_response_200.additional_properties = d return osidb_api_v1_flaws_acknowledgments_retrieve_response_200 @staticmethod @@ -177,10 +224,10 @@ def get_fields(): "name": str, "affiliation": str, "from_upstream": bool, - "flaw": str, - "uuid": str, + "flaw": UUID, + "uuid": UUID, "embargoed": bool, - "alerts": List[Alert], + "alerts": list["Alert"], "created_dt": datetime.datetime, "updated_dt": datetime.datetime, "dt": datetime.datetime, @@ -190,7 +237,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_acknowledgments_update_response_200.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_acknowledgments_update_response_200.py index af85fda..387749a 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_acknowledgments_update_response_200.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_acknowledgments_update_response_200.py @@ -1,46 +1,78 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union +from uuid import UUID -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.alert import Alert from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.alert import Alert + + T = TypeVar("T", bound="OsidbApiV1FlawsAcknowledgmentsUpdateResponse200") -@attr.s(auto_attribs=True) +@_attrs_define class OsidbApiV1FlawsAcknowledgmentsUpdateResponse200(OSIDBModel): - """ """ + """ + Attributes: + name (str): + affiliation (str): + from_upstream (bool): + flaw (UUID): + uuid (UUID): + embargoed (bool): The embargoed boolean attribute is technically read-only as it just indirectly modifies the + ACLs but is mandatory as it controls the access to the resource. + alerts (list['Alert']): + created_dt (datetime.datetime): + updated_dt (datetime.datetime): The updated_dt timestamp attribute is mandatory on update as it is used to + detect mit-air collisions. 
+ dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ name: str affiliation: str from_upstream: bool - flaw: str - uuid: str + flaw: UUID + uuid: UUID embargoed: bool - alerts: List[Alert] + alerts: list["Alert"] created_dt: datetime.datetime updated_dt: datetime.datetime dt: Union[Unset, datetime.datetime] = UNSET env: Union[Unset, str] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: name = self.name + affiliation = self.affiliation + from_upstream = self.from_upstream - flaw = self.flaw - uuid = self.uuid + + flaw: str = UNSET + if not isinstance(self.flaw, Unset): + flaw = str(self.flaw) + + uuid: str = UNSET + if not isinstance(self.uuid, Unset): + uuid = str(self.uuid) + embargoed = self.embargoed - alerts: List[Dict[str, Any]] = UNSET + + alerts: list[dict[str, Any]] = UNSET if not isinstance(self.alerts, Unset): alerts = [] for alerts_item_data in self.alerts: - alerts_item: Dict[str, Any] = UNSET + alerts_item: dict[str, Any] = UNSET if not isinstance(alerts_item_data, Unset): alerts_item = alerts_item_data.to_dict() @@ -59,10 +91,12 @@ def to_dict(self) -> Dict[str, Any]: dt = self.dt.isoformat() env = self.env + revision = self.revision + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(name, Unset): field_dict["name"] = name @@ -94,7 +128,9 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.alert import Alert + d = src_dict.copy() name = d.pop("name", UNSET) @@ -102,27 +138,38 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: from_upstream = d.pop("from_upstream", UNSET) - flaw = d.pop("flaw", UNSET) + # } + _flaw = d.pop("flaw", UNSET) + flaw: UUID + if isinstance(_flaw, Unset): + flaw = UNSET + else: + flaw = UUID(_flaw) - uuid = d.pop("uuid", UNSET) + # } + _uuid = d.pop("uuid", UNSET) + uuid: UUID + if isinstance(_uuid, Unset): + uuid = UNSET + else: + uuid = UUID(_uuid) embargoed = d.pop("embargoed", UNSET) alerts = [] _alerts = d.pop("alerts", UNSET) - if _alerts is UNSET: - alerts = UNSET - else: - for alerts_item_data in _alerts or []: - _alerts_item = alerts_item_data - alerts_item: Alert - if isinstance(_alerts_item, Unset): - alerts_item = UNSET - else: - alerts_item = Alert.from_dict(_alerts_item) - - alerts.append(alerts_item) - + for alerts_item_data in _alerts or []: + # } + _alerts_item = alerts_item_data + alerts_item: Alert + if isinstance(_alerts_item, Unset): + alerts_item = UNSET + else: + alerts_item = Alert.from_dict(_alerts_item) + + alerts.append(alerts_item) + + # } _created_dt = d.pop("created_dt", UNSET) created_dt: datetime.datetime if isinstance(_created_dt, Unset): @@ -130,6 +177,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: created_dt = isoparse(_created_dt) + # } _updated_dt = d.pop("updated_dt", UNSET) updated_dt: datetime.datetime if isinstance(_updated_dt, Unset): @@ -137,6 +185,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: updated_dt = isoparse(_updated_dt) + # } _dt = 
d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -175,10 +224,10 @@ def get_fields(): "name": str, "affiliation": str, "from_upstream": bool, - "flaw": str, - "uuid": str, + "flaw": UUID, + "uuid": UUID, "embargoed": bool, - "alerts": List[Alert], + "alerts": list["Alert"], "created_dt": datetime.datetime, "updated_dt": datetime.datetime, "dt": datetime.datetime, @@ -188,7 +237,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_comments_create_response_201.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_comments_create_response_201.py index da89fed..6962a64 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_comments_create_response_201.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_comments_create_response_201.py @@ -1,24 +1,48 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union +from uuid import UUID -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.alert import Alert from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.alert import Alert + + T = TypeVar("T", bound="OsidbApiV1FlawsCommentsCreateResponse201") -@attr.s(auto_attribs=True) +@_attrs_define class OsidbApiV1FlawsCommentsCreateResponse201(OSIDBModel): - """ """ - - flaw: str + """ + Attributes: + flaw (UUID): + text (str): + uuid (UUID): + external_system_id (str): + alerts (list['Alert']): + created_dt (datetime.datetime): + updated_dt (datetime.datetime): The updated_dt timestamp attribute is mandatory on update as it is used to + detect mit-air collisions. + embargoed (bool): The embargoed boolean attribute is technically read-only as it just indirectly modifies the + ACLs but is mandatory as it controls the access to the resource. 
+ order (Union[Unset, int]): + creator (Union[Unset, str]): + is_private (Union[Unset, bool]): + dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ + + flaw: UUID text: str - uuid: str + uuid: UUID external_system_id: str - alerts: List[Alert] + alerts: list["Alert"] created_dt: datetime.datetime updated_dt: datetime.datetime embargoed: bool @@ -29,18 +53,26 @@ class OsidbApiV1FlawsCommentsCreateResponse201(OSIDBModel): env: Union[Unset, str] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + flaw: str = UNSET + if not isinstance(self.flaw, Unset): + flaw = str(self.flaw) - def to_dict(self) -> Dict[str, Any]: - flaw = self.flaw text = self.text - uuid = self.uuid + + uuid: str = UNSET + if not isinstance(self.uuid, Unset): + uuid = str(self.uuid) + external_system_id = self.external_system_id - alerts: List[Dict[str, Any]] = UNSET + + alerts: list[dict[str, Any]] = UNSET if not isinstance(self.alerts, Unset): alerts = [] for alerts_item_data in self.alerts: - alerts_item: Dict[str, Any] = UNSET + alerts_item: dict[str, Any] = UNSET if not isinstance(alerts_item_data, Unset): alerts_item = alerts_item_data.to_dict() @@ -55,18 +87,24 @@ def to_dict(self) -> Dict[str, Any]: updated_dt = self.updated_dt.isoformat() embargoed = self.embargoed + order = self.order + creator = self.creator + is_private = self.is_private + dt: Union[Unset, str] = UNSET if not isinstance(self.dt, Unset): dt = self.dt.isoformat() env = self.env + revision = self.revision + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(flaw, Unset): field_dict["flaw"] = flaw @@ -102,31 +140,44 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.alert import Alert + d = src_dict.copy() - flaw = d.pop("flaw", UNSET) + # } + _flaw = d.pop("flaw", UNSET) + flaw: UUID + if isinstance(_flaw, Unset): + flaw = UNSET + else: + flaw = UUID(_flaw) text = d.pop("text", UNSET) - uuid = d.pop("uuid", UNSET) + # } + _uuid = d.pop("uuid", UNSET) + uuid: UUID + if isinstance(_uuid, Unset): + uuid = UNSET + else: + uuid = UUID(_uuid) external_system_id = d.pop("external_system_id", UNSET) alerts = [] _alerts = d.pop("alerts", UNSET) - if _alerts is UNSET: - alerts = UNSET - else: - for alerts_item_data in _alerts or []: - _alerts_item = alerts_item_data - alerts_item: Alert - if isinstance(_alerts_item, Unset): - alerts_item = UNSET - else: - alerts_item = Alert.from_dict(_alerts_item) - - alerts.append(alerts_item) - + for alerts_item_data in _alerts or []: + # } + _alerts_item = alerts_item_data + alerts_item: Alert + if isinstance(_alerts_item, Unset): + alerts_item = UNSET + else: + alerts_item = Alert.from_dict(_alerts_item) + + alerts.append(alerts_item) + + # } _created_dt = d.pop("created_dt", UNSET) created_dt: datetime.datetime if isinstance(_created_dt, Unset): @@ -134,6 +185,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: created_dt = isoparse(_created_dt) + # } _updated_dt = d.pop("updated_dt", UNSET) updated_dt: datetime.datetime if 
isinstance(_updated_dt, Unset): @@ -149,6 +201,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: is_private = d.pop("is_private", UNSET) + # } _dt = d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -186,11 +239,11 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: @staticmethod def get_fields(): return { - "flaw": str, + "flaw": UUID, "text": str, - "uuid": str, + "uuid": UUID, "external_system_id": str, - "alerts": List[Alert], + "alerts": list["Alert"], "created_dt": datetime.datetime, "updated_dt": datetime.datetime, "embargoed": bool, @@ -204,7 +257,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_comments_list_response_200.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_comments_list_response_200.py index 6210862..0bd68bf 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_comments_list_response_200.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_comments_list_response_200.py @@ -1,52 +1,79 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union, cast -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.flaw_comment import FlawComment from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.flaw_comment import FlawComment + + T = TypeVar("T", bound="OsidbApiV1FlawsCommentsListResponse200") -@attr.s(auto_attribs=True) +@_attrs_define class OsidbApiV1FlawsCommentsListResponse200(OSIDBModel): - """ """ + """ + Attributes: + count (int): Example: 123. + results (list['FlawComment']): + next_ (Union[None, Unset, str]): Example: http://api.example.org/accounts/?offset=400&limit=100. + previous (Union[None, Unset, str]): Example: http://api.example.org/accounts/?offset=200&limit=100. 
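# --- illustrative sketch (not generated code) -------------------------------
# `next_`/`previous` now keep apart a key that is absent from the payload
# (UNSET) and an explicit JSON null (None, i.e. no further page). A minimal
# pagination loop; `fetch_page` is a hypothetical helper returning the decoded
# JSON dict for a given URL.
from osidb_bindings.bindings.python_client.models.osidb_api_v1_flaws_comments_list_response_200 import (
    OsidbApiV1FlawsCommentsListResponse200,
)
from osidb_bindings.bindings.python_client.types import Unset


def iter_all_comments(fetch_page, first_url):
    url = first_url
    while url:
        page = OsidbApiV1FlawsCommentsListResponse200.from_dict(fetch_page(url))
        yield from page.results
        # None means the server reported no next page; Unset means the field
        # was missing altogether -- stop in either case.
        url = None if isinstance(page.next_, Unset) else page.next_
# --- end of sketch -----------------------------------------------------------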
+ dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ count: int - results: List[FlawComment] - next_: Union[Unset, None, str] = UNSET - previous: Union[Unset, None, str] = UNSET + results: list["FlawComment"] + next_: Union[None, Unset, str] = UNSET + previous: Union[None, Unset, str] = UNSET dt: Union[Unset, datetime.datetime] = UNSET env: Union[Unset, str] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: count = self.count - results: List[Dict[str, Any]] = UNSET + + results: list[dict[str, Any]] = UNSET if not isinstance(self.results, Unset): results = [] for results_item_data in self.results: - results_item: Dict[str, Any] = UNSET + results_item: dict[str, Any] = UNSET if not isinstance(results_item_data, Unset): results_item = results_item_data.to_dict() results.append(results_item) - next_ = self.next_ - previous = self.previous + next_: Union[None, Unset, str] + if isinstance(self.next_, Unset): + next_ = UNSET + else: + next_ = self.next_ + + previous: Union[None, Unset, str] + if isinstance(self.previous, Unset): + previous = UNSET + else: + previous = self.previous + dt: Union[Unset, str] = UNSET if not isinstance(self.dt, Unset): dt = self.dt.isoformat() env = self.env + revision = self.revision + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(count, Unset): field_dict["count"] = count @@ -68,29 +95,44 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.flaw_comment import FlawComment + d = src_dict.copy() count = d.pop("count", UNSET) results = [] _results = d.pop("results", UNSET) - if _results is UNSET: - results = UNSET - else: - for results_item_data in _results or []: - _results_item = results_item_data - results_item: FlawComment - if isinstance(_results_item, Unset): - results_item = UNSET - else: - results_item = FlawComment.from_dict(_results_item) - - results.append(results_item) - - next_ = d.pop("next", UNSET) - - previous = d.pop("previous", UNSET) - + for results_item_data in _results or []: + # } + _results_item = results_item_data + results_item: FlawComment + if isinstance(_results_item, Unset): + results_item = UNSET + else: + results_item = FlawComment.from_dict(_results_item) + + results.append(results_item) + + def _parse_next_(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + next_ = _parse_next_(d.pop("next", UNSET)) + + def _parse_previous(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + previous = _parse_previous(d.pop("previous", UNSET)) + + # } _dt = d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -122,9 +164,9 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: def get_fields(): return { "count": int, - "results": List[FlawComment], - "next": str, - "previous": str, + 
"results": list["FlawComment"], + "next": Union[None, str], + "previous": Union[None, str], "dt": datetime.datetime, "env": str, "revision": str, @@ -132,7 +174,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_comments_retrieve_response_200.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_comments_retrieve_response_200.py index 7f5eefd..69ff9b5 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_comments_retrieve_response_200.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_comments_retrieve_response_200.py @@ -1,24 +1,48 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union +from uuid import UUID -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.alert import Alert from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.alert import Alert + + T = TypeVar("T", bound="OsidbApiV1FlawsCommentsRetrieveResponse200") -@attr.s(auto_attribs=True) +@_attrs_define class OsidbApiV1FlawsCommentsRetrieveResponse200(OSIDBModel): - """ """ - - flaw: str + """ + Attributes: + flaw (UUID): + text (str): + uuid (UUID): + external_system_id (str): + alerts (list['Alert']): + created_dt (datetime.datetime): + updated_dt (datetime.datetime): The updated_dt timestamp attribute is mandatory on update as it is used to + detect mit-air collisions. + embargoed (bool): The embargoed boolean attribute is technically read-only as it just indirectly modifies the + ACLs but is mandatory as it controls the access to the resource. 
+ order (Union[Unset, int]): + creator (Union[Unset, str]): + is_private (Union[Unset, bool]): + dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ + + flaw: UUID text: str - uuid: str + uuid: UUID external_system_id: str - alerts: List[Alert] + alerts: list["Alert"] created_dt: datetime.datetime updated_dt: datetime.datetime embargoed: bool @@ -29,18 +53,26 @@ class OsidbApiV1FlawsCommentsRetrieveResponse200(OSIDBModel): env: Union[Unset, str] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + flaw: str = UNSET + if not isinstance(self.flaw, Unset): + flaw = str(self.flaw) - def to_dict(self) -> Dict[str, Any]: - flaw = self.flaw text = self.text - uuid = self.uuid + + uuid: str = UNSET + if not isinstance(self.uuid, Unset): + uuid = str(self.uuid) + external_system_id = self.external_system_id - alerts: List[Dict[str, Any]] = UNSET + + alerts: list[dict[str, Any]] = UNSET if not isinstance(self.alerts, Unset): alerts = [] for alerts_item_data in self.alerts: - alerts_item: Dict[str, Any] = UNSET + alerts_item: dict[str, Any] = UNSET if not isinstance(alerts_item_data, Unset): alerts_item = alerts_item_data.to_dict() @@ -55,18 +87,24 @@ def to_dict(self) -> Dict[str, Any]: updated_dt = self.updated_dt.isoformat() embargoed = self.embargoed + order = self.order + creator = self.creator + is_private = self.is_private + dt: Union[Unset, str] = UNSET if not isinstance(self.dt, Unset): dt = self.dt.isoformat() env = self.env + revision = self.revision + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(flaw, Unset): field_dict["flaw"] = flaw @@ -102,31 +140,44 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.alert import Alert + d = src_dict.copy() - flaw = d.pop("flaw", UNSET) + # } + _flaw = d.pop("flaw", UNSET) + flaw: UUID + if isinstance(_flaw, Unset): + flaw = UNSET + else: + flaw = UUID(_flaw) text = d.pop("text", UNSET) - uuid = d.pop("uuid", UNSET) + # } + _uuid = d.pop("uuid", UNSET) + uuid: UUID + if isinstance(_uuid, Unset): + uuid = UNSET + else: + uuid = UUID(_uuid) external_system_id = d.pop("external_system_id", UNSET) alerts = [] _alerts = d.pop("alerts", UNSET) - if _alerts is UNSET: - alerts = UNSET - else: - for alerts_item_data in _alerts or []: - _alerts_item = alerts_item_data - alerts_item: Alert - if isinstance(_alerts_item, Unset): - alerts_item = UNSET - else: - alerts_item = Alert.from_dict(_alerts_item) - - alerts.append(alerts_item) - + for alerts_item_data in _alerts or []: + # } + _alerts_item = alerts_item_data + alerts_item: Alert + if isinstance(_alerts_item, Unset): + alerts_item = UNSET + else: + alerts_item = Alert.from_dict(_alerts_item) + + alerts.append(alerts_item) + + # } _created_dt = d.pop("created_dt", UNSET) created_dt: datetime.datetime if isinstance(_created_dt, Unset): @@ -134,6 +185,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: created_dt = isoparse(_created_dt) + # } _updated_dt = d.pop("updated_dt", UNSET) updated_dt: datetime.datetime if 
isinstance(_updated_dt, Unset): @@ -149,6 +201,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: is_private = d.pop("is_private", UNSET) + # } _dt = d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -186,11 +239,11 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: @staticmethod def get_fields(): return { - "flaw": str, + "flaw": UUID, "text": str, - "uuid": str, + "uuid": UUID, "external_system_id": str, - "alerts": List[Alert], + "alerts": list["Alert"], "created_dt": datetime.datetime, "updated_dt": datetime.datetime, "embargoed": bool, @@ -204,7 +257,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_create_response_201.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_create_response_201.py index eb74b14..a25ab8b 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_create_response_201.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_create_response_201.py @@ -1,62 +1,106 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union, cast +from typing import TYPE_CHECKING, Any, TypeVar, Union, cast +from uuid import UUID -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.affect import Affect -from ..models.alert import Alert from ..models.blank_enum import BlankEnum -from ..models.comment import Comment -from ..models.flaw_acknowledgment import FlawAcknowledgment -from ..models.flaw_classification import FlawClassification -from ..models.flaw_cvss import FlawCVSS -from ..models.flaw_reference import FlawReference from ..models.impact_enum import ImpactEnum from ..models.major_incident_state_enum import MajorIncidentStateEnum from ..models.nist_cvss_validation_enum import NistCvssValidationEnum -from ..models.package import Package from ..models.requires_cve_description_enum import RequiresCveDescriptionEnum from ..models.source_be_0_enum import SourceBe0Enum from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.affect import Affect + from ..models.alert import Alert + from ..models.comment import Comment + from ..models.flaw_acknowledgment import FlawAcknowledgment + from ..models.flaw_classification import FlawClassification + from ..models.flaw_cvss import FlawCVSS + from ..models.flaw_reference import FlawReference + from ..models.package import Package + + T = TypeVar("T", bound="OsidbApiV1FlawsCreateResponse201") -@attr.s(auto_attribs=True) +@_attrs_define class OsidbApiV1FlawsCreateResponse201(OSIDBModel): - """ """ - - uuid: str + """ + Attributes: + uuid (UUID): + title (str): + trackers (list[str]): + comment_zero (str): + affects (list['Affect']): + comments (list['Comment']): + package_versions (list['Package']): + acknowledgments (list['FlawAcknowledgment']): + references (list['FlawReference']): + cvss_scores (list['FlawCVSS']): + embargoed (bool): The embargoed boolean attribute is technically read-only as it just indirectly modifies the + ACLs but is mandatory as it controls the access to the resource. + created_dt (datetime.datetime): + updated_dt (datetime.datetime): The updated_dt timestamp attribute is mandatory on update as it is used to + detect mit-air collisions. 
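# --- illustrative sketch (not generated code) -------------------------------
# The nested-model imports (Affect, Comment, FlawCVSS, ...) now sit under
# `if TYPE_CHECKING:` and are re-imported locally inside from_dict(). The idiom
# keeps the annotations checkable while avoiding circular imports between the
# generated modules at runtime; a stripped-down restatement of its shape:
from typing import TYPE_CHECKING, Any

if TYPE_CHECKING:  # evaluated by static type checkers only, never at runtime
    from osidb_bindings.bindings.python_client.models.alert import Alert


def parse_alerts(raw: list[dict[str, Any]]) -> list["Alert"]:
    # Deferred import, mirroring from_dict(): by the time this runs both
    # modules are fully initialised, so no circular-import error can occur.
    from osidb_bindings.bindings.python_client.models.alert import Alert

    return [Alert.from_dict(item) for item in raw]
# --- end of sketch -----------------------------------------------------------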
+ classification (FlawClassification): + alerts (list['Alert']): + cve_id (Union[None, Unset, str]): + impact (Union[BlankEnum, ImpactEnum, Unset]): + components (Union[Unset, list[str]]): + cve_description (Union[Unset, str]): + requires_cve_description (Union[BlankEnum, RequiresCveDescriptionEnum, Unset]): + statement (Union[Unset, str]): + cwe_id (Union[Unset, str]): + unembargo_dt (Union[None, Unset, datetime.datetime]): + source (Union[BlankEnum, SourceBe0Enum, Unset]): + reported_dt (Union[None, Unset, datetime.datetime]): + mitigation (Union[Unset, str]): + major_incident_state (Union[BlankEnum, MajorIncidentStateEnum, Unset]): + major_incident_start_dt (Union[None, Unset, datetime.datetime]): + nist_cvss_validation (Union[BlankEnum, NistCvssValidationEnum, Unset]): + group_key (Union[Unset, str]): + owner (Union[Unset, str]): + task_key (Union[Unset, str]): + team_id (Union[Unset, str]): + dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ + + uuid: UUID title: str - trackers: List[str] + trackers: list[str] comment_zero: str - affects: List[Affect] - comments: List[Comment] - package_versions: List[Package] - acknowledgments: List[FlawAcknowledgment] - references: List[FlawReference] - cvss_scores: List[FlawCVSS] + affects: list["Affect"] + comments: list["Comment"] + package_versions: list["Package"] + acknowledgments: list["FlawAcknowledgment"] + references: list["FlawReference"] + cvss_scores: list["FlawCVSS"] embargoed: bool created_dt: datetime.datetime updated_dt: datetime.datetime - classification: FlawClassification - alerts: List[Alert] - cve_id: Union[Unset, None, str] = UNSET + classification: "FlawClassification" + alerts: list["Alert"] + cve_id: Union[None, Unset, str] = UNSET impact: Union[BlankEnum, ImpactEnum, Unset] = UNSET - components: Union[Unset, List[str]] = UNSET + components: Union[Unset, list[str]] = UNSET cve_description: Union[Unset, str] = UNSET - requires_cve_description: Union[ - BlankEnum, RequiresCveDescriptionEnum, Unset - ] = UNSET + requires_cve_description: Union[BlankEnum, RequiresCveDescriptionEnum, Unset] = UNSET statement: Union[Unset, str] = UNSET cwe_id: Union[Unset, str] = UNSET - unembargo_dt: Union[Unset, None, datetime.datetime] = UNSET + unembargo_dt: Union[None, Unset, datetime.datetime] = UNSET source: Union[BlankEnum, SourceBe0Enum, Unset] = UNSET - reported_dt: Union[Unset, None, datetime.datetime] = UNSET + reported_dt: Union[None, Unset, datetime.datetime] = UNSET mitigation: Union[Unset, str] = UNSET major_incident_state: Union[BlankEnum, MajorIncidentStateEnum, Unset] = UNSET - major_incident_start_dt: Union[Unset, None, datetime.datetime] = UNSET + major_incident_start_dt: Union[None, Unset, datetime.datetime] = UNSET nist_cvss_validation: Union[BlankEnum, NistCvssValidationEnum, Unset] = UNSET group_key: Union[Unset, str] = UNSET owner: Union[Unset, str] = UNSET @@ -66,77 +110,83 @@ class OsidbApiV1FlawsCreateResponse201(OSIDBModel): env: Union[Unset, str] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + uuid: str = UNSET + if not isinstance(self.uuid, Unset): + uuid = str(self.uuid) - def to_dict(self) -> Dict[str, Any]: - uuid = self.uuid title = self.title - trackers: List[str] = UNSET + + trackers: list[str] = 
UNSET if not isinstance(self.trackers, Unset): trackers = self.trackers comment_zero = self.comment_zero - affects: List[Dict[str, Any]] = UNSET + + affects: list[dict[str, Any]] = UNSET if not isinstance(self.affects, Unset): affects = [] for affects_item_data in self.affects: - affects_item: Dict[str, Any] = UNSET + affects_item: dict[str, Any] = UNSET if not isinstance(affects_item_data, Unset): affects_item = affects_item_data.to_dict() affects.append(affects_item) - comments: List[Dict[str, Any]] = UNSET + comments: list[dict[str, Any]] = UNSET if not isinstance(self.comments, Unset): comments = [] for comments_item_data in self.comments: - comments_item: Dict[str, Any] = UNSET + comments_item: dict[str, Any] = UNSET if not isinstance(comments_item_data, Unset): comments_item = comments_item_data.to_dict() comments.append(comments_item) - package_versions: List[Dict[str, Any]] = UNSET + package_versions: list[dict[str, Any]] = UNSET if not isinstance(self.package_versions, Unset): package_versions = [] for package_versions_item_data in self.package_versions: - package_versions_item: Dict[str, Any] = UNSET + package_versions_item: dict[str, Any] = UNSET if not isinstance(package_versions_item_data, Unset): package_versions_item = package_versions_item_data.to_dict() package_versions.append(package_versions_item) - acknowledgments: List[Dict[str, Any]] = UNSET + acknowledgments: list[dict[str, Any]] = UNSET if not isinstance(self.acknowledgments, Unset): acknowledgments = [] for acknowledgments_item_data in self.acknowledgments: - acknowledgments_item: Dict[str, Any] = UNSET + acknowledgments_item: dict[str, Any] = UNSET if not isinstance(acknowledgments_item_data, Unset): acknowledgments_item = acknowledgments_item_data.to_dict() acknowledgments.append(acknowledgments_item) - references: List[Dict[str, Any]] = UNSET + references: list[dict[str, Any]] = UNSET if not isinstance(self.references, Unset): references = [] for references_item_data in self.references: - references_item: Dict[str, Any] = UNSET + references_item: dict[str, Any] = UNSET if not isinstance(references_item_data, Unset): references_item = references_item_data.to_dict() references.append(references_item) - cvss_scores: List[Dict[str, Any]] = UNSET + cvss_scores: list[dict[str, Any]] = UNSET if not isinstance(self.cvss_scores, Unset): cvss_scores = [] for cvss_scores_item_data in self.cvss_scores: - cvss_scores_item: Dict[str, Any] = UNSET + cvss_scores_item: dict[str, Any] = UNSET if not isinstance(cvss_scores_item_data, Unset): cvss_scores_item = cvss_scores_item_data.to_dict() cvss_scores.append(cvss_scores_item) embargoed = self.embargoed + created_dt: str = UNSET if not isinstance(self.created_dt, Unset): created_dt = self.created_dt.isoformat() @@ -145,65 +195,72 @@ def to_dict(self) -> Dict[str, Any]: if not isinstance(self.updated_dt, Unset): updated_dt = self.updated_dt.isoformat() - classification: Dict[str, Any] = UNSET + classification: dict[str, Any] = UNSET if not isinstance(self.classification, Unset): classification = self.classification.to_dict() - alerts: List[Dict[str, Any]] = UNSET + alerts: list[dict[str, Any]] = UNSET if not isinstance(self.alerts, Unset): alerts = [] for alerts_item_data in self.alerts: - alerts_item: Dict[str, Any] = UNSET + alerts_item: dict[str, Any] = UNSET if not isinstance(alerts_item_data, Unset): alerts_item = alerts_item_data.to_dict() alerts.append(alerts_item) - cve_id = self.cve_id + cve_id: Union[None, Unset, str] + if isinstance(self.cve_id, Unset): + cve_id = UNSET 
+ else: + cve_id = self.cve_id + impact: Union[Unset, str] if isinstance(self.impact, Unset): impact = UNSET elif isinstance(self.impact, ImpactEnum): impact = UNSET if not isinstance(self.impact, Unset): - impact = ImpactEnum(self.impact).value else: impact = UNSET if not isinstance(self.impact, Unset): - impact = BlankEnum(self.impact).value - components: Union[Unset, List[str]] = UNSET + components: Union[Unset, list[str]] = UNSET if not isinstance(self.components, Unset): components = self.components cve_description = self.cve_description + requires_cve_description: Union[Unset, str] if isinstance(self.requires_cve_description, Unset): requires_cve_description = UNSET elif isinstance(self.requires_cve_description, RequiresCveDescriptionEnum): requires_cve_description = UNSET if not isinstance(self.requires_cve_description, Unset): - - requires_cve_description = RequiresCveDescriptionEnum( - self.requires_cve_description - ).value + requires_cve_description = RequiresCveDescriptionEnum(self.requires_cve_description).value else: requires_cve_description = UNSET if not isinstance(self.requires_cve_description, Unset): - - requires_cve_description = BlankEnum( - self.requires_cve_description - ).value + requires_cve_description = BlankEnum(self.requires_cve_description).value statement = self.statement + cwe_id = self.cwe_id - unembargo_dt: Union[Unset, None, str] = UNSET - if not isinstance(self.unembargo_dt, Unset): - unembargo_dt = self.unembargo_dt.isoformat() if self.unembargo_dt else None + + unembargo_dt: Union[None, Unset, str] + if isinstance(self.unembargo_dt, Unset): + unembargo_dt = UNSET + elif isinstance(self.unembargo_dt, datetime.datetime): + unembargo_dt = UNSET + if not isinstance(self.unembargo_dt, Unset): + unembargo_dt = self.unembargo_dt.isoformat() + + else: + unembargo_dt = self.unembargo_dt source: Union[Unset, str] if isinstance(self.source, Unset): @@ -211,44 +268,49 @@ def to_dict(self) -> Dict[str, Any]: elif isinstance(self.source, SourceBe0Enum): source = UNSET if not isinstance(self.source, Unset): - source = SourceBe0Enum(self.source).value else: source = UNSET if not isinstance(self.source, Unset): - source = BlankEnum(self.source).value - reported_dt: Union[Unset, None, str] = UNSET - if not isinstance(self.reported_dt, Unset): - reported_dt = self.reported_dt.isoformat() if self.reported_dt else None + reported_dt: Union[None, Unset, str] + if isinstance(self.reported_dt, Unset): + reported_dt = UNSET + elif isinstance(self.reported_dt, datetime.datetime): + reported_dt = UNSET + if not isinstance(self.reported_dt, Unset): + reported_dt = self.reported_dt.isoformat() + + else: + reported_dt = self.reported_dt mitigation = self.mitigation + major_incident_state: Union[Unset, str] if isinstance(self.major_incident_state, Unset): major_incident_state = UNSET elif isinstance(self.major_incident_state, MajorIncidentStateEnum): major_incident_state = UNSET if not isinstance(self.major_incident_state, Unset): - - major_incident_state = MajorIncidentStateEnum( - self.major_incident_state - ).value + major_incident_state = MajorIncidentStateEnum(self.major_incident_state).value else: major_incident_state = UNSET if not isinstance(self.major_incident_state, Unset): - major_incident_state = BlankEnum(self.major_incident_state).value - major_incident_start_dt: Union[Unset, None, str] = UNSET - if not isinstance(self.major_incident_start_dt, Unset): - major_incident_start_dt = ( - self.major_incident_start_dt.isoformat() - if self.major_incident_start_dt - else None 
- ) + major_incident_start_dt: Union[None, Unset, str] + if isinstance(self.major_incident_start_dt, Unset): + major_incident_start_dt = UNSET + elif isinstance(self.major_incident_start_dt, datetime.datetime): + major_incident_start_dt = UNSET + if not isinstance(self.major_incident_start_dt, Unset): + major_incident_start_dt = self.major_incident_start_dt.isoformat() + + else: + major_incident_start_dt = self.major_incident_start_dt nist_cvss_validation: Union[Unset, str] if isinstance(self.nist_cvss_validation, Unset): @@ -256,30 +318,32 @@ def to_dict(self) -> Dict[str, Any]: elif isinstance(self.nist_cvss_validation, NistCvssValidationEnum): nist_cvss_validation = UNSET if not isinstance(self.nist_cvss_validation, Unset): - - nist_cvss_validation = NistCvssValidationEnum( - self.nist_cvss_validation - ).value + nist_cvss_validation = NistCvssValidationEnum(self.nist_cvss_validation).value else: nist_cvss_validation = UNSET if not isinstance(self.nist_cvss_validation, Unset): - nist_cvss_validation = BlankEnum(self.nist_cvss_validation).value group_key = self.group_key + owner = self.owner + task_key = self.task_key + team_id = self.team_id + dt: Union[Unset, str] = UNSET if not isinstance(self.dt, Unset): dt = self.dt.isoformat() env = self.env + revision = self.revision + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(uuid, Unset): field_dict["uuid"] = uuid @@ -359,110 +423,112 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.affect import Affect + from ..models.alert import Alert + from ..models.comment import Comment + from ..models.flaw_acknowledgment import FlawAcknowledgment + from ..models.flaw_classification import FlawClassification + from ..models.flaw_cvss import FlawCVSS + from ..models.flaw_reference import FlawReference + from ..models.package import Package + d = src_dict.copy() - uuid = d.pop("uuid", UNSET) + # } + _uuid = d.pop("uuid", UNSET) + uuid: UUID + if isinstance(_uuid, Unset): + uuid = UNSET + else: + uuid = UUID(_uuid) title = d.pop("title", UNSET) - trackers = cast(List[str], d.pop("trackers", UNSET)) + trackers = cast(list[str], d.pop("trackers", UNSET)) comment_zero = d.pop("comment_zero", UNSET) affects = [] _affects = d.pop("affects", UNSET) - if _affects is UNSET: - affects = UNSET - else: - for affects_item_data in _affects or []: - _affects_item = affects_item_data - affects_item: Affect - if isinstance(_affects_item, Unset): - affects_item = UNSET - else: - affects_item = Affect.from_dict(_affects_item) + for affects_item_data in _affects or []: + # } + _affects_item = affects_item_data + affects_item: Affect + if isinstance(_affects_item, Unset): + affects_item = UNSET + else: + affects_item = Affect.from_dict(_affects_item) - affects.append(affects_item) + affects.append(affects_item) comments = [] _comments = d.pop("comments", UNSET) - if _comments is UNSET: - comments = UNSET - else: - for comments_item_data in _comments or []: - _comments_item = comments_item_data - comments_item: Comment - if isinstance(_comments_item, Unset): - comments_item = UNSET - else: - comments_item = Comment.from_dict(_comments_item) + for comments_item_data in _comments or []: + # } + _comments_item = comments_item_data + comments_item: Comment + if isinstance(_comments_item, Unset): + comments_item = 
UNSET + else: + comments_item = Comment.from_dict(_comments_item) - comments.append(comments_item) + comments.append(comments_item) package_versions = [] _package_versions = d.pop("package_versions", UNSET) - if _package_versions is UNSET: - package_versions = UNSET - else: - for package_versions_item_data in _package_versions or []: - _package_versions_item = package_versions_item_data - package_versions_item: Package - if isinstance(_package_versions_item, Unset): - package_versions_item = UNSET - else: - package_versions_item = Package.from_dict(_package_versions_item) + for package_versions_item_data in _package_versions or []: + # } + _package_versions_item = package_versions_item_data + package_versions_item: Package + if isinstance(_package_versions_item, Unset): + package_versions_item = UNSET + else: + package_versions_item = Package.from_dict(_package_versions_item) - package_versions.append(package_versions_item) + package_versions.append(package_versions_item) acknowledgments = [] _acknowledgments = d.pop("acknowledgments", UNSET) - if _acknowledgments is UNSET: - acknowledgments = UNSET - else: - for acknowledgments_item_data in _acknowledgments or []: - _acknowledgments_item = acknowledgments_item_data - acknowledgments_item: FlawAcknowledgment - if isinstance(_acknowledgments_item, Unset): - acknowledgments_item = UNSET - else: - acknowledgments_item = FlawAcknowledgment.from_dict( - _acknowledgments_item - ) + for acknowledgments_item_data in _acknowledgments or []: + # } + _acknowledgments_item = acknowledgments_item_data + acknowledgments_item: FlawAcknowledgment + if isinstance(_acknowledgments_item, Unset): + acknowledgments_item = UNSET + else: + acknowledgments_item = FlawAcknowledgment.from_dict(_acknowledgments_item) - acknowledgments.append(acknowledgments_item) + acknowledgments.append(acknowledgments_item) references = [] _references = d.pop("references", UNSET) - if _references is UNSET: - references = UNSET - else: - for references_item_data in _references or []: - _references_item = references_item_data - references_item: FlawReference - if isinstance(_references_item, Unset): - references_item = UNSET - else: - references_item = FlawReference.from_dict(_references_item) + for references_item_data in _references or []: + # } + _references_item = references_item_data + references_item: FlawReference + if isinstance(_references_item, Unset): + references_item = UNSET + else: + references_item = FlawReference.from_dict(_references_item) - references.append(references_item) + references.append(references_item) cvss_scores = [] _cvss_scores = d.pop("cvss_scores", UNSET) - if _cvss_scores is UNSET: - cvss_scores = UNSET - else: - for cvss_scores_item_data in _cvss_scores or []: - _cvss_scores_item = cvss_scores_item_data - cvss_scores_item: FlawCVSS - if isinstance(_cvss_scores_item, Unset): - cvss_scores_item = UNSET - else: - cvss_scores_item = FlawCVSS.from_dict(_cvss_scores_item) + for cvss_scores_item_data in _cvss_scores or []: + # } + _cvss_scores_item = cvss_scores_item_data + cvss_scores_item: FlawCVSS + if isinstance(_cvss_scores_item, Unset): + cvss_scores_item = UNSET + else: + cvss_scores_item = FlawCVSS.from_dict(_cvss_scores_item) - cvss_scores.append(cvss_scores_item) + cvss_scores.append(cvss_scores_item) embargoed = d.pop("embargoed", UNSET) + # } _created_dt = d.pop("created_dt", UNSET) created_dt: datetime.datetime if isinstance(_created_dt, Unset): @@ -470,6 +536,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: created_dt = 
isoparse(_created_dt) + # } _updated_dt = d.pop("updated_dt", UNSET) updated_dt: datetime.datetime if isinstance(_updated_dt, Unset): @@ -477,6 +544,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: updated_dt = isoparse(_updated_dt) + # } _classification = d.pop("classification", UNSET) classification: FlawClassification if isinstance(_classification, Unset): @@ -486,20 +554,25 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: alerts = [] _alerts = d.pop("alerts", UNSET) - if _alerts is UNSET: - alerts = UNSET - else: - for alerts_item_data in _alerts or []: - _alerts_item = alerts_item_data - alerts_item: Alert - if isinstance(_alerts_item, Unset): - alerts_item = UNSET - else: - alerts_item = Alert.from_dict(_alerts_item) + for alerts_item_data in _alerts or []: + # } + _alerts_item = alerts_item_data + alerts_item: Alert + if isinstance(_alerts_item, Unset): + alerts_item = UNSET + else: + alerts_item = Alert.from_dict(_alerts_item) - alerts.append(alerts_item) + alerts.append(alerts_item) - cve_id = d.pop("cve_id", UNSET) + def _parse_cve_id(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + cve_id = _parse_cve_id(d.pop("cve_id", UNSET)) def _parse_impact(data: object) -> Union[BlankEnum, ImpactEnum, Unset]: if isinstance(data, Unset): @@ -507,8 +580,9 @@ def _parse_impact(data: object) -> Union[BlankEnum, ImpactEnum, Unset]: try: if not isinstance(data, str): raise TypeError() + # } _impact_type_0 = data - impact_type_0: Union[Unset, ImpactEnum] + impact_type_0: ImpactEnum if isinstance(_impact_type_0, Unset): impact_type_0 = UNSET else: @@ -519,8 +593,9 @@ def _parse_impact(data: object) -> Union[BlankEnum, ImpactEnum, Unset]: pass if not isinstance(data, str): raise TypeError() + # } _impact_type_1 = data - impact_type_1: Union[Unset, BlankEnum] + impact_type_1: BlankEnum if isinstance(_impact_type_1, Unset): impact_type_1 = UNSET else: @@ -530,61 +605,67 @@ def _parse_impact(data: object) -> Union[BlankEnum, ImpactEnum, Unset]: impact = _parse_impact(d.pop("impact", UNSET)) - components = cast(List[str], d.pop("components", UNSET)) + components = cast(list[str], d.pop("components", UNSET)) cve_description = d.pop("cve_description", UNSET) - def _parse_requires_cve_description( - data: object, - ) -> Union[BlankEnum, RequiresCveDescriptionEnum, Unset]: + def _parse_requires_cve_description(data: object) -> Union[BlankEnum, RequiresCveDescriptionEnum, Unset]: if isinstance(data, Unset): return data try: if not isinstance(data, str): raise TypeError() + # } _requires_cve_description_type_0 = data - requires_cve_description_type_0: Union[ - Unset, RequiresCveDescriptionEnum - ] + requires_cve_description_type_0: RequiresCveDescriptionEnum if isinstance(_requires_cve_description_type_0, Unset): requires_cve_description_type_0 = UNSET else: - requires_cve_description_type_0 = RequiresCveDescriptionEnum( - _requires_cve_description_type_0 - ) + requires_cve_description_type_0 = RequiresCveDescriptionEnum(_requires_cve_description_type_0) return requires_cve_description_type_0 except: # noqa: E722 pass if not isinstance(data, str): raise TypeError() + # } _requires_cve_description_type_1 = data - requires_cve_description_type_1: Union[Unset, BlankEnum] + requires_cve_description_type_1: BlankEnum if isinstance(_requires_cve_description_type_1, Unset): requires_cve_description_type_1 = UNSET else: - requires_cve_description_type_1 = 
BlankEnum( - _requires_cve_description_type_1 - ) + requires_cve_description_type_1 = BlankEnum(_requires_cve_description_type_1) return requires_cve_description_type_1 - requires_cve_description = _parse_requires_cve_description( - d.pop("requires_cve_description", UNSET) - ) + requires_cve_description = _parse_requires_cve_description(d.pop("requires_cve_description", UNSET)) statement = d.pop("statement", UNSET) cwe_id = d.pop("cwe_id", UNSET) - _unembargo_dt = d.pop("unembargo_dt", UNSET) - unembargo_dt: Union[Unset, None, datetime.datetime] - if _unembargo_dt is None: - unembargo_dt = None - elif isinstance(_unembargo_dt, Unset): - unembargo_dt = UNSET - else: - unembargo_dt = isoparse(_unembargo_dt) + def _parse_unembargo_dt(data: object) -> Union[None, Unset, datetime.datetime]: + if data is None: + return data + if isinstance(data, Unset): + return data + try: + if not isinstance(data, str): + raise TypeError() + # } + _unembargo_dt_type_0 = data + unembargo_dt_type_0: datetime.datetime + if isinstance(_unembargo_dt_type_0, Unset): + unembargo_dt_type_0 = UNSET + else: + unembargo_dt_type_0 = isoparse(_unembargo_dt_type_0) + + return unembargo_dt_type_0 + except: # noqa: E722 + pass + return cast(Union[None, Unset, datetime.datetime], data) + + unembargo_dt = _parse_unembargo_dt(d.pop("unembargo_dt", UNSET)) def _parse_source(data: object) -> Union[BlankEnum, SourceBe0Enum, Unset]: if isinstance(data, Unset): @@ -592,8 +673,9 @@ def _parse_source(data: object) -> Union[BlankEnum, SourceBe0Enum, Unset]: try: if not isinstance(data, str): raise TypeError() + # } _source_type_0 = data - source_type_0: Union[Unset, SourceBe0Enum] + source_type_0: SourceBe0Enum if isinstance(_source_type_0, Unset): source_type_0 = UNSET else: @@ -604,8 +686,9 @@ def _parse_source(data: object) -> Union[BlankEnum, SourceBe0Enum, Unset]: pass if not isinstance(data, str): raise TypeError() + # } _source_type_1 = data - source_type_1: Union[Unset, BlankEnum] + source_type_1: BlankEnum if isinstance(_source_type_1, Unset): source_type_1 = UNSET else: @@ -615,41 +698,53 @@ def _parse_source(data: object) -> Union[BlankEnum, SourceBe0Enum, Unset]: source = _parse_source(d.pop("source", UNSET)) - _reported_dt = d.pop("reported_dt", UNSET) - reported_dt: Union[Unset, None, datetime.datetime] - if _reported_dt is None: - reported_dt = None - elif isinstance(_reported_dt, Unset): - reported_dt = UNSET - else: - reported_dt = isoparse(_reported_dt) + def _parse_reported_dt(data: object) -> Union[None, Unset, datetime.datetime]: + if data is None: + return data + if isinstance(data, Unset): + return data + try: + if not isinstance(data, str): + raise TypeError() + # } + _reported_dt_type_0 = data + reported_dt_type_0: datetime.datetime + if isinstance(_reported_dt_type_0, Unset): + reported_dt_type_0 = UNSET + else: + reported_dt_type_0 = isoparse(_reported_dt_type_0) + + return reported_dt_type_0 + except: # noqa: E722 + pass + return cast(Union[None, Unset, datetime.datetime], data) + + reported_dt = _parse_reported_dt(d.pop("reported_dt", UNSET)) mitigation = d.pop("mitigation", UNSET) - def _parse_major_incident_state( - data: object, - ) -> Union[BlankEnum, MajorIncidentStateEnum, Unset]: + def _parse_major_incident_state(data: object) -> Union[BlankEnum, MajorIncidentStateEnum, Unset]: if isinstance(data, Unset): return data try: if not isinstance(data, str): raise TypeError() + # } _major_incident_state_type_0 = data - major_incident_state_type_0: Union[Unset, MajorIncidentStateEnum] + 
major_incident_state_type_0: MajorIncidentStateEnum if isinstance(_major_incident_state_type_0, Unset): major_incident_state_type_0 = UNSET else: - major_incident_state_type_0 = MajorIncidentStateEnum( - _major_incident_state_type_0 - ) + major_incident_state_type_0 = MajorIncidentStateEnum(_major_incident_state_type_0) return major_incident_state_type_0 except: # noqa: E722 pass if not isinstance(data, str): raise TypeError() + # } _major_incident_state_type_1 = data - major_incident_state_type_1: Union[Unset, BlankEnum] + major_incident_state_type_1: BlankEnum if isinstance(_major_incident_state_type_1, Unset): major_incident_state_type_1 = UNSET else: @@ -657,43 +752,53 @@ def _parse_major_incident_state( return major_incident_state_type_1 - major_incident_state = _parse_major_incident_state( - d.pop("major_incident_state", UNSET) - ) + major_incident_state = _parse_major_incident_state(d.pop("major_incident_state", UNSET)) - _major_incident_start_dt = d.pop("major_incident_start_dt", UNSET) - major_incident_start_dt: Union[Unset, None, datetime.datetime] - if _major_incident_start_dt is None: - major_incident_start_dt = None - elif isinstance(_major_incident_start_dt, Unset): - major_incident_start_dt = UNSET - else: - major_incident_start_dt = isoparse(_major_incident_start_dt) + def _parse_major_incident_start_dt(data: object) -> Union[None, Unset, datetime.datetime]: + if data is None: + return data + if isinstance(data, Unset): + return data + try: + if not isinstance(data, str): + raise TypeError() + # } + _major_incident_start_dt_type_0 = data + major_incident_start_dt_type_0: datetime.datetime + if isinstance(_major_incident_start_dt_type_0, Unset): + major_incident_start_dt_type_0 = UNSET + else: + major_incident_start_dt_type_0 = isoparse(_major_incident_start_dt_type_0) - def _parse_nist_cvss_validation( - data: object, - ) -> Union[BlankEnum, NistCvssValidationEnum, Unset]: + return major_incident_start_dt_type_0 + except: # noqa: E722 + pass + return cast(Union[None, Unset, datetime.datetime], data) + + major_incident_start_dt = _parse_major_incident_start_dt(d.pop("major_incident_start_dt", UNSET)) + + def _parse_nist_cvss_validation(data: object) -> Union[BlankEnum, NistCvssValidationEnum, Unset]: if isinstance(data, Unset): return data try: if not isinstance(data, str): raise TypeError() + # } _nist_cvss_validation_type_0 = data - nist_cvss_validation_type_0: Union[Unset, NistCvssValidationEnum] + nist_cvss_validation_type_0: NistCvssValidationEnum if isinstance(_nist_cvss_validation_type_0, Unset): nist_cvss_validation_type_0 = UNSET else: - nist_cvss_validation_type_0 = NistCvssValidationEnum( - _nist_cvss_validation_type_0 - ) + nist_cvss_validation_type_0 = NistCvssValidationEnum(_nist_cvss_validation_type_0) return nist_cvss_validation_type_0 except: # noqa: E722 pass if not isinstance(data, str): raise TypeError() + # } _nist_cvss_validation_type_1 = data - nist_cvss_validation_type_1: Union[Unset, BlankEnum] + nist_cvss_validation_type_1: BlankEnum if isinstance(_nist_cvss_validation_type_1, Unset): nist_cvss_validation_type_1 = UNSET else: @@ -701,9 +806,7 @@ def _parse_nist_cvss_validation( return nist_cvss_validation_type_1 - nist_cvss_validation = _parse_nist_cvss_validation( - d.pop("nist_cvss_validation", UNSET) - ) + nist_cvss_validation = _parse_nist_cvss_validation(d.pop("nist_cvss_validation", UNSET)) group_key = d.pop("group_key", UNSET) @@ -713,6 +816,7 @@ def _parse_nist_cvss_validation( team_id = d.pop("team_id", UNSET) + # } _dt = d.pop("dt", UNSET) 
dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -772,34 +876,34 @@ def _parse_nist_cvss_validation( @staticmethod def get_fields(): return { - "uuid": str, + "uuid": UUID, "title": str, - "trackers": List[str], + "trackers": list[str], "comment_zero": str, - "affects": List[Affect], - "comments": List[Comment], - "package_versions": List[Package], - "acknowledgments": List[FlawAcknowledgment], - "references": List[FlawReference], - "cvss_scores": List[FlawCVSS], + "affects": list["Affect"], + "comments": list["Comment"], + "package_versions": list["Package"], + "acknowledgments": list["FlawAcknowledgment"], + "references": list["FlawReference"], + "cvss_scores": list["FlawCVSS"], "embargoed": bool, "created_dt": datetime.datetime, "updated_dt": datetime.datetime, "classification": FlawClassification, - "alerts": List[Alert], - "cve_id": str, + "alerts": list["Alert"], + "cve_id": Union[None, str], "impact": Union[BlankEnum, ImpactEnum], - "components": List[str], + "components": list[str], "cve_description": str, "requires_cve_description": Union[BlankEnum, RequiresCveDescriptionEnum], "statement": str, "cwe_id": str, - "unembargo_dt": datetime.datetime, + "unembargo_dt": Union[None, datetime.datetime], "source": Union[BlankEnum, SourceBe0Enum], - "reported_dt": datetime.datetime, + "reported_dt": Union[None, datetime.datetime], "mitigation": str, "major_incident_state": Union[BlankEnum, MajorIncidentStateEnum], - "major_incident_start_dt": datetime.datetime, + "major_incident_start_dt": Union[None, datetime.datetime], "nist_cvss_validation": Union[BlankEnum, NistCvssValidationEnum], "group_key": str, "owner": str, @@ -812,7 +916,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_cvss_scores_create_response_201.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_cvss_scores_create_response_201.py index fcacfee..09cbe7c 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_cvss_scores_create_response_201.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_cvss_scores_create_response_201.py @@ -1,58 +1,86 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union, cast +from uuid import UUID -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.alert import Alert from ..models.cvss_version_enum import CvssVersionEnum from ..models.issuer_enum import IssuerEnum from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.alert import Alert + + T = TypeVar("T", bound="OsidbApiV1FlawsCvssScoresCreateResponse201") -@attr.s(auto_attribs=True) +@_attrs_define class OsidbApiV1FlawsCvssScoresCreateResponse201(OSIDBModel): - """ """ + """ + Attributes: + cvss_version (CvssVersionEnum): + issuer (IssuerEnum): + score (float): + uuid (UUID): + vector (str): + embargoed (bool): The embargoed boolean attribute is technically read-only as it just indirectly modifies the + ACLs but is mandatory as it controls the access to the resource. 
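# --- illustrative sketch (not generated code) -------------------------------
# Nullable fields in the flaw response above (unembargo_dt, reported_dt,
# comment, ...) are now decoded by small _parse_* helpers that keep three cases
# apart: explicit JSON null (None), missing key (UNSET) and a real value. A
# standalone restatement of that shape for a nullable datetime, reusing the
# bindings' Unset sentinel; the generated helpers additionally wrap the parse
# in try/except so the same structure can back the enum-or-BlankEnum unions.
import datetime
from typing import Union

from dateutil.parser import isoparse

from osidb_bindings.bindings.python_client.types import UNSET, Unset


def parse_nullable_dt(data: object) -> Union[None, Unset, datetime.datetime]:
    if data is None:             # server sent an explicit null
        return data
    if isinstance(data, Unset):  # key was absent from the payload
        return data
    return isoparse(str(data))   # otherwise an ISO-8601 timestamp string


assert parse_nullable_dt(None) is None
assert parse_nullable_dt(UNSET) is UNSET
assert parse_nullable_dt("2024-12-18T18:15:17Z").year == 2024
# --- end of sketch -----------------------------------------------------------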
+ alerts (list['Alert']): + created_dt (datetime.datetime): + updated_dt (datetime.datetime): The updated_dt timestamp attribute is mandatory on update as it is used to + detect mit-air collisions. + flaw (Union[Unset, UUID]): + comment (Union[None, Unset, str]): + dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ cvss_version: CvssVersionEnum issuer: IssuerEnum score: float - uuid: str + uuid: UUID vector: str embargoed: bool - alerts: List[Alert] + alerts: list["Alert"] created_dt: datetime.datetime updated_dt: datetime.datetime - flaw: Union[Unset, str] = UNSET - comment: Union[Unset, None, str] = UNSET + flaw: Union[Unset, UUID] = UNSET + comment: Union[None, Unset, str] = UNSET dt: Union[Unset, datetime.datetime] = UNSET env: Union[Unset, str] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: cvss_version: str = UNSET if not isinstance(self.cvss_version, Unset): - cvss_version = CvssVersionEnum(self.cvss_version).value issuer: str = UNSET if not isinstance(self.issuer, Unset): - issuer = IssuerEnum(self.issuer).value score = self.score - uuid = self.uuid + + uuid: str = UNSET + if not isinstance(self.uuid, Unset): + uuid = str(self.uuid) + vector = self.vector + embargoed = self.embargoed - alerts: List[Dict[str, Any]] = UNSET + + alerts: list[dict[str, Any]] = UNSET if not isinstance(self.alerts, Unset): alerts = [] for alerts_item_data in self.alerts: - alerts_item: Dict[str, Any] = UNSET + alerts_item: dict[str, Any] = UNSET if not isinstance(alerts_item_data, Unset): alerts_item = alerts_item_data.to_dict() @@ -66,17 +94,27 @@ def to_dict(self) -> Dict[str, Any]: if not isinstance(self.updated_dt, Unset): updated_dt = self.updated_dt.isoformat() - flaw = self.flaw - comment = self.comment + flaw: Union[Unset, str] = UNSET + if not isinstance(self.flaw, Unset): + flaw = str(self.flaw) + + comment: Union[None, Unset, str] + if isinstance(self.comment, Unset): + comment = UNSET + else: + comment = self.comment + dt: Union[Unset, str] = UNSET if not isinstance(self.dt, Unset): dt = self.dt.isoformat() env = self.env + revision = self.revision + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(cvss_version, Unset): field_dict["cvss_version"] = cvss_version @@ -112,8 +150,11 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.alert import Alert + d = src_dict.copy() + # } _cvss_version = d.pop("cvss_version", UNSET) cvss_version: CvssVersionEnum if isinstance(_cvss_version, Unset): @@ -121,6 +162,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: cvss_version = CvssVersionEnum(_cvss_version) + # } _issuer = d.pop("issuer", UNSET) issuer: IssuerEnum if isinstance(_issuer, Unset): @@ -130,7 +172,13 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: score = d.pop("score", UNSET) - uuid = d.pop("uuid", UNSET) + # } + _uuid = d.pop("uuid", UNSET) + uuid: UUID + if isinstance(_uuid, Unset): + uuid = UNSET + else: + uuid = UUID(_uuid) vector = d.pop("vector", 
UNSET) @@ -138,19 +186,18 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: alerts = [] _alerts = d.pop("alerts", UNSET) - if _alerts is UNSET: - alerts = UNSET - else: - for alerts_item_data in _alerts or []: - _alerts_item = alerts_item_data - alerts_item: Alert - if isinstance(_alerts_item, Unset): - alerts_item = UNSET - else: - alerts_item = Alert.from_dict(_alerts_item) - - alerts.append(alerts_item) - + for alerts_item_data in _alerts or []: + # } + _alerts_item = alerts_item_data + alerts_item: Alert + if isinstance(_alerts_item, Unset): + alerts_item = UNSET + else: + alerts_item = Alert.from_dict(_alerts_item) + + alerts.append(alerts_item) + + # } _created_dt = d.pop("created_dt", UNSET) created_dt: datetime.datetime if isinstance(_created_dt, Unset): @@ -158,6 +205,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: created_dt = isoparse(_created_dt) + # } _updated_dt = d.pop("updated_dt", UNSET) updated_dt: datetime.datetime if isinstance(_updated_dt, Unset): @@ -165,10 +213,24 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: updated_dt = isoparse(_updated_dt) - flaw = d.pop("flaw", UNSET) + # } + _flaw = d.pop("flaw", UNSET) + flaw: Union[Unset, UUID] + if isinstance(_flaw, Unset): + flaw = UNSET + else: + flaw = UUID(_flaw) + + def _parse_comment(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) - comment = d.pop("comment", UNSET) + comment = _parse_comment(d.pop("comment", UNSET)) + # } _dt = d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -209,14 +271,14 @@ def get_fields(): "cvss_version": CvssVersionEnum, "issuer": IssuerEnum, "score": float, - "uuid": str, + "uuid": UUID, "vector": str, "embargoed": bool, - "alerts": List[Alert], + "alerts": list["Alert"], "created_dt": datetime.datetime, "updated_dt": datetime.datetime, - "flaw": str, - "comment": str, + "flaw": UUID, + "comment": Union[None, str], "dt": datetime.datetime, "env": str, "revision": str, @@ -224,7 +286,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_cvss_scores_destroy_response_200.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_cvss_scores_destroy_response_200.py index 4613f04..c8ab21a 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_cvss_scores_destroy_response_200.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_cvss_scores_destroy_response_200.py @@ -1,7 +1,8 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import Any, TypeVar, Union -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse from ..types import UNSET, OSIDBModel, Unset @@ -9,26 +10,34 @@ T = TypeVar("T", bound="OsidbApiV1FlawsCvssScoresDestroyResponse200") -@attr.s(auto_attribs=True) +@_attrs_define class OsidbApiV1FlawsCvssScoresDestroyResponse200(OSIDBModel): - """ """ + """ + Attributes: + dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ dt: Union[Unset, datetime.datetime] = UNSET env: Union[Unset, str] = UNSET revision: Union[Unset, 
str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: dt: Union[Unset, str] = UNSET if not isinstance(self.dt, Unset): dt = self.dt.isoformat() env = self.env + revision = self.revision + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(dt, Unset): field_dict["dt"] = dt @@ -42,8 +51,9 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: d = src_dict.copy() + # } _dt = d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -77,7 +87,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_cvss_scores_list_response_200.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_cvss_scores_list_response_200.py index 887c3b1..4eb6656 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_cvss_scores_list_response_200.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_cvss_scores_list_response_200.py @@ -1,52 +1,79 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union, cast -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.flaw_cvss import FlawCVSS from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.flaw_cvss import FlawCVSS + + T = TypeVar("T", bound="OsidbApiV1FlawsCvssScoresListResponse200") -@attr.s(auto_attribs=True) +@_attrs_define class OsidbApiV1FlawsCvssScoresListResponse200(OSIDBModel): - """ """ + """ + Attributes: + count (int): Example: 123. + results (list['FlawCVSS']): + next_ (Union[None, Unset, str]): Example: http://api.example.org/accounts/?offset=400&limit=100. + previous (Union[None, Unset, str]): Example: http://api.example.org/accounts/?offset=200&limit=100. 
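# --- illustrative sketch (not generated code) -------------------------------
# Keys the bindings do not know about are kept in `additional_properties` and
# merged back by to_dict() (note the field_dict.update(self.additional_properties)
# call), so server-side additions are not silently dropped. Demonstrated on the
# small destroy response above, assuming from_dict() stores leftover keys in
# additional_properties as the additional_keys/__getitem__ helpers suggest.
from osidb_bindings.bindings.python_client.models.osidb_api_v1_flaws_cvss_scores_destroy_response_200 import (
    OsidbApiV1FlawsCvssScoresDestroyResponse200,
)

resp = OsidbApiV1FlawsCvssScoresDestroyResponse200.from_dict(
    {"env": "stage", "brand_new_field": "not in the schema yet"}
)
assert resp.env == "stage"
assert resp["brand_new_field"] == "not in the schema yet"  # via __getitem__
assert "brand_new_field" in resp.to_dict()                 # merged back on output
# --- end of sketch -----------------------------------------------------------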
+ dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ count: int - results: List[FlawCVSS] - next_: Union[Unset, None, str] = UNSET - previous: Union[Unset, None, str] = UNSET + results: list["FlawCVSS"] + next_: Union[None, Unset, str] = UNSET + previous: Union[None, Unset, str] = UNSET dt: Union[Unset, datetime.datetime] = UNSET env: Union[Unset, str] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: count = self.count - results: List[Dict[str, Any]] = UNSET + + results: list[dict[str, Any]] = UNSET if not isinstance(self.results, Unset): results = [] for results_item_data in self.results: - results_item: Dict[str, Any] = UNSET + results_item: dict[str, Any] = UNSET if not isinstance(results_item_data, Unset): results_item = results_item_data.to_dict() results.append(results_item) - next_ = self.next_ - previous = self.previous + next_: Union[None, Unset, str] + if isinstance(self.next_, Unset): + next_ = UNSET + else: + next_ = self.next_ + + previous: Union[None, Unset, str] + if isinstance(self.previous, Unset): + previous = UNSET + else: + previous = self.previous + dt: Union[Unset, str] = UNSET if not isinstance(self.dt, Unset): dt = self.dt.isoformat() env = self.env + revision = self.revision + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(count, Unset): field_dict["count"] = count @@ -68,29 +95,44 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.flaw_cvss import FlawCVSS + d = src_dict.copy() count = d.pop("count", UNSET) results = [] _results = d.pop("results", UNSET) - if _results is UNSET: - results = UNSET - else: - for results_item_data in _results or []: - _results_item = results_item_data - results_item: FlawCVSS - if isinstance(_results_item, Unset): - results_item = UNSET - else: - results_item = FlawCVSS.from_dict(_results_item) - - results.append(results_item) - - next_ = d.pop("next", UNSET) - - previous = d.pop("previous", UNSET) - + for results_item_data in _results or []: + # } + _results_item = results_item_data + results_item: FlawCVSS + if isinstance(_results_item, Unset): + results_item = UNSET + else: + results_item = FlawCVSS.from_dict(_results_item) + + results.append(results_item) + + def _parse_next_(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + next_ = _parse_next_(d.pop("next", UNSET)) + + def _parse_previous(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + previous = _parse_previous(d.pop("previous", UNSET)) + + # } _dt = d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -122,9 +164,9 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: def get_fields(): return { "count": int, - "results": List[FlawCVSS], - "next": str, - "previous": str, + "results": 
list["FlawCVSS"], + "next": Union[None, str], + "previous": Union[None, str], "dt": datetime.datetime, "env": str, "revision": str, @@ -132,7 +174,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_cvss_scores_retrieve_response_200.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_cvss_scores_retrieve_response_200.py index 86c17ec..c3a8c49 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_cvss_scores_retrieve_response_200.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_cvss_scores_retrieve_response_200.py @@ -1,58 +1,86 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union, cast +from uuid import UUID -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.alert import Alert from ..models.cvss_version_enum import CvssVersionEnum from ..models.issuer_enum import IssuerEnum from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.alert import Alert + + T = TypeVar("T", bound="OsidbApiV1FlawsCvssScoresRetrieveResponse200") -@attr.s(auto_attribs=True) +@_attrs_define class OsidbApiV1FlawsCvssScoresRetrieveResponse200(OSIDBModel): - """ """ + """ + Attributes: + cvss_version (CvssVersionEnum): + issuer (IssuerEnum): + score (float): + uuid (UUID): + vector (str): + embargoed (bool): The embargoed boolean attribute is technically read-only as it just indirectly modifies the + ACLs but is mandatory as it controls the access to the resource. + alerts (list['Alert']): + created_dt (datetime.datetime): + updated_dt (datetime.datetime): The updated_dt timestamp attribute is mandatory on update as it is used to + detect mit-air collisions. 
+ flaw (Union[Unset, UUID]): + comment (Union[None, Unset, str]): + dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ cvss_version: CvssVersionEnum issuer: IssuerEnum score: float - uuid: str + uuid: UUID vector: str embargoed: bool - alerts: List[Alert] + alerts: list["Alert"] created_dt: datetime.datetime updated_dt: datetime.datetime - flaw: Union[Unset, str] = UNSET - comment: Union[Unset, None, str] = UNSET + flaw: Union[Unset, UUID] = UNSET + comment: Union[None, Unset, str] = UNSET dt: Union[Unset, datetime.datetime] = UNSET env: Union[Unset, str] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: cvss_version: str = UNSET if not isinstance(self.cvss_version, Unset): - cvss_version = CvssVersionEnum(self.cvss_version).value issuer: str = UNSET if not isinstance(self.issuer, Unset): - issuer = IssuerEnum(self.issuer).value score = self.score - uuid = self.uuid + + uuid: str = UNSET + if not isinstance(self.uuid, Unset): + uuid = str(self.uuid) + vector = self.vector + embargoed = self.embargoed - alerts: List[Dict[str, Any]] = UNSET + + alerts: list[dict[str, Any]] = UNSET if not isinstance(self.alerts, Unset): alerts = [] for alerts_item_data in self.alerts: - alerts_item: Dict[str, Any] = UNSET + alerts_item: dict[str, Any] = UNSET if not isinstance(alerts_item_data, Unset): alerts_item = alerts_item_data.to_dict() @@ -66,17 +94,27 @@ def to_dict(self) -> Dict[str, Any]: if not isinstance(self.updated_dt, Unset): updated_dt = self.updated_dt.isoformat() - flaw = self.flaw - comment = self.comment + flaw: Union[Unset, str] = UNSET + if not isinstance(self.flaw, Unset): + flaw = str(self.flaw) + + comment: Union[None, Unset, str] + if isinstance(self.comment, Unset): + comment = UNSET + else: + comment = self.comment + dt: Union[Unset, str] = UNSET if not isinstance(self.dt, Unset): dt = self.dt.isoformat() env = self.env + revision = self.revision + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(cvss_version, Unset): field_dict["cvss_version"] = cvss_version @@ -112,8 +150,11 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.alert import Alert + d = src_dict.copy() + # } _cvss_version = d.pop("cvss_version", UNSET) cvss_version: CvssVersionEnum if isinstance(_cvss_version, Unset): @@ -121,6 +162,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: cvss_version = CvssVersionEnum(_cvss_version) + # } _issuer = d.pop("issuer", UNSET) issuer: IssuerEnum if isinstance(_issuer, Unset): @@ -130,7 +172,13 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: score = d.pop("score", UNSET) - uuid = d.pop("uuid", UNSET) + # } + _uuid = d.pop("uuid", UNSET) + uuid: UUID + if isinstance(_uuid, Unset): + uuid = UNSET + else: + uuid = UUID(_uuid) vector = d.pop("vector", UNSET) @@ -138,19 +186,18 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: alerts = [] _alerts = d.pop("alerts", UNSET) - if _alerts is UNSET: - alerts = UNSET - else: - for 
alerts_item_data in _alerts or []: - _alerts_item = alerts_item_data - alerts_item: Alert - if isinstance(_alerts_item, Unset): - alerts_item = UNSET - else: - alerts_item = Alert.from_dict(_alerts_item) - - alerts.append(alerts_item) - + for alerts_item_data in _alerts or []: + # } + _alerts_item = alerts_item_data + alerts_item: Alert + if isinstance(_alerts_item, Unset): + alerts_item = UNSET + else: + alerts_item = Alert.from_dict(_alerts_item) + + alerts.append(alerts_item) + + # } _created_dt = d.pop("created_dt", UNSET) created_dt: datetime.datetime if isinstance(_created_dt, Unset): @@ -158,6 +205,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: created_dt = isoparse(_created_dt) + # } _updated_dt = d.pop("updated_dt", UNSET) updated_dt: datetime.datetime if isinstance(_updated_dt, Unset): @@ -165,10 +213,24 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: updated_dt = isoparse(_updated_dt) - flaw = d.pop("flaw", UNSET) + # } + _flaw = d.pop("flaw", UNSET) + flaw: Union[Unset, UUID] + if isinstance(_flaw, Unset): + flaw = UNSET + else: + flaw = UUID(_flaw) + + def _parse_comment(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) - comment = d.pop("comment", UNSET) + comment = _parse_comment(d.pop("comment", UNSET)) + # } _dt = d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -209,14 +271,14 @@ def get_fields(): "cvss_version": CvssVersionEnum, "issuer": IssuerEnum, "score": float, - "uuid": str, + "uuid": UUID, "vector": str, "embargoed": bool, - "alerts": List[Alert], + "alerts": list["Alert"], "created_dt": datetime.datetime, "updated_dt": datetime.datetime, - "flaw": str, - "comment": str, + "flaw": UUID, + "comment": Union[None, str], "dt": datetime.datetime, "env": str, "revision": str, @@ -224,7 +286,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_cvss_scores_update_response_200.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_cvss_scores_update_response_200.py index 5cb1c7b..cb103f9 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_cvss_scores_update_response_200.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_cvss_scores_update_response_200.py @@ -1,58 +1,86 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union, cast +from uuid import UUID -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.alert import Alert from ..models.cvss_version_enum import CvssVersionEnum from ..models.issuer_enum import IssuerEnum from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.alert import Alert + + T = TypeVar("T", bound="OsidbApiV1FlawsCvssScoresUpdateResponse200") -@attr.s(auto_attribs=True) +@_attrs_define class OsidbApiV1FlawsCvssScoresUpdateResponse200(OSIDBModel): - """ """ + """ + Attributes: + cvss_version (CvssVersionEnum): + issuer (IssuerEnum): + score (float): + uuid (UUID): + vector (str): + embargoed (bool): The embargoed boolean attribute is technically read-only as it just indirectly 
modifies the + ACLs but is mandatory as it controls the access to the resource. + alerts (list['Alert']): + created_dt (datetime.datetime): + updated_dt (datetime.datetime): The updated_dt timestamp attribute is mandatory on update as it is used to + detect mit-air collisions. + flaw (Union[Unset, UUID]): + comment (Union[None, Unset, str]): + dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ cvss_version: CvssVersionEnum issuer: IssuerEnum score: float - uuid: str + uuid: UUID vector: str embargoed: bool - alerts: List[Alert] + alerts: list["Alert"] created_dt: datetime.datetime updated_dt: datetime.datetime - flaw: Union[Unset, str] = UNSET - comment: Union[Unset, None, str] = UNSET + flaw: Union[Unset, UUID] = UNSET + comment: Union[None, Unset, str] = UNSET dt: Union[Unset, datetime.datetime] = UNSET env: Union[Unset, str] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: cvss_version: str = UNSET if not isinstance(self.cvss_version, Unset): - cvss_version = CvssVersionEnum(self.cvss_version).value issuer: str = UNSET if not isinstance(self.issuer, Unset): - issuer = IssuerEnum(self.issuer).value score = self.score - uuid = self.uuid + + uuid: str = UNSET + if not isinstance(self.uuid, Unset): + uuid = str(self.uuid) + vector = self.vector + embargoed = self.embargoed - alerts: List[Dict[str, Any]] = UNSET + + alerts: list[dict[str, Any]] = UNSET if not isinstance(self.alerts, Unset): alerts = [] for alerts_item_data in self.alerts: - alerts_item: Dict[str, Any] = UNSET + alerts_item: dict[str, Any] = UNSET if not isinstance(alerts_item_data, Unset): alerts_item = alerts_item_data.to_dict() @@ -66,17 +94,27 @@ def to_dict(self) -> Dict[str, Any]: if not isinstance(self.updated_dt, Unset): updated_dt = self.updated_dt.isoformat() - flaw = self.flaw - comment = self.comment + flaw: Union[Unset, str] = UNSET + if not isinstance(self.flaw, Unset): + flaw = str(self.flaw) + + comment: Union[None, Unset, str] + if isinstance(self.comment, Unset): + comment = UNSET + else: + comment = self.comment + dt: Union[Unset, str] = UNSET if not isinstance(self.dt, Unset): dt = self.dt.isoformat() env = self.env + revision = self.revision + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(cvss_version, Unset): field_dict["cvss_version"] = cvss_version @@ -112,8 +150,11 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.alert import Alert + d = src_dict.copy() + # } _cvss_version = d.pop("cvss_version", UNSET) cvss_version: CvssVersionEnum if isinstance(_cvss_version, Unset): @@ -121,6 +162,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: cvss_version = CvssVersionEnum(_cvss_version) + # } _issuer = d.pop("issuer", UNSET) issuer: IssuerEnum if isinstance(_issuer, Unset): @@ -130,7 +172,13 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: score = d.pop("score", UNSET) - uuid = d.pop("uuid", UNSET) + # } + _uuid = d.pop("uuid", UNSET) + uuid: UUID + if 
isinstance(_uuid, Unset): + uuid = UNSET + else: + uuid = UUID(_uuid) vector = d.pop("vector", UNSET) @@ -138,19 +186,18 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: alerts = [] _alerts = d.pop("alerts", UNSET) - if _alerts is UNSET: - alerts = UNSET - else: - for alerts_item_data in _alerts or []: - _alerts_item = alerts_item_data - alerts_item: Alert - if isinstance(_alerts_item, Unset): - alerts_item = UNSET - else: - alerts_item = Alert.from_dict(_alerts_item) - - alerts.append(alerts_item) - + for alerts_item_data in _alerts or []: + # } + _alerts_item = alerts_item_data + alerts_item: Alert + if isinstance(_alerts_item, Unset): + alerts_item = UNSET + else: + alerts_item = Alert.from_dict(_alerts_item) + + alerts.append(alerts_item) + + # } _created_dt = d.pop("created_dt", UNSET) created_dt: datetime.datetime if isinstance(_created_dt, Unset): @@ -158,6 +205,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: created_dt = isoparse(_created_dt) + # } _updated_dt = d.pop("updated_dt", UNSET) updated_dt: datetime.datetime if isinstance(_updated_dt, Unset): @@ -165,10 +213,24 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: updated_dt = isoparse(_updated_dt) - flaw = d.pop("flaw", UNSET) + # } + _flaw = d.pop("flaw", UNSET) + flaw: Union[Unset, UUID] + if isinstance(_flaw, Unset): + flaw = UNSET + else: + flaw = UUID(_flaw) + + def _parse_comment(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) - comment = d.pop("comment", UNSET) + comment = _parse_comment(d.pop("comment", UNSET)) + # } _dt = d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -209,14 +271,14 @@ def get_fields(): "cvss_version": CvssVersionEnum, "issuer": IssuerEnum, "score": float, - "uuid": str, + "uuid": UUID, "vector": str, "embargoed": bool, - "alerts": List[Alert], + "alerts": list["Alert"], "created_dt": datetime.datetime, "updated_dt": datetime.datetime, - "flaw": str, - "comment": str, + "flaw": UUID, + "comment": Union[None, str], "dt": datetime.datetime, "env": str, "revision": str, @@ -224,7 +286,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_list_affects_affectedness.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_list_affects_affectedness.py index 8193ee1..4b0ec0f 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_list_affects_affectedness.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_list_affects_affectedness.py @@ -2,10 +2,10 @@ class OsidbApiV1FlawsListAffectsAffectedness(str, Enum): - VALUE_0 = "" AFFECTED = "AFFECTED" NEW = "NEW" NOTAFFECTED = "NOTAFFECTED" + VALUE_0 = "" def __str__(self) -> str: return str(self.value) diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_list_affects_impact.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_list_affects_impact.py index 450e54c..a9df79b 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_list_affects_impact.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_list_affects_impact.py @@ -2,11 +2,11 @@ class OsidbApiV1FlawsListAffectsImpact(str, Enum): - VALUE_0 = 
"" CRITICAL = "CRITICAL" IMPORTANT = "IMPORTANT" LOW = "LOW" MODERATE = "MODERATE" + VALUE_0 = "" def __str__(self) -> str: return str(self.value) diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_list_affects_resolution.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_list_affects_resolution.py index 89ae53b..3d0bea5 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_list_affects_resolution.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_list_affects_resolution.py @@ -2,11 +2,11 @@ class OsidbApiV1FlawsListAffectsResolution(str, Enum): - VALUE_0 = "" DEFER = "DEFER" DELEGATED = "DELEGATED" FIX = "FIX" OOSS = "OOSS" + VALUE_0 = "" WONTFIX = "WONTFIX" WONTREPORT = "WONTREPORT" diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_list_impact.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_list_impact.py index 72b77df..9692bd5 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_list_impact.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_list_impact.py @@ -2,11 +2,11 @@ class OsidbApiV1FlawsListImpact(str, Enum): - VALUE_0 = "" CRITICAL = "CRITICAL" IMPORTANT = "IMPORTANT" LOW = "LOW" MODERATE = "MODERATE" + VALUE_0 = "" def __str__(self) -> str: return str(self.value) diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_list_major_incident_state.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_list_major_incident_state.py index 8a5485c..ec51986 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_list_major_incident_state.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_list_major_incident_state.py @@ -2,13 +2,13 @@ class OsidbApiV1FlawsListMajorIncidentState(str, Enum): - VALUE_0 = "" APPROVED = "APPROVED" CISA_APPROVED = "CISA_APPROVED" INVALID = "INVALID" MINOR = "MINOR" REJECTED = "REJECTED" REQUESTED = "REQUESTED" + VALUE_0 = "" ZERO_DAY = "ZERO_DAY" def __str__(self) -> str: diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_list_nist_cvss_validation.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_list_nist_cvss_validation.py index d8bbeda..f5d3a6a 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_list_nist_cvss_validation.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_list_nist_cvss_validation.py @@ -2,10 +2,10 @@ class OsidbApiV1FlawsListNistCvssValidation(str, Enum): - VALUE_0 = "" APPROVED = "APPROVED" REJECTED = "REJECTED" REQUESTED = "REQUESTED" + VALUE_0 = "" def __str__(self) -> str: return str(self.value) diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_list_order_item.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_list_order_item.py index 0ea2c7c..6f7f6ad 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_list_order_item.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_list_order_item.py @@ -2,67 +2,6 @@ class OsidbApiV1FlawsListOrderItem(str, Enum): - VALUE_0 = "-acknowledgments__affiliation" - VALUE_1 = "-acknowledgments__created_dt" - VALUE_2 = "-acknowledgments__from_upstream" - VALUE_3 = "-acknowledgments__name" - VALUE_4 = "-acknowledgments__updated_dt" - VALUE_5 = "-acknowledgments__uuid" - VALUE_6 = "-affects__affectedness" - VALUE_7 = "-affects__created_dt" - VALUE_8 = "-affects__impact" - 
VALUE_9 = "-affects__ps_component" - VALUE_10 = "-affects__ps_module" - VALUE_11 = "-affects__resolution" - VALUE_12 = "-affects__trackers__created_dt" - VALUE_13 = "-affects__trackers__errata__advisory_name" - VALUE_14 = "-affects__trackers__errata__et_id" - VALUE_15 = "-affects__trackers__errata__shipped_dt" - VALUE_16 = "-affects__trackers__external_system_id" - VALUE_17 = "-affects__trackers__ps_update_stream" - VALUE_18 = "-affects__trackers__resolution" - VALUE_19 = "-affects__trackers__status" - VALUE_20 = "-affects__trackers__type" - VALUE_21 = "-affects__trackers__updated_dt" - VALUE_22 = "-affects__trackers__uuid" - VALUE_23 = "-affects__updated_dt" - VALUE_24 = "-affects__uuid" - VALUE_25 = "-bz_id" - VALUE_26 = "-comment_zero" - VALUE_27 = "-components" - VALUE_28 = "-created_dt" - VALUE_29 = "-cve_description" - VALUE_30 = "-cve_id" - VALUE_31 = "-cvss_scores__comment" - VALUE_32 = "-cvss_scores__created_dt" - VALUE_33 = "-cvss_scores__issuer" - VALUE_34 = "-cvss_scores__score" - VALUE_35 = "-cvss_scores__updated_dt" - VALUE_36 = "-cvss_scores__uuid" - VALUE_37 = "-cvss_scores__vector" - VALUE_38 = "-cwe_id" - VALUE_39 = "-embargoed" - VALUE_40 = "-impact" - VALUE_41 = "-major_incident_start_dt" - VALUE_42 = "-major_incident_state" - VALUE_43 = "-nist_cvss_validation" - VALUE_44 = "-owner" - VALUE_45 = "-references__created_dt" - VALUE_46 = "-references__description" - VALUE_47 = "-references__type" - VALUE_48 = "-references__updated_dt" - VALUE_49 = "-references__url" - VALUE_50 = "-references__uuid" - VALUE_51 = "-reported_dt" - VALUE_52 = "-requires_cve_description" - VALUE_53 = "-source" - VALUE_54 = "-statement" - VALUE_55 = "-team_id" - VALUE_56 = "-title" - VALUE_57 = "-unembargo_dt" - VALUE_58 = "-updated_dt" - VALUE_59 = "-uuid" - VALUE_60 = "-workflow_state" ACKNOWLEDGMENTS_AFFILIATION = "acknowledgments__affiliation" ACKNOWLEDGMENTS_CREATED_DT = "acknowledgments__created_dt" ACKNOWLEDGMENTS_FROM_UPSTREAM = "acknowledgments__from_upstream" @@ -123,6 +62,67 @@ class OsidbApiV1FlawsListOrderItem(str, Enum): UNEMBARGO_DT = "unembargo_dt" UPDATED_DT = "updated_dt" UUID = "uuid" + VALUE_0 = "-acknowledgments__affiliation" + VALUE_1 = "-acknowledgments__created_dt" + VALUE_10 = "-affects__ps_module" + VALUE_11 = "-affects__resolution" + VALUE_12 = "-affects__trackers__created_dt" + VALUE_13 = "-affects__trackers__errata__advisory_name" + VALUE_14 = "-affects__trackers__errata__et_id" + VALUE_15 = "-affects__trackers__errata__shipped_dt" + VALUE_16 = "-affects__trackers__external_system_id" + VALUE_17 = "-affects__trackers__ps_update_stream" + VALUE_18 = "-affects__trackers__resolution" + VALUE_19 = "-affects__trackers__status" + VALUE_2 = "-acknowledgments__from_upstream" + VALUE_20 = "-affects__trackers__type" + VALUE_21 = "-affects__trackers__updated_dt" + VALUE_22 = "-affects__trackers__uuid" + VALUE_23 = "-affects__updated_dt" + VALUE_24 = "-affects__uuid" + VALUE_25 = "-bz_id" + VALUE_26 = "-comment_zero" + VALUE_27 = "-components" + VALUE_28 = "-created_dt" + VALUE_29 = "-cve_description" + VALUE_3 = "-acknowledgments__name" + VALUE_30 = "-cve_id" + VALUE_31 = "-cvss_scores__comment" + VALUE_32 = "-cvss_scores__created_dt" + VALUE_33 = "-cvss_scores__issuer" + VALUE_34 = "-cvss_scores__score" + VALUE_35 = "-cvss_scores__updated_dt" + VALUE_36 = "-cvss_scores__uuid" + VALUE_37 = "-cvss_scores__vector" + VALUE_38 = "-cwe_id" + VALUE_39 = "-embargoed" + VALUE_4 = "-acknowledgments__updated_dt" + VALUE_40 = "-impact" + VALUE_41 = "-major_incident_start_dt" + VALUE_42 = 
"-major_incident_state" + VALUE_43 = "-nist_cvss_validation" + VALUE_44 = "-owner" + VALUE_45 = "-references__created_dt" + VALUE_46 = "-references__description" + VALUE_47 = "-references__type" + VALUE_48 = "-references__updated_dt" + VALUE_49 = "-references__url" + VALUE_5 = "-acknowledgments__uuid" + VALUE_50 = "-references__uuid" + VALUE_51 = "-reported_dt" + VALUE_52 = "-requires_cve_description" + VALUE_53 = "-source" + VALUE_54 = "-statement" + VALUE_55 = "-team_id" + VALUE_56 = "-title" + VALUE_57 = "-unembargo_dt" + VALUE_58 = "-updated_dt" + VALUE_59 = "-uuid" + VALUE_6 = "-affects__affectedness" + VALUE_60 = "-workflow_state" + VALUE_7 = "-affects__created_dt" + VALUE_8 = "-affects__impact" + VALUE_9 = "-affects__ps_component" WORKFLOW_STATE = "workflow_state" def __str__(self) -> str: diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_list_requires_cve_description.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_list_requires_cve_description.py index 1ed3e40..2f094bb 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_list_requires_cve_description.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_list_requires_cve_description.py @@ -2,10 +2,10 @@ class OsidbApiV1FlawsListRequiresCveDescription(str, Enum): - VALUE_0 = "" APPROVED = "APPROVED" REJECTED = "REJECTED" REQUESTED = "REQUESTED" + VALUE_0 = "" def __str__(self) -> str: return str(self.value) diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_list_response_200.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_list_response_200.py index d51abed..2c34b87 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_list_response_200.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_list_response_200.py @@ -1,52 +1,79 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union, cast -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.flaw import Flaw from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.flaw import Flaw + + T = TypeVar("T", bound="OsidbApiV1FlawsListResponse200") -@attr.s(auto_attribs=True) +@_attrs_define class OsidbApiV1FlawsListResponse200(OSIDBModel): - """ """ + """ + Attributes: + count (int): Example: 123. + results (list['Flaw']): + next_ (Union[None, Unset, str]): Example: http://api.example.org/accounts/?offset=400&limit=100. + previous (Union[None, Unset, str]): Example: http://api.example.org/accounts/?offset=200&limit=100. 
+ dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ count: int - results: List[Flaw] - next_: Union[Unset, None, str] = UNSET - previous: Union[Unset, None, str] = UNSET + results: list["Flaw"] + next_: Union[None, Unset, str] = UNSET + previous: Union[None, Unset, str] = UNSET dt: Union[Unset, datetime.datetime] = UNSET env: Union[Unset, str] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: count = self.count - results: List[Dict[str, Any]] = UNSET + + results: list[dict[str, Any]] = UNSET if not isinstance(self.results, Unset): results = [] for results_item_data in self.results: - results_item: Dict[str, Any] = UNSET + results_item: dict[str, Any] = UNSET if not isinstance(results_item_data, Unset): results_item = results_item_data.to_dict() results.append(results_item) - next_ = self.next_ - previous = self.previous + next_: Union[None, Unset, str] + if isinstance(self.next_, Unset): + next_ = UNSET + else: + next_ = self.next_ + + previous: Union[None, Unset, str] + if isinstance(self.previous, Unset): + previous = UNSET + else: + previous = self.previous + dt: Union[Unset, str] = UNSET if not isinstance(self.dt, Unset): dt = self.dt.isoformat() env = self.env + revision = self.revision + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(count, Unset): field_dict["count"] = count @@ -68,29 +95,44 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.flaw import Flaw + d = src_dict.copy() count = d.pop("count", UNSET) results = [] _results = d.pop("results", UNSET) - if _results is UNSET: - results = UNSET - else: - for results_item_data in _results or []: - _results_item = results_item_data - results_item: Flaw - if isinstance(_results_item, Unset): - results_item = UNSET - else: - results_item = Flaw.from_dict(_results_item) - - results.append(results_item) - - next_ = d.pop("next", UNSET) - - previous = d.pop("previous", UNSET) - + for results_item_data in _results or []: + # } + _results_item = results_item_data + results_item: Flaw + if isinstance(_results_item, Unset): + results_item = UNSET + else: + results_item = Flaw.from_dict(_results_item) + + results.append(results_item) + + def _parse_next_(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + next_ = _parse_next_(d.pop("next", UNSET)) + + def _parse_previous(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + previous = _parse_previous(d.pop("previous", UNSET)) + + # } _dt = d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -122,9 +164,9 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: def get_fields(): return { "count": int, - "results": List[Flaw], - "next": str, - "previous": str, + "results": list["Flaw"], + "next": Union[None, str], + 
"previous": Union[None, str], "dt": datetime.datetime, "env": str, "revision": str, @@ -132,7 +174,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_list_source.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_list_source.py index 9fea818..a4ba91a 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_list_source.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_list_source.py @@ -2,7 +2,6 @@ class OsidbApiV1FlawsListSource(str, Enum): - VALUE_0 = "" ADOBE = "ADOBE" APPLE = "APPLE" ASF = "ASF" @@ -86,6 +85,7 @@ class OsidbApiV1FlawsListSource(str, Enum): TWITTER = "TWITTER" UBUNTU = "UBUNTU" UPSTREAM = "UPSTREAM" + VALUE_0 = "" VENDORSEC = "VENDORSEC" VULNWATCH = "VULNWATCH" WIRESHARK = "WIRESHARK" diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_list_workflow_state_item.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_list_workflow_state_item.py index 1cb9d86..bed36b1 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_list_workflow_state_item.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_list_workflow_state_item.py @@ -2,13 +2,13 @@ class OsidbApiV1FlawsListWorkflowStateItem(str, Enum): - VALUE_0 = "" DONE = "DONE" NEW = "NEW" PRE_SECONDARY_ASSESSMENT = "PRE_SECONDARY_ASSESSMENT" REJECTED = "REJECTED" SECONDARY_ASSESSMENT = "SECONDARY_ASSESSMENT" TRIAGE = "TRIAGE" + VALUE_0 = "" def __str__(self) -> str: return str(self.value) diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_package_versions_create_response_201.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_package_versions_create_response_201.py index 96e661e..301e1e4 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_package_versions_create_response_201.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_package_versions_create_response_201.py @@ -1,23 +1,43 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union +from uuid import UUID -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.flaw_version import FlawVersion from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.flaw_version import FlawVersion + + T = TypeVar("T", bound="OsidbApiV1FlawsPackageVersionsCreateResponse201") -@attr.s(auto_attribs=True) +@_attrs_define class OsidbApiV1FlawsPackageVersionsCreateResponse201(OSIDBModel): - """ """ + """ + Attributes: + package (str): + versions (list['FlawVersion']): + flaw (UUID): + uuid (UUID): + embargoed (bool): The embargoed boolean attribute is technically read-only as it just indirectly modifies the + ACLs but is mandatory as it controls the access to the resource. + created_dt (datetime.datetime): + updated_dt (datetime.datetime): The updated_dt timestamp attribute is mandatory on update as it is used to + detect mit-air collisions. 
+ dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ package: str - versions: List[FlawVersion] - flaw: str - uuid: str + versions: list["FlawVersion"] + flaw: UUID + uuid: UUID embargoed: bool created_dt: datetime.datetime updated_dt: datetime.datetime @@ -25,23 +45,31 @@ class OsidbApiV1FlawsPackageVersionsCreateResponse201(OSIDBModel): env: Union[Unset, str] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: package = self.package - versions: List[Dict[str, Any]] = UNSET + + versions: list[dict[str, Any]] = UNSET if not isinstance(self.versions, Unset): versions = [] for versions_item_data in self.versions: - versions_item: Dict[str, Any] = UNSET + versions_item: dict[str, Any] = UNSET if not isinstance(versions_item_data, Unset): versions_item = versions_item_data.to_dict() versions.append(versions_item) - flaw = self.flaw - uuid = self.uuid + flaw: str = UNSET + if not isinstance(self.flaw, Unset): + flaw = str(self.flaw) + + uuid: str = UNSET + if not isinstance(self.uuid, Unset): + uuid = str(self.uuid) + embargoed = self.embargoed + created_dt: str = UNSET if not isinstance(self.created_dt, Unset): created_dt = self.created_dt.isoformat() @@ -55,10 +83,12 @@ def to_dict(self) -> Dict[str, Any]: dt = self.dt.isoformat() env = self.env + revision = self.revision + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(package, Unset): field_dict["package"] = package @@ -86,31 +116,44 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.flaw_version import FlawVersion + d = src_dict.copy() package = d.pop("package", UNSET) versions = [] _versions = d.pop("versions", UNSET) - if _versions is UNSET: - versions = UNSET + for versions_item_data in _versions or []: + # } + _versions_item = versions_item_data + versions_item: FlawVersion + if isinstance(_versions_item, Unset): + versions_item = UNSET + else: + versions_item = FlawVersion.from_dict(_versions_item) + + versions.append(versions_item) + + # } + _flaw = d.pop("flaw", UNSET) + flaw: UUID + if isinstance(_flaw, Unset): + flaw = UNSET else: - for versions_item_data in _versions or []: - _versions_item = versions_item_data - versions_item: FlawVersion - if isinstance(_versions_item, Unset): - versions_item = UNSET - else: - versions_item = FlawVersion.from_dict(_versions_item) - - versions.append(versions_item) + flaw = UUID(_flaw) - flaw = d.pop("flaw", UNSET) - - uuid = d.pop("uuid", UNSET) + # } + _uuid = d.pop("uuid", UNSET) + uuid: UUID + if isinstance(_uuid, Unset): + uuid = UNSET + else: + uuid = UUID(_uuid) embargoed = d.pop("embargoed", UNSET) + # } _created_dt = d.pop("created_dt", UNSET) created_dt: datetime.datetime if isinstance(_created_dt, Unset): @@ -118,6 +161,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: created_dt = isoparse(_created_dt) + # } _updated_dt = d.pop("updated_dt", UNSET) updated_dt: datetime.datetime if isinstance(_updated_dt, Unset): @@ -125,6 +169,7 @@ def from_dict(cls: Type[T], 
src_dict: Dict[str, Any]) -> T: else: updated_dt = isoparse(_updated_dt) + # } _dt = d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -152,18 +197,16 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: version=version, ) - osidb_api_v1_flaws_package_versions_create_response_201.additional_properties = ( - d - ) + osidb_api_v1_flaws_package_versions_create_response_201.additional_properties = d return osidb_api_v1_flaws_package_versions_create_response_201 @staticmethod def get_fields(): return { "package": str, - "versions": List[FlawVersion], - "flaw": str, - "uuid": str, + "versions": list["FlawVersion"], + "flaw": UUID, + "uuid": UUID, "embargoed": bool, "created_dt": datetime.datetime, "updated_dt": datetime.datetime, @@ -174,7 +217,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_package_versions_destroy_response_200.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_package_versions_destroy_response_200.py index 70e4926..6e9c7a3 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_package_versions_destroy_response_200.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_package_versions_destroy_response_200.py @@ -1,7 +1,8 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import Any, TypeVar, Union -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse from ..types import UNSET, OSIDBModel, Unset @@ -9,26 +10,34 @@ T = TypeVar("T", bound="OsidbApiV1FlawsPackageVersionsDestroyResponse200") -@attr.s(auto_attribs=True) +@_attrs_define class OsidbApiV1FlawsPackageVersionsDestroyResponse200(OSIDBModel): - """ """ + """ + Attributes: + dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ dt: Union[Unset, datetime.datetime] = UNSET env: Union[Unset, str] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: dt: Union[Unset, str] = UNSET if not isinstance(self.dt, Unset): dt = self.dt.isoformat() env = self.env + revision = self.revision + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(dt, Unset): field_dict["dt"] = dt @@ -42,8 +51,9 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: d = src_dict.copy() + # } _dt = d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -64,9 +74,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: version=version, ) - osidb_api_v1_flaws_package_versions_destroy_response_200.additional_properties = ( - d - ) + osidb_api_v1_flaws_package_versions_destroy_response_200.additional_properties = d return osidb_api_v1_flaws_package_versions_destroy_response_200 @staticmethod @@ -79,7 +87,7 
@@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_package_versions_list_response_200.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_package_versions_list_response_200.py index 87472f0..eae9e12 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_package_versions_list_response_200.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_package_versions_list_response_200.py @@ -1,52 +1,79 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union, cast -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.flaw_package_version import FlawPackageVersion from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.flaw_package_version import FlawPackageVersion + + T = TypeVar("T", bound="OsidbApiV1FlawsPackageVersionsListResponse200") -@attr.s(auto_attribs=True) +@_attrs_define class OsidbApiV1FlawsPackageVersionsListResponse200(OSIDBModel): - """ """ + """ + Attributes: + count (int): Example: 123. + results (list['FlawPackageVersion']): + next_ (Union[None, Unset, str]): Example: http://api.example.org/accounts/?offset=400&limit=100. + previous (Union[None, Unset, str]): Example: http://api.example.org/accounts/?offset=200&limit=100. + dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ count: int - results: List[FlawPackageVersion] - next_: Union[Unset, None, str] = UNSET - previous: Union[Unset, None, str] = UNSET + results: list["FlawPackageVersion"] + next_: Union[None, Unset, str] = UNSET + previous: Union[None, Unset, str] = UNSET dt: Union[Unset, datetime.datetime] = UNSET env: Union[Unset, str] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: count = self.count - results: List[Dict[str, Any]] = UNSET + + results: list[dict[str, Any]] = UNSET if not isinstance(self.results, Unset): results = [] for results_item_data in self.results: - results_item: Dict[str, Any] = UNSET + results_item: dict[str, Any] = UNSET if not isinstance(results_item_data, Unset): results_item = results_item_data.to_dict() results.append(results_item) - next_ = self.next_ - previous = self.previous + next_: Union[None, Unset, str] + if isinstance(self.next_, Unset): + next_ = UNSET + else: + next_ = self.next_ + + previous: Union[None, Unset, str] + if isinstance(self.previous, Unset): + previous = UNSET + else: + previous = self.previous + dt: Union[Unset, str] = UNSET if not isinstance(self.dt, Unset): dt = self.dt.isoformat() env = self.env + revision = self.revision + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(count, Unset): field_dict["count"] = count @@ -68,29 +95,44 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, 
Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.flaw_package_version import FlawPackageVersion + d = src_dict.copy() count = d.pop("count", UNSET) results = [] _results = d.pop("results", UNSET) - if _results is UNSET: - results = UNSET - else: - for results_item_data in _results or []: - _results_item = results_item_data - results_item: FlawPackageVersion - if isinstance(_results_item, Unset): - results_item = UNSET - else: - results_item = FlawPackageVersion.from_dict(_results_item) - - results.append(results_item) - - next_ = d.pop("next", UNSET) - - previous = d.pop("previous", UNSET) - + for results_item_data in _results or []: + # } + _results_item = results_item_data + results_item: FlawPackageVersion + if isinstance(_results_item, Unset): + results_item = UNSET + else: + results_item = FlawPackageVersion.from_dict(_results_item) + + results.append(results_item) + + def _parse_next_(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + next_ = _parse_next_(d.pop("next", UNSET)) + + def _parse_previous(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + previous = _parse_previous(d.pop("previous", UNSET)) + + # } _dt = d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -122,9 +164,9 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: def get_fields(): return { "count": int, - "results": List[FlawPackageVersion], - "next": str, - "previous": str, + "results": list["FlawPackageVersion"], + "next": Union[None, str], + "previous": Union[None, str], "dt": datetime.datetime, "env": str, "revision": str, @@ -132,7 +174,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_package_versions_retrieve_response_200.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_package_versions_retrieve_response_200.py index 4e766af..28a10f1 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_package_versions_retrieve_response_200.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_package_versions_retrieve_response_200.py @@ -1,23 +1,43 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union +from uuid import UUID -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.flaw_version import FlawVersion from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.flaw_version import FlawVersion + + T = TypeVar("T", bound="OsidbApiV1FlawsPackageVersionsRetrieveResponse200") -@attr.s(auto_attribs=True) +@_attrs_define class OsidbApiV1FlawsPackageVersionsRetrieveResponse200(OSIDBModel): - """ """ + """ + Attributes: + package (str): + versions (list['FlawVersion']): + flaw (UUID): + uuid (UUID): + embargoed (bool): The embargoed boolean attribute is technically read-only as it just indirectly modifies the + ACLs but is mandatory as it controls the access to the resource. 
+ created_dt (datetime.datetime): + updated_dt (datetime.datetime): The updated_dt timestamp attribute is mandatory on update as it is used to + detect mit-air collisions. + dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ package: str - versions: List[FlawVersion] - flaw: str - uuid: str + versions: list["FlawVersion"] + flaw: UUID + uuid: UUID embargoed: bool created_dt: datetime.datetime updated_dt: datetime.datetime @@ -25,23 +45,31 @@ class OsidbApiV1FlawsPackageVersionsRetrieveResponse200(OSIDBModel): env: Union[Unset, str] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: package = self.package - versions: List[Dict[str, Any]] = UNSET + + versions: list[dict[str, Any]] = UNSET if not isinstance(self.versions, Unset): versions = [] for versions_item_data in self.versions: - versions_item: Dict[str, Any] = UNSET + versions_item: dict[str, Any] = UNSET if not isinstance(versions_item_data, Unset): versions_item = versions_item_data.to_dict() versions.append(versions_item) - flaw = self.flaw - uuid = self.uuid + flaw: str = UNSET + if not isinstance(self.flaw, Unset): + flaw = str(self.flaw) + + uuid: str = UNSET + if not isinstance(self.uuid, Unset): + uuid = str(self.uuid) + embargoed = self.embargoed + created_dt: str = UNSET if not isinstance(self.created_dt, Unset): created_dt = self.created_dt.isoformat() @@ -55,10 +83,12 @@ def to_dict(self) -> Dict[str, Any]: dt = self.dt.isoformat() env = self.env + revision = self.revision + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(package, Unset): field_dict["package"] = package @@ -86,31 +116,44 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.flaw_version import FlawVersion + d = src_dict.copy() package = d.pop("package", UNSET) versions = [] _versions = d.pop("versions", UNSET) - if _versions is UNSET: - versions = UNSET + for versions_item_data in _versions or []: + # } + _versions_item = versions_item_data + versions_item: FlawVersion + if isinstance(_versions_item, Unset): + versions_item = UNSET + else: + versions_item = FlawVersion.from_dict(_versions_item) + + versions.append(versions_item) + + # } + _flaw = d.pop("flaw", UNSET) + flaw: UUID + if isinstance(_flaw, Unset): + flaw = UNSET else: - for versions_item_data in _versions or []: - _versions_item = versions_item_data - versions_item: FlawVersion - if isinstance(_versions_item, Unset): - versions_item = UNSET - else: - versions_item = FlawVersion.from_dict(_versions_item) - - versions.append(versions_item) + flaw = UUID(_flaw) - flaw = d.pop("flaw", UNSET) - - uuid = d.pop("uuid", UNSET) + # } + _uuid = d.pop("uuid", UNSET) + uuid: UUID + if isinstance(_uuid, Unset): + uuid = UNSET + else: + uuid = UUID(_uuid) embargoed = d.pop("embargoed", UNSET) + # } _created_dt = d.pop("created_dt", UNSET) created_dt: datetime.datetime if isinstance(_created_dt, Unset): @@ -118,6 +161,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: created_dt = 
isoparse(_created_dt) + # } _updated_dt = d.pop("updated_dt", UNSET) updated_dt: datetime.datetime if isinstance(_updated_dt, Unset): @@ -125,6 +169,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: updated_dt = isoparse(_updated_dt) + # } _dt = d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -152,18 +197,16 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: version=version, ) - osidb_api_v1_flaws_package_versions_retrieve_response_200.additional_properties = ( - d - ) + osidb_api_v1_flaws_package_versions_retrieve_response_200.additional_properties = d return osidb_api_v1_flaws_package_versions_retrieve_response_200 @staticmethod def get_fields(): return { "package": str, - "versions": List[FlawVersion], - "flaw": str, - "uuid": str, + "versions": list["FlawVersion"], + "flaw": UUID, + "uuid": UUID, "embargoed": bool, "created_dt": datetime.datetime, "updated_dt": datetime.datetime, @@ -174,7 +217,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_package_versions_update_response_200.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_package_versions_update_response_200.py index 55dcee0..4843520 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_package_versions_update_response_200.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_package_versions_update_response_200.py @@ -1,23 +1,43 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union +from uuid import UUID -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.flaw_version import FlawVersion from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.flaw_version import FlawVersion + + T = TypeVar("T", bound="OsidbApiV1FlawsPackageVersionsUpdateResponse200") -@attr.s(auto_attribs=True) +@_attrs_define class OsidbApiV1FlawsPackageVersionsUpdateResponse200(OSIDBModel): - """ """ + """ + Attributes: + package (str): + versions (list['FlawVersion']): + flaw (UUID): + uuid (UUID): + embargoed (bool): The embargoed boolean attribute is technically read-only as it just indirectly modifies the + ACLs but is mandatory as it controls the access to the resource. + created_dt (datetime.datetime): + updated_dt (datetime.datetime): The updated_dt timestamp attribute is mandatory on update as it is used to + detect mit-air collisions. 
+ dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ package: str - versions: List[FlawVersion] - flaw: str - uuid: str + versions: list["FlawVersion"] + flaw: UUID + uuid: UUID embargoed: bool created_dt: datetime.datetime updated_dt: datetime.datetime @@ -25,23 +45,31 @@ class OsidbApiV1FlawsPackageVersionsUpdateResponse200(OSIDBModel): env: Union[Unset, str] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: package = self.package - versions: List[Dict[str, Any]] = UNSET + + versions: list[dict[str, Any]] = UNSET if not isinstance(self.versions, Unset): versions = [] for versions_item_data in self.versions: - versions_item: Dict[str, Any] = UNSET + versions_item: dict[str, Any] = UNSET if not isinstance(versions_item_data, Unset): versions_item = versions_item_data.to_dict() versions.append(versions_item) - flaw = self.flaw - uuid = self.uuid + flaw: str = UNSET + if not isinstance(self.flaw, Unset): + flaw = str(self.flaw) + + uuid: str = UNSET + if not isinstance(self.uuid, Unset): + uuid = str(self.uuid) + embargoed = self.embargoed + created_dt: str = UNSET if not isinstance(self.created_dt, Unset): created_dt = self.created_dt.isoformat() @@ -55,10 +83,12 @@ def to_dict(self) -> Dict[str, Any]: dt = self.dt.isoformat() env = self.env + revision = self.revision + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(package, Unset): field_dict["package"] = package @@ -86,31 +116,44 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.flaw_version import FlawVersion + d = src_dict.copy() package = d.pop("package", UNSET) versions = [] _versions = d.pop("versions", UNSET) - if _versions is UNSET: - versions = UNSET + for versions_item_data in _versions or []: + # } + _versions_item = versions_item_data + versions_item: FlawVersion + if isinstance(_versions_item, Unset): + versions_item = UNSET + else: + versions_item = FlawVersion.from_dict(_versions_item) + + versions.append(versions_item) + + # } + _flaw = d.pop("flaw", UNSET) + flaw: UUID + if isinstance(_flaw, Unset): + flaw = UNSET else: - for versions_item_data in _versions or []: - _versions_item = versions_item_data - versions_item: FlawVersion - if isinstance(_versions_item, Unset): - versions_item = UNSET - else: - versions_item = FlawVersion.from_dict(_versions_item) - - versions.append(versions_item) + flaw = UUID(_flaw) - flaw = d.pop("flaw", UNSET) - - uuid = d.pop("uuid", UNSET) + # } + _uuid = d.pop("uuid", UNSET) + uuid: UUID + if isinstance(_uuid, Unset): + uuid = UNSET + else: + uuid = UUID(_uuid) embargoed = d.pop("embargoed", UNSET) + # } _created_dt = d.pop("created_dt", UNSET) created_dt: datetime.datetime if isinstance(_created_dt, Unset): @@ -118,6 +161,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: created_dt = isoparse(_created_dt) + # } _updated_dt = d.pop("updated_dt", UNSET) updated_dt: datetime.datetime if isinstance(_updated_dt, Unset): @@ -125,6 +169,7 @@ def from_dict(cls: Type[T], 
src_dict: Dict[str, Any]) -> T: else: updated_dt = isoparse(_updated_dt) + # } _dt = d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -152,18 +197,16 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: version=version, ) - osidb_api_v1_flaws_package_versions_update_response_200.additional_properties = ( - d - ) + osidb_api_v1_flaws_package_versions_update_response_200.additional_properties = d return osidb_api_v1_flaws_package_versions_update_response_200 @staticmethod def get_fields(): return { "package": str, - "versions": List[FlawVersion], - "flaw": str, - "uuid": str, + "versions": list["FlawVersion"], + "flaw": UUID, + "uuid": UUID, "embargoed": bool, "created_dt": datetime.datetime, "updated_dt": datetime.datetime, @@ -174,7 +217,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_promote_create_response_200.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_promote_create_response_200.py index 3dd4b66..e44e86a 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_promote_create_response_200.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_promote_create_response_200.py @@ -1,7 +1,8 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import Any, TypeVar, Union -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse from ..types import UNSET, OSIDBModel, Unset @@ -9,26 +10,34 @@ T = TypeVar("T", bound="OsidbApiV1FlawsPromoteCreateResponse200") -@attr.s(auto_attribs=True) +@_attrs_define class OsidbApiV1FlawsPromoteCreateResponse200(OSIDBModel): - """ """ + """ + Attributes: + dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ dt: Union[Unset, datetime.datetime] = UNSET env: Union[Unset, str] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: dt: Union[Unset, str] = UNSET if not isinstance(self.dt, Unset): dt = self.dt.isoformat() env = self.env + revision = self.revision + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(dt, Unset): field_dict["dt"] = dt @@ -42,8 +51,9 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: d = src_dict.copy() + # } _dt = d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -77,7 +87,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_references_create_response_201.py 
b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_references_create_response_201.py index 8f712ce..cc91194 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_references_create_response_201.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_references_create_response_201.py @@ -1,45 +1,75 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union +from uuid import UUID -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.alert import Alert from ..models.flaw_reference_type import FlawReferenceType from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.alert import Alert + + T = TypeVar("T", bound="OsidbApiV1FlawsReferencesCreateResponse201") -@attr.s(auto_attribs=True) +@_attrs_define class OsidbApiV1FlawsReferencesCreateResponse201(OSIDBModel): - """ """ - - flaw: str + """ + Attributes: + flaw (UUID): + url (str): + uuid (UUID): + embargoed (bool): The embargoed boolean attribute is technically read-only as it just indirectly modifies the + ACLs but is mandatory as it controls the access to the resource. + alerts (list['Alert']): + created_dt (datetime.datetime): + updated_dt (datetime.datetime): The updated_dt timestamp attribute is mandatory on update as it is used to + detect mit-air collisions. + description (Union[Unset, str]): + type_ (Union[Unset, FlawReferenceType]): + dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ + + flaw: UUID url: str - uuid: str + uuid: UUID embargoed: bool - alerts: List[Alert] + alerts: list["Alert"] created_dt: datetime.datetime updated_dt: datetime.datetime description: Union[Unset, str] = UNSET - type: Union[Unset, FlawReferenceType] = UNSET + type_: Union[Unset, FlawReferenceType] = UNSET dt: Union[Unset, datetime.datetime] = UNSET env: Union[Unset, str] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + flaw: str = UNSET + if not isinstance(self.flaw, Unset): + flaw = str(self.flaw) - def to_dict(self) -> Dict[str, Any]: - flaw = self.flaw url = self.url - uuid = self.uuid + + uuid: str = UNSET + if not isinstance(self.uuid, Unset): + uuid = str(self.uuid) + embargoed = self.embargoed - alerts: List[Dict[str, Any]] = UNSET + + alerts: list[dict[str, Any]] = UNSET if not isinstance(self.alerts, Unset): alerts = [] for alerts_item_data in self.alerts: - alerts_item: Dict[str, Any] = UNSET + alerts_item: dict[str, Any] = UNSET if not isinstance(alerts_item_data, Unset): alerts_item = alerts_item_data.to_dict() @@ -54,20 +84,22 @@ def to_dict(self) -> Dict[str, Any]: updated_dt = self.updated_dt.isoformat() description = self.description - type: Union[Unset, str] = UNSET - if not isinstance(self.type, Unset): - type = FlawReferenceType(self.type).value + type_: Union[Unset, str] = UNSET + if not isinstance(self.type_, Unset): + type_ = FlawReferenceType(self.type_).value dt: Union[Unset, str] = UNSET if not isinstance(self.dt, Unset): dt = self.dt.isoformat() env = self.env + revision = self.revision + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} 
field_dict.update(self.additional_properties) if not isinstance(flaw, Unset): field_dict["flaw"] = flaw @@ -85,8 +117,8 @@ def to_dict(self) -> Dict[str, Any]: field_dict["updated_dt"] = updated_dt if not isinstance(description, Unset): field_dict["description"] = description - if not isinstance(type, Unset): - field_dict["type"] = type + if not isinstance(type_, Unset): + field_dict["type"] = type_ if not isinstance(dt, Unset): field_dict["dt"] = dt if not isinstance(env, Unset): @@ -99,31 +131,44 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.alert import Alert + d = src_dict.copy() - flaw = d.pop("flaw", UNSET) + # } + _flaw = d.pop("flaw", UNSET) + flaw: UUID + if isinstance(_flaw, Unset): + flaw = UNSET + else: + flaw = UUID(_flaw) url = d.pop("url", UNSET) - uuid = d.pop("uuid", UNSET) + # } + _uuid = d.pop("uuid", UNSET) + uuid: UUID + if isinstance(_uuid, Unset): + uuid = UNSET + else: + uuid = UUID(_uuid) embargoed = d.pop("embargoed", UNSET) alerts = [] _alerts = d.pop("alerts", UNSET) - if _alerts is UNSET: - alerts = UNSET - else: - for alerts_item_data in _alerts or []: - _alerts_item = alerts_item_data - alerts_item: Alert - if isinstance(_alerts_item, Unset): - alerts_item = UNSET - else: - alerts_item = Alert.from_dict(_alerts_item) - - alerts.append(alerts_item) - + for alerts_item_data in _alerts or []: + # } + _alerts_item = alerts_item_data + alerts_item: Alert + if isinstance(_alerts_item, Unset): + alerts_item = UNSET + else: + alerts_item = Alert.from_dict(_alerts_item) + + alerts.append(alerts_item) + + # } _created_dt = d.pop("created_dt", UNSET) created_dt: datetime.datetime if isinstance(_created_dt, Unset): @@ -131,6 +176,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: created_dt = isoparse(_created_dt) + # } _updated_dt = d.pop("updated_dt", UNSET) updated_dt: datetime.datetime if isinstance(_updated_dt, Unset): @@ -140,13 +186,15 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: description = d.pop("description", UNSET) - _type = d.pop("type", UNSET) - type: Union[Unset, FlawReferenceType] - if isinstance(_type, Unset): - type = UNSET + # } + _type_ = d.pop("type", UNSET) + type_: Union[Unset, FlawReferenceType] + if isinstance(_type_, Unset): + type_ = UNSET else: - type = FlawReferenceType(_type) + type_ = FlawReferenceType(_type_) + # } _dt = d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -169,7 +217,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: created_dt=created_dt, updated_dt=updated_dt, description=description, - type=type, + type_=type_, dt=dt, env=env, revision=revision, @@ -182,11 +230,11 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: @staticmethod def get_fields(): return { - "flaw": str, + "flaw": UUID, "url": str, - "uuid": str, + "uuid": UUID, "embargoed": bool, - "alerts": List[Alert], + "alerts": list["Alert"], "created_dt": datetime.datetime, "updated_dt": datetime.datetime, "description": str, @@ -198,7 +246,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_references_destroy_response_200.py 
b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_references_destroy_response_200.py index 735da72..97d43a8 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_references_destroy_response_200.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_references_destroy_response_200.py @@ -1,7 +1,8 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import Any, TypeVar, Union -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse from ..types import UNSET, OSIDBModel, Unset @@ -9,26 +10,34 @@ T = TypeVar("T", bound="OsidbApiV1FlawsReferencesDestroyResponse200") -@attr.s(auto_attribs=True) +@_attrs_define class OsidbApiV1FlawsReferencesDestroyResponse200(OSIDBModel): - """ """ + """ + Attributes: + dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ dt: Union[Unset, datetime.datetime] = UNSET env: Union[Unset, str] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: dt: Union[Unset, str] = UNSET if not isinstance(self.dt, Unset): dt = self.dt.isoformat() env = self.env + revision = self.revision + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(dt, Unset): field_dict["dt"] = dt @@ -42,8 +51,9 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: d = src_dict.copy() + # } _dt = d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -77,7 +87,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_references_list_response_200.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_references_list_response_200.py index 1829bc5..1c5d22c 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_references_list_response_200.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_references_list_response_200.py @@ -1,52 +1,79 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union, cast -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.flaw_reference import FlawReference from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.flaw_reference import FlawReference + + T = TypeVar("T", bound="OsidbApiV1FlawsReferencesListResponse200") -@attr.s(auto_attribs=True) +@_attrs_define class OsidbApiV1FlawsReferencesListResponse200(OSIDBModel): - """ """ + """ + Attributes: + count (int): Example: 123. + results (list['FlawReference']): + next_ (Union[None, Unset, str]): Example: http://api.example.org/accounts/?offset=400&limit=100. 
+ previous (Union[None, Unset, str]): Example: http://api.example.org/accounts/?offset=200&limit=100. + dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ count: int - results: List[FlawReference] - next_: Union[Unset, None, str] = UNSET - previous: Union[Unset, None, str] = UNSET + results: list["FlawReference"] + next_: Union[None, Unset, str] = UNSET + previous: Union[None, Unset, str] = UNSET dt: Union[Unset, datetime.datetime] = UNSET env: Union[Unset, str] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: count = self.count - results: List[Dict[str, Any]] = UNSET + + results: list[dict[str, Any]] = UNSET if not isinstance(self.results, Unset): results = [] for results_item_data in self.results: - results_item: Dict[str, Any] = UNSET + results_item: dict[str, Any] = UNSET if not isinstance(results_item_data, Unset): results_item = results_item_data.to_dict() results.append(results_item) - next_ = self.next_ - previous = self.previous + next_: Union[None, Unset, str] + if isinstance(self.next_, Unset): + next_ = UNSET + else: + next_ = self.next_ + + previous: Union[None, Unset, str] + if isinstance(self.previous, Unset): + previous = UNSET + else: + previous = self.previous + dt: Union[Unset, str] = UNSET if not isinstance(self.dt, Unset): dt = self.dt.isoformat() env = self.env + revision = self.revision + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(count, Unset): field_dict["count"] = count @@ -68,29 +95,44 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.flaw_reference import FlawReference + d = src_dict.copy() count = d.pop("count", UNSET) results = [] _results = d.pop("results", UNSET) - if _results is UNSET: - results = UNSET - else: - for results_item_data in _results or []: - _results_item = results_item_data - results_item: FlawReference - if isinstance(_results_item, Unset): - results_item = UNSET - else: - results_item = FlawReference.from_dict(_results_item) - - results.append(results_item) - - next_ = d.pop("next", UNSET) - - previous = d.pop("previous", UNSET) - + for results_item_data in _results or []: + # } + _results_item = results_item_data + results_item: FlawReference + if isinstance(_results_item, Unset): + results_item = UNSET + else: + results_item = FlawReference.from_dict(_results_item) + + results.append(results_item) + + def _parse_next_(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + next_ = _parse_next_(d.pop("next", UNSET)) + + def _parse_previous(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + previous = _parse_previous(d.pop("previous", UNSET)) + + # } _dt = d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -122,9 +164,9 @@ def from_dict(cls: Type[T], src_dict: Dict[str, 
Any]) -> T: def get_fields(): return { "count": int, - "results": List[FlawReference], - "next": str, - "previous": str, + "results": list["FlawReference"], + "next": Union[None, str], + "previous": Union[None, str], "dt": datetime.datetime, "env": str, "revision": str, @@ -132,7 +174,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_references_retrieve_response_200.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_references_retrieve_response_200.py index e732ec0..5ad281d 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_references_retrieve_response_200.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_references_retrieve_response_200.py @@ -1,45 +1,75 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union +from uuid import UUID -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.alert import Alert from ..models.flaw_reference_type import FlawReferenceType from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.alert import Alert + + T = TypeVar("T", bound="OsidbApiV1FlawsReferencesRetrieveResponse200") -@attr.s(auto_attribs=True) +@_attrs_define class OsidbApiV1FlawsReferencesRetrieveResponse200(OSIDBModel): - """ """ - - flaw: str + """ + Attributes: + flaw (UUID): + url (str): + uuid (UUID): + embargoed (bool): The embargoed boolean attribute is technically read-only as it just indirectly modifies the + ACLs but is mandatory as it controls the access to the resource. + alerts (list['Alert']): + created_dt (datetime.datetime): + updated_dt (datetime.datetime): The updated_dt timestamp attribute is mandatory on update as it is used to + detect mit-air collisions. 
+ description (Union[Unset, str]): + type_ (Union[Unset, FlawReferenceType]): + dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ + + flaw: UUID url: str - uuid: str + uuid: UUID embargoed: bool - alerts: List[Alert] + alerts: list["Alert"] created_dt: datetime.datetime updated_dt: datetime.datetime description: Union[Unset, str] = UNSET - type: Union[Unset, FlawReferenceType] = UNSET + type_: Union[Unset, FlawReferenceType] = UNSET dt: Union[Unset, datetime.datetime] = UNSET env: Union[Unset, str] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + flaw: str = UNSET + if not isinstance(self.flaw, Unset): + flaw = str(self.flaw) - def to_dict(self) -> Dict[str, Any]: - flaw = self.flaw url = self.url - uuid = self.uuid + + uuid: str = UNSET + if not isinstance(self.uuid, Unset): + uuid = str(self.uuid) + embargoed = self.embargoed - alerts: List[Dict[str, Any]] = UNSET + + alerts: list[dict[str, Any]] = UNSET if not isinstance(self.alerts, Unset): alerts = [] for alerts_item_data in self.alerts: - alerts_item: Dict[str, Any] = UNSET + alerts_item: dict[str, Any] = UNSET if not isinstance(alerts_item_data, Unset): alerts_item = alerts_item_data.to_dict() @@ -54,20 +84,22 @@ def to_dict(self) -> Dict[str, Any]: updated_dt = self.updated_dt.isoformat() description = self.description - type: Union[Unset, str] = UNSET - if not isinstance(self.type, Unset): - type = FlawReferenceType(self.type).value + type_: Union[Unset, str] = UNSET + if not isinstance(self.type_, Unset): + type_ = FlawReferenceType(self.type_).value dt: Union[Unset, str] = UNSET if not isinstance(self.dt, Unset): dt = self.dt.isoformat() env = self.env + revision = self.revision + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(flaw, Unset): field_dict["flaw"] = flaw @@ -85,8 +117,8 @@ def to_dict(self) -> Dict[str, Any]: field_dict["updated_dt"] = updated_dt if not isinstance(description, Unset): field_dict["description"] = description - if not isinstance(type, Unset): - field_dict["type"] = type + if not isinstance(type_, Unset): + field_dict["type"] = type_ if not isinstance(dt, Unset): field_dict["dt"] = dt if not isinstance(env, Unset): @@ -99,31 +131,44 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.alert import Alert + d = src_dict.copy() - flaw = d.pop("flaw", UNSET) + # } + _flaw = d.pop("flaw", UNSET) + flaw: UUID + if isinstance(_flaw, Unset): + flaw = UNSET + else: + flaw = UUID(_flaw) url = d.pop("url", UNSET) - uuid = d.pop("uuid", UNSET) + # } + _uuid = d.pop("uuid", UNSET) + uuid: UUID + if isinstance(_uuid, Unset): + uuid = UNSET + else: + uuid = UUID(_uuid) embargoed = d.pop("embargoed", UNSET) alerts = [] _alerts = d.pop("alerts", UNSET) - if _alerts is UNSET: - alerts = UNSET - else: - for alerts_item_data in _alerts or []: - _alerts_item = alerts_item_data - alerts_item: Alert - if isinstance(_alerts_item, Unset): - alerts_item = UNSET - else: - alerts_item = Alert.from_dict(_alerts_item) - - alerts.append(alerts_item) - + for 
alerts_item_data in _alerts or []: + # } + _alerts_item = alerts_item_data + alerts_item: Alert + if isinstance(_alerts_item, Unset): + alerts_item = UNSET + else: + alerts_item = Alert.from_dict(_alerts_item) + + alerts.append(alerts_item) + + # } _created_dt = d.pop("created_dt", UNSET) created_dt: datetime.datetime if isinstance(_created_dt, Unset): @@ -131,6 +176,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: created_dt = isoparse(_created_dt) + # } _updated_dt = d.pop("updated_dt", UNSET) updated_dt: datetime.datetime if isinstance(_updated_dt, Unset): @@ -140,13 +186,15 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: description = d.pop("description", UNSET) - _type = d.pop("type", UNSET) - type: Union[Unset, FlawReferenceType] - if isinstance(_type, Unset): - type = UNSET + # } + _type_ = d.pop("type", UNSET) + type_: Union[Unset, FlawReferenceType] + if isinstance(_type_, Unset): + type_ = UNSET else: - type = FlawReferenceType(_type) + type_ = FlawReferenceType(_type_) + # } _dt = d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -169,7 +217,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: created_dt=created_dt, updated_dt=updated_dt, description=description, - type=type, + type_=type_, dt=dt, env=env, revision=revision, @@ -182,11 +230,11 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: @staticmethod def get_fields(): return { - "flaw": str, + "flaw": UUID, "url": str, - "uuid": str, + "uuid": UUID, "embargoed": bool, - "alerts": List[Alert], + "alerts": list["Alert"], "created_dt": datetime.datetime, "updated_dt": datetime.datetime, "description": str, @@ -198,7 +246,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_references_update_response_200.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_references_update_response_200.py index 6dd7329..8dcb3c5 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_references_update_response_200.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_references_update_response_200.py @@ -1,45 +1,75 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union +from uuid import UUID -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.alert import Alert from ..models.flaw_reference_type import FlawReferenceType from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.alert import Alert + + T = TypeVar("T", bound="OsidbApiV1FlawsReferencesUpdateResponse200") -@attr.s(auto_attribs=True) +@_attrs_define class OsidbApiV1FlawsReferencesUpdateResponse200(OSIDBModel): - """ """ - - flaw: str + """ + Attributes: + flaw (UUID): + url (str): + uuid (UUID): + embargoed (bool): The embargoed boolean attribute is technically read-only as it just indirectly modifies the + ACLs but is mandatory as it controls the access to the resource. + alerts (list['Alert']): + created_dt (datetime.datetime): + updated_dt (datetime.datetime): The updated_dt timestamp attribute is mandatory on update as it is used to + detect mit-air collisions. 
+ description (Union[Unset, str]): + type_ (Union[Unset, FlawReferenceType]): + dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ + + flaw: UUID url: str - uuid: str + uuid: UUID embargoed: bool - alerts: List[Alert] + alerts: list["Alert"] created_dt: datetime.datetime updated_dt: datetime.datetime description: Union[Unset, str] = UNSET - type: Union[Unset, FlawReferenceType] = UNSET + type_: Union[Unset, FlawReferenceType] = UNSET dt: Union[Unset, datetime.datetime] = UNSET env: Union[Unset, str] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + flaw: str = UNSET + if not isinstance(self.flaw, Unset): + flaw = str(self.flaw) - def to_dict(self) -> Dict[str, Any]: - flaw = self.flaw url = self.url - uuid = self.uuid + + uuid: str = UNSET + if not isinstance(self.uuid, Unset): + uuid = str(self.uuid) + embargoed = self.embargoed - alerts: List[Dict[str, Any]] = UNSET + + alerts: list[dict[str, Any]] = UNSET if not isinstance(self.alerts, Unset): alerts = [] for alerts_item_data in self.alerts: - alerts_item: Dict[str, Any] = UNSET + alerts_item: dict[str, Any] = UNSET if not isinstance(alerts_item_data, Unset): alerts_item = alerts_item_data.to_dict() @@ -54,20 +84,22 @@ def to_dict(self) -> Dict[str, Any]: updated_dt = self.updated_dt.isoformat() description = self.description - type: Union[Unset, str] = UNSET - if not isinstance(self.type, Unset): - type = FlawReferenceType(self.type).value + type_: Union[Unset, str] = UNSET + if not isinstance(self.type_, Unset): + type_ = FlawReferenceType(self.type_).value dt: Union[Unset, str] = UNSET if not isinstance(self.dt, Unset): dt = self.dt.isoformat() env = self.env + revision = self.revision + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(flaw, Unset): field_dict["flaw"] = flaw @@ -85,8 +117,8 @@ def to_dict(self) -> Dict[str, Any]: field_dict["updated_dt"] = updated_dt if not isinstance(description, Unset): field_dict["description"] = description - if not isinstance(type, Unset): - field_dict["type"] = type + if not isinstance(type_, Unset): + field_dict["type"] = type_ if not isinstance(dt, Unset): field_dict["dt"] = dt if not isinstance(env, Unset): @@ -99,31 +131,44 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.alert import Alert + d = src_dict.copy() - flaw = d.pop("flaw", UNSET) + # } + _flaw = d.pop("flaw", UNSET) + flaw: UUID + if isinstance(_flaw, Unset): + flaw = UNSET + else: + flaw = UUID(_flaw) url = d.pop("url", UNSET) - uuid = d.pop("uuid", UNSET) + # } + _uuid = d.pop("uuid", UNSET) + uuid: UUID + if isinstance(_uuid, Unset): + uuid = UNSET + else: + uuid = UUID(_uuid) embargoed = d.pop("embargoed", UNSET) alerts = [] _alerts = d.pop("alerts", UNSET) - if _alerts is UNSET: - alerts = UNSET - else: - for alerts_item_data in _alerts or []: - _alerts_item = alerts_item_data - alerts_item: Alert - if isinstance(_alerts_item, Unset): - alerts_item = UNSET - else: - alerts_item = Alert.from_dict(_alerts_item) - - alerts.append(alerts_item) - + for 
alerts_item_data in _alerts or []: + # } + _alerts_item = alerts_item_data + alerts_item: Alert + if isinstance(_alerts_item, Unset): + alerts_item = UNSET + else: + alerts_item = Alert.from_dict(_alerts_item) + + alerts.append(alerts_item) + + # } _created_dt = d.pop("created_dt", UNSET) created_dt: datetime.datetime if isinstance(_created_dt, Unset): @@ -131,6 +176,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: created_dt = isoparse(_created_dt) + # } _updated_dt = d.pop("updated_dt", UNSET) updated_dt: datetime.datetime if isinstance(_updated_dt, Unset): @@ -140,13 +186,15 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: description = d.pop("description", UNSET) - _type = d.pop("type", UNSET) - type: Union[Unset, FlawReferenceType] - if isinstance(_type, Unset): - type = UNSET + # } + _type_ = d.pop("type", UNSET) + type_: Union[Unset, FlawReferenceType] + if isinstance(_type_, Unset): + type_ = UNSET else: - type = FlawReferenceType(_type) + type_ = FlawReferenceType(_type_) + # } _dt = d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -169,7 +217,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: created_dt=created_dt, updated_dt=updated_dt, description=description, - type=type, + type_=type_, dt=dt, env=env, revision=revision, @@ -182,11 +230,11 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: @staticmethod def get_fields(): return { - "flaw": str, + "flaw": UUID, "url": str, - "uuid": str, + "uuid": UUID, "embargoed": bool, - "alerts": List[Alert], + "alerts": list["Alert"], "created_dt": datetime.datetime, "updated_dt": datetime.datetime, "description": str, @@ -198,7 +246,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_reject_create_response_200.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_reject_create_response_200.py index cea6f14..d1aed6d 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_reject_create_response_200.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_reject_create_response_200.py @@ -1,7 +1,8 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import Any, TypeVar, Union -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse from ..types import UNSET, OSIDBModel, Unset @@ -9,26 +10,34 @@ T = TypeVar("T", bound="OsidbApiV1FlawsRejectCreateResponse200") -@attr.s(auto_attribs=True) +@_attrs_define class OsidbApiV1FlawsRejectCreateResponse200(OSIDBModel): - """ """ + """ + Attributes: + dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ dt: Union[Unset, datetime.datetime] = UNSET env: Union[Unset, str] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: dt: Union[Unset, str] = UNSET if not isinstance(self.dt, Unset): dt = self.dt.isoformat() env = self.env + revision = self.revision + version = self.version - 
field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(dt, Unset): field_dict["dt"] = dt @@ -42,8 +51,9 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: d = src_dict.copy() + # } _dt = d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -77,7 +87,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_retrieve_response_200.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_retrieve_response_200.py index 5cf65aa..ef9606e 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_retrieve_response_200.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_retrieve_response_200.py @@ -1,62 +1,106 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union, cast +from typing import TYPE_CHECKING, Any, TypeVar, Union, cast +from uuid import UUID -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.affect import Affect -from ..models.alert import Alert from ..models.blank_enum import BlankEnum -from ..models.comment import Comment -from ..models.flaw_acknowledgment import FlawAcknowledgment -from ..models.flaw_classification import FlawClassification -from ..models.flaw_cvss import FlawCVSS -from ..models.flaw_reference import FlawReference from ..models.impact_enum import ImpactEnum from ..models.major_incident_state_enum import MajorIncidentStateEnum from ..models.nist_cvss_validation_enum import NistCvssValidationEnum -from ..models.package import Package from ..models.requires_cve_description_enum import RequiresCveDescriptionEnum from ..models.source_be_0_enum import SourceBe0Enum from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.affect import Affect + from ..models.alert import Alert + from ..models.comment import Comment + from ..models.flaw_acknowledgment import FlawAcknowledgment + from ..models.flaw_classification import FlawClassification + from ..models.flaw_cvss import FlawCVSS + from ..models.flaw_reference import FlawReference + from ..models.package import Package + + T = TypeVar("T", bound="OsidbApiV1FlawsRetrieveResponse200") -@attr.s(auto_attribs=True) +@_attrs_define class OsidbApiV1FlawsRetrieveResponse200(OSIDBModel): - """ """ - - uuid: str + """ + Attributes: + uuid (UUID): + title (str): + trackers (list[str]): + comment_zero (str): + affects (list['Affect']): + comments (list['Comment']): + package_versions (list['Package']): + acknowledgments (list['FlawAcknowledgment']): + references (list['FlawReference']): + cvss_scores (list['FlawCVSS']): + embargoed (bool): The embargoed boolean attribute is technically read-only as it just indirectly modifies the + ACLs but is mandatory as it controls the access to the resource. + created_dt (datetime.datetime): + updated_dt (datetime.datetime): The updated_dt timestamp attribute is mandatory on update as it is used to + detect mit-air collisions. 
+ classification (FlawClassification): + alerts (list['Alert']): + cve_id (Union[None, Unset, str]): + impact (Union[BlankEnum, ImpactEnum, Unset]): + components (Union[Unset, list[str]]): + cve_description (Union[Unset, str]): + requires_cve_description (Union[BlankEnum, RequiresCveDescriptionEnum, Unset]): + statement (Union[Unset, str]): + cwe_id (Union[Unset, str]): + unembargo_dt (Union[None, Unset, datetime.datetime]): + source (Union[BlankEnum, SourceBe0Enum, Unset]): + reported_dt (Union[None, Unset, datetime.datetime]): + mitigation (Union[Unset, str]): + major_incident_state (Union[BlankEnum, MajorIncidentStateEnum, Unset]): + major_incident_start_dt (Union[None, Unset, datetime.datetime]): + nist_cvss_validation (Union[BlankEnum, NistCvssValidationEnum, Unset]): + group_key (Union[Unset, str]): + owner (Union[Unset, str]): + task_key (Union[Unset, str]): + team_id (Union[Unset, str]): + dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ + + uuid: UUID title: str - trackers: List[str] + trackers: list[str] comment_zero: str - affects: List[Affect] - comments: List[Comment] - package_versions: List[Package] - acknowledgments: List[FlawAcknowledgment] - references: List[FlawReference] - cvss_scores: List[FlawCVSS] + affects: list["Affect"] + comments: list["Comment"] + package_versions: list["Package"] + acknowledgments: list["FlawAcknowledgment"] + references: list["FlawReference"] + cvss_scores: list["FlawCVSS"] embargoed: bool created_dt: datetime.datetime updated_dt: datetime.datetime - classification: FlawClassification - alerts: List[Alert] - cve_id: Union[Unset, None, str] = UNSET + classification: "FlawClassification" + alerts: list["Alert"] + cve_id: Union[None, Unset, str] = UNSET impact: Union[BlankEnum, ImpactEnum, Unset] = UNSET - components: Union[Unset, List[str]] = UNSET + components: Union[Unset, list[str]] = UNSET cve_description: Union[Unset, str] = UNSET - requires_cve_description: Union[ - BlankEnum, RequiresCveDescriptionEnum, Unset - ] = UNSET + requires_cve_description: Union[BlankEnum, RequiresCveDescriptionEnum, Unset] = UNSET statement: Union[Unset, str] = UNSET cwe_id: Union[Unset, str] = UNSET - unembargo_dt: Union[Unset, None, datetime.datetime] = UNSET + unembargo_dt: Union[None, Unset, datetime.datetime] = UNSET source: Union[BlankEnum, SourceBe0Enum, Unset] = UNSET - reported_dt: Union[Unset, None, datetime.datetime] = UNSET + reported_dt: Union[None, Unset, datetime.datetime] = UNSET mitigation: Union[Unset, str] = UNSET major_incident_state: Union[BlankEnum, MajorIncidentStateEnum, Unset] = UNSET - major_incident_start_dt: Union[Unset, None, datetime.datetime] = UNSET + major_incident_start_dt: Union[None, Unset, datetime.datetime] = UNSET nist_cvss_validation: Union[BlankEnum, NistCvssValidationEnum, Unset] = UNSET group_key: Union[Unset, str] = UNSET owner: Union[Unset, str] = UNSET @@ -66,77 +110,83 @@ class OsidbApiV1FlawsRetrieveResponse200(OSIDBModel): env: Union[Unset, str] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + uuid: str = UNSET + if not isinstance(self.uuid, Unset): + uuid = str(self.uuid) - def to_dict(self) -> Dict[str, Any]: - uuid = self.uuid title = self.title - trackers: List[str] = UNSET + + trackers: list[str] = 
UNSET if not isinstance(self.trackers, Unset): trackers = self.trackers comment_zero = self.comment_zero - affects: List[Dict[str, Any]] = UNSET + + affects: list[dict[str, Any]] = UNSET if not isinstance(self.affects, Unset): affects = [] for affects_item_data in self.affects: - affects_item: Dict[str, Any] = UNSET + affects_item: dict[str, Any] = UNSET if not isinstance(affects_item_data, Unset): affects_item = affects_item_data.to_dict() affects.append(affects_item) - comments: List[Dict[str, Any]] = UNSET + comments: list[dict[str, Any]] = UNSET if not isinstance(self.comments, Unset): comments = [] for comments_item_data in self.comments: - comments_item: Dict[str, Any] = UNSET + comments_item: dict[str, Any] = UNSET if not isinstance(comments_item_data, Unset): comments_item = comments_item_data.to_dict() comments.append(comments_item) - package_versions: List[Dict[str, Any]] = UNSET + package_versions: list[dict[str, Any]] = UNSET if not isinstance(self.package_versions, Unset): package_versions = [] for package_versions_item_data in self.package_versions: - package_versions_item: Dict[str, Any] = UNSET + package_versions_item: dict[str, Any] = UNSET if not isinstance(package_versions_item_data, Unset): package_versions_item = package_versions_item_data.to_dict() package_versions.append(package_versions_item) - acknowledgments: List[Dict[str, Any]] = UNSET + acknowledgments: list[dict[str, Any]] = UNSET if not isinstance(self.acknowledgments, Unset): acknowledgments = [] for acknowledgments_item_data in self.acknowledgments: - acknowledgments_item: Dict[str, Any] = UNSET + acknowledgments_item: dict[str, Any] = UNSET if not isinstance(acknowledgments_item_data, Unset): acknowledgments_item = acknowledgments_item_data.to_dict() acknowledgments.append(acknowledgments_item) - references: List[Dict[str, Any]] = UNSET + references: list[dict[str, Any]] = UNSET if not isinstance(self.references, Unset): references = [] for references_item_data in self.references: - references_item: Dict[str, Any] = UNSET + references_item: dict[str, Any] = UNSET if not isinstance(references_item_data, Unset): references_item = references_item_data.to_dict() references.append(references_item) - cvss_scores: List[Dict[str, Any]] = UNSET + cvss_scores: list[dict[str, Any]] = UNSET if not isinstance(self.cvss_scores, Unset): cvss_scores = [] for cvss_scores_item_data in self.cvss_scores: - cvss_scores_item: Dict[str, Any] = UNSET + cvss_scores_item: dict[str, Any] = UNSET if not isinstance(cvss_scores_item_data, Unset): cvss_scores_item = cvss_scores_item_data.to_dict() cvss_scores.append(cvss_scores_item) embargoed = self.embargoed + created_dt: str = UNSET if not isinstance(self.created_dt, Unset): created_dt = self.created_dt.isoformat() @@ -145,65 +195,72 @@ def to_dict(self) -> Dict[str, Any]: if not isinstance(self.updated_dt, Unset): updated_dt = self.updated_dt.isoformat() - classification: Dict[str, Any] = UNSET + classification: dict[str, Any] = UNSET if not isinstance(self.classification, Unset): classification = self.classification.to_dict() - alerts: List[Dict[str, Any]] = UNSET + alerts: list[dict[str, Any]] = UNSET if not isinstance(self.alerts, Unset): alerts = [] for alerts_item_data in self.alerts: - alerts_item: Dict[str, Any] = UNSET + alerts_item: dict[str, Any] = UNSET if not isinstance(alerts_item_data, Unset): alerts_item = alerts_item_data.to_dict() alerts.append(alerts_item) - cve_id = self.cve_id + cve_id: Union[None, Unset, str] + if isinstance(self.cve_id, Unset): + cve_id = UNSET 
+ else: + cve_id = self.cve_id + impact: Union[Unset, str] if isinstance(self.impact, Unset): impact = UNSET elif isinstance(self.impact, ImpactEnum): impact = UNSET if not isinstance(self.impact, Unset): - impact = ImpactEnum(self.impact).value else: impact = UNSET if not isinstance(self.impact, Unset): - impact = BlankEnum(self.impact).value - components: Union[Unset, List[str]] = UNSET + components: Union[Unset, list[str]] = UNSET if not isinstance(self.components, Unset): components = self.components cve_description = self.cve_description + requires_cve_description: Union[Unset, str] if isinstance(self.requires_cve_description, Unset): requires_cve_description = UNSET elif isinstance(self.requires_cve_description, RequiresCveDescriptionEnum): requires_cve_description = UNSET if not isinstance(self.requires_cve_description, Unset): - - requires_cve_description = RequiresCveDescriptionEnum( - self.requires_cve_description - ).value + requires_cve_description = RequiresCveDescriptionEnum(self.requires_cve_description).value else: requires_cve_description = UNSET if not isinstance(self.requires_cve_description, Unset): - - requires_cve_description = BlankEnum( - self.requires_cve_description - ).value + requires_cve_description = BlankEnum(self.requires_cve_description).value statement = self.statement + cwe_id = self.cwe_id - unembargo_dt: Union[Unset, None, str] = UNSET - if not isinstance(self.unembargo_dt, Unset): - unembargo_dt = self.unembargo_dt.isoformat() if self.unembargo_dt else None + + unembargo_dt: Union[None, Unset, str] + if isinstance(self.unembargo_dt, Unset): + unembargo_dt = UNSET + elif isinstance(self.unembargo_dt, datetime.datetime): + unembargo_dt = UNSET + if not isinstance(self.unembargo_dt, Unset): + unembargo_dt = self.unembargo_dt.isoformat() + + else: + unembargo_dt = self.unembargo_dt source: Union[Unset, str] if isinstance(self.source, Unset): @@ -211,44 +268,49 @@ def to_dict(self) -> Dict[str, Any]: elif isinstance(self.source, SourceBe0Enum): source = UNSET if not isinstance(self.source, Unset): - source = SourceBe0Enum(self.source).value else: source = UNSET if not isinstance(self.source, Unset): - source = BlankEnum(self.source).value - reported_dt: Union[Unset, None, str] = UNSET - if not isinstance(self.reported_dt, Unset): - reported_dt = self.reported_dt.isoformat() if self.reported_dt else None + reported_dt: Union[None, Unset, str] + if isinstance(self.reported_dt, Unset): + reported_dt = UNSET + elif isinstance(self.reported_dt, datetime.datetime): + reported_dt = UNSET + if not isinstance(self.reported_dt, Unset): + reported_dt = self.reported_dt.isoformat() + + else: + reported_dt = self.reported_dt mitigation = self.mitigation + major_incident_state: Union[Unset, str] if isinstance(self.major_incident_state, Unset): major_incident_state = UNSET elif isinstance(self.major_incident_state, MajorIncidentStateEnum): major_incident_state = UNSET if not isinstance(self.major_incident_state, Unset): - - major_incident_state = MajorIncidentStateEnum( - self.major_incident_state - ).value + major_incident_state = MajorIncidentStateEnum(self.major_incident_state).value else: major_incident_state = UNSET if not isinstance(self.major_incident_state, Unset): - major_incident_state = BlankEnum(self.major_incident_state).value - major_incident_start_dt: Union[Unset, None, str] = UNSET - if not isinstance(self.major_incident_start_dt, Unset): - major_incident_start_dt = ( - self.major_incident_start_dt.isoformat() - if self.major_incident_start_dt - else None 
- ) + major_incident_start_dt: Union[None, Unset, str] + if isinstance(self.major_incident_start_dt, Unset): + major_incident_start_dt = UNSET + elif isinstance(self.major_incident_start_dt, datetime.datetime): + major_incident_start_dt = UNSET + if not isinstance(self.major_incident_start_dt, Unset): + major_incident_start_dt = self.major_incident_start_dt.isoformat() + + else: + major_incident_start_dt = self.major_incident_start_dt nist_cvss_validation: Union[Unset, str] if isinstance(self.nist_cvss_validation, Unset): @@ -256,30 +318,32 @@ def to_dict(self) -> Dict[str, Any]: elif isinstance(self.nist_cvss_validation, NistCvssValidationEnum): nist_cvss_validation = UNSET if not isinstance(self.nist_cvss_validation, Unset): - - nist_cvss_validation = NistCvssValidationEnum( - self.nist_cvss_validation - ).value + nist_cvss_validation = NistCvssValidationEnum(self.nist_cvss_validation).value else: nist_cvss_validation = UNSET if not isinstance(self.nist_cvss_validation, Unset): - nist_cvss_validation = BlankEnum(self.nist_cvss_validation).value group_key = self.group_key + owner = self.owner + task_key = self.task_key + team_id = self.team_id + dt: Union[Unset, str] = UNSET if not isinstance(self.dt, Unset): dt = self.dt.isoformat() env = self.env + revision = self.revision + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(uuid, Unset): field_dict["uuid"] = uuid @@ -359,110 +423,112 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.affect import Affect + from ..models.alert import Alert + from ..models.comment import Comment + from ..models.flaw_acknowledgment import FlawAcknowledgment + from ..models.flaw_classification import FlawClassification + from ..models.flaw_cvss import FlawCVSS + from ..models.flaw_reference import FlawReference + from ..models.package import Package + d = src_dict.copy() - uuid = d.pop("uuid", UNSET) + # } + _uuid = d.pop("uuid", UNSET) + uuid: UUID + if isinstance(_uuid, Unset): + uuid = UNSET + else: + uuid = UUID(_uuid) title = d.pop("title", UNSET) - trackers = cast(List[str], d.pop("trackers", UNSET)) + trackers = cast(list[str], d.pop("trackers", UNSET)) comment_zero = d.pop("comment_zero", UNSET) affects = [] _affects = d.pop("affects", UNSET) - if _affects is UNSET: - affects = UNSET - else: - for affects_item_data in _affects or []: - _affects_item = affects_item_data - affects_item: Affect - if isinstance(_affects_item, Unset): - affects_item = UNSET - else: - affects_item = Affect.from_dict(_affects_item) + for affects_item_data in _affects or []: + # } + _affects_item = affects_item_data + affects_item: Affect + if isinstance(_affects_item, Unset): + affects_item = UNSET + else: + affects_item = Affect.from_dict(_affects_item) - affects.append(affects_item) + affects.append(affects_item) comments = [] _comments = d.pop("comments", UNSET) - if _comments is UNSET: - comments = UNSET - else: - for comments_item_data in _comments or []: - _comments_item = comments_item_data - comments_item: Comment - if isinstance(_comments_item, Unset): - comments_item = UNSET - else: - comments_item = Comment.from_dict(_comments_item) + for comments_item_data in _comments or []: + # } + _comments_item = comments_item_data + comments_item: Comment + if isinstance(_comments_item, Unset): + comments_item = 
UNSET + else: + comments_item = Comment.from_dict(_comments_item) - comments.append(comments_item) + comments.append(comments_item) package_versions = [] _package_versions = d.pop("package_versions", UNSET) - if _package_versions is UNSET: - package_versions = UNSET - else: - for package_versions_item_data in _package_versions or []: - _package_versions_item = package_versions_item_data - package_versions_item: Package - if isinstance(_package_versions_item, Unset): - package_versions_item = UNSET - else: - package_versions_item = Package.from_dict(_package_versions_item) + for package_versions_item_data in _package_versions or []: + # } + _package_versions_item = package_versions_item_data + package_versions_item: Package + if isinstance(_package_versions_item, Unset): + package_versions_item = UNSET + else: + package_versions_item = Package.from_dict(_package_versions_item) - package_versions.append(package_versions_item) + package_versions.append(package_versions_item) acknowledgments = [] _acknowledgments = d.pop("acknowledgments", UNSET) - if _acknowledgments is UNSET: - acknowledgments = UNSET - else: - for acknowledgments_item_data in _acknowledgments or []: - _acknowledgments_item = acknowledgments_item_data - acknowledgments_item: FlawAcknowledgment - if isinstance(_acknowledgments_item, Unset): - acknowledgments_item = UNSET - else: - acknowledgments_item = FlawAcknowledgment.from_dict( - _acknowledgments_item - ) + for acknowledgments_item_data in _acknowledgments or []: + # } + _acknowledgments_item = acknowledgments_item_data + acknowledgments_item: FlawAcknowledgment + if isinstance(_acknowledgments_item, Unset): + acknowledgments_item = UNSET + else: + acknowledgments_item = FlawAcknowledgment.from_dict(_acknowledgments_item) - acknowledgments.append(acknowledgments_item) + acknowledgments.append(acknowledgments_item) references = [] _references = d.pop("references", UNSET) - if _references is UNSET: - references = UNSET - else: - for references_item_data in _references or []: - _references_item = references_item_data - references_item: FlawReference - if isinstance(_references_item, Unset): - references_item = UNSET - else: - references_item = FlawReference.from_dict(_references_item) + for references_item_data in _references or []: + # } + _references_item = references_item_data + references_item: FlawReference + if isinstance(_references_item, Unset): + references_item = UNSET + else: + references_item = FlawReference.from_dict(_references_item) - references.append(references_item) + references.append(references_item) cvss_scores = [] _cvss_scores = d.pop("cvss_scores", UNSET) - if _cvss_scores is UNSET: - cvss_scores = UNSET - else: - for cvss_scores_item_data in _cvss_scores or []: - _cvss_scores_item = cvss_scores_item_data - cvss_scores_item: FlawCVSS - if isinstance(_cvss_scores_item, Unset): - cvss_scores_item = UNSET - else: - cvss_scores_item = FlawCVSS.from_dict(_cvss_scores_item) + for cvss_scores_item_data in _cvss_scores or []: + # } + _cvss_scores_item = cvss_scores_item_data + cvss_scores_item: FlawCVSS + if isinstance(_cvss_scores_item, Unset): + cvss_scores_item = UNSET + else: + cvss_scores_item = FlawCVSS.from_dict(_cvss_scores_item) - cvss_scores.append(cvss_scores_item) + cvss_scores.append(cvss_scores_item) embargoed = d.pop("embargoed", UNSET) + # } _created_dt = d.pop("created_dt", UNSET) created_dt: datetime.datetime if isinstance(_created_dt, Unset): @@ -470,6 +536,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: created_dt = 
isoparse(_created_dt) + # } _updated_dt = d.pop("updated_dt", UNSET) updated_dt: datetime.datetime if isinstance(_updated_dt, Unset): @@ -477,6 +544,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: updated_dt = isoparse(_updated_dt) + # } _classification = d.pop("classification", UNSET) classification: FlawClassification if isinstance(_classification, Unset): @@ -486,20 +554,25 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: alerts = [] _alerts = d.pop("alerts", UNSET) - if _alerts is UNSET: - alerts = UNSET - else: - for alerts_item_data in _alerts or []: - _alerts_item = alerts_item_data - alerts_item: Alert - if isinstance(_alerts_item, Unset): - alerts_item = UNSET - else: - alerts_item = Alert.from_dict(_alerts_item) + for alerts_item_data in _alerts or []: + # } + _alerts_item = alerts_item_data + alerts_item: Alert + if isinstance(_alerts_item, Unset): + alerts_item = UNSET + else: + alerts_item = Alert.from_dict(_alerts_item) - alerts.append(alerts_item) + alerts.append(alerts_item) - cve_id = d.pop("cve_id", UNSET) + def _parse_cve_id(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + cve_id = _parse_cve_id(d.pop("cve_id", UNSET)) def _parse_impact(data: object) -> Union[BlankEnum, ImpactEnum, Unset]: if isinstance(data, Unset): @@ -507,8 +580,9 @@ def _parse_impact(data: object) -> Union[BlankEnum, ImpactEnum, Unset]: try: if not isinstance(data, str): raise TypeError() + # } _impact_type_0 = data - impact_type_0: Union[Unset, ImpactEnum] + impact_type_0: ImpactEnum if isinstance(_impact_type_0, Unset): impact_type_0 = UNSET else: @@ -519,8 +593,9 @@ def _parse_impact(data: object) -> Union[BlankEnum, ImpactEnum, Unset]: pass if not isinstance(data, str): raise TypeError() + # } _impact_type_1 = data - impact_type_1: Union[Unset, BlankEnum] + impact_type_1: BlankEnum if isinstance(_impact_type_1, Unset): impact_type_1 = UNSET else: @@ -530,61 +605,67 @@ def _parse_impact(data: object) -> Union[BlankEnum, ImpactEnum, Unset]: impact = _parse_impact(d.pop("impact", UNSET)) - components = cast(List[str], d.pop("components", UNSET)) + components = cast(list[str], d.pop("components", UNSET)) cve_description = d.pop("cve_description", UNSET) - def _parse_requires_cve_description( - data: object, - ) -> Union[BlankEnum, RequiresCveDescriptionEnum, Unset]: + def _parse_requires_cve_description(data: object) -> Union[BlankEnum, RequiresCveDescriptionEnum, Unset]: if isinstance(data, Unset): return data try: if not isinstance(data, str): raise TypeError() + # } _requires_cve_description_type_0 = data - requires_cve_description_type_0: Union[ - Unset, RequiresCveDescriptionEnum - ] + requires_cve_description_type_0: RequiresCveDescriptionEnum if isinstance(_requires_cve_description_type_0, Unset): requires_cve_description_type_0 = UNSET else: - requires_cve_description_type_0 = RequiresCveDescriptionEnum( - _requires_cve_description_type_0 - ) + requires_cve_description_type_0 = RequiresCveDescriptionEnum(_requires_cve_description_type_0) return requires_cve_description_type_0 except: # noqa: E722 pass if not isinstance(data, str): raise TypeError() + # } _requires_cve_description_type_1 = data - requires_cve_description_type_1: Union[Unset, BlankEnum] + requires_cve_description_type_1: BlankEnum if isinstance(_requires_cve_description_type_1, Unset): requires_cve_description_type_1 = UNSET else: - requires_cve_description_type_1 = 
BlankEnum( - _requires_cve_description_type_1 - ) + requires_cve_description_type_1 = BlankEnum(_requires_cve_description_type_1) return requires_cve_description_type_1 - requires_cve_description = _parse_requires_cve_description( - d.pop("requires_cve_description", UNSET) - ) + requires_cve_description = _parse_requires_cve_description(d.pop("requires_cve_description", UNSET)) statement = d.pop("statement", UNSET) cwe_id = d.pop("cwe_id", UNSET) - _unembargo_dt = d.pop("unembargo_dt", UNSET) - unembargo_dt: Union[Unset, None, datetime.datetime] - if _unembargo_dt is None: - unembargo_dt = None - elif isinstance(_unembargo_dt, Unset): - unembargo_dt = UNSET - else: - unembargo_dt = isoparse(_unembargo_dt) + def _parse_unembargo_dt(data: object) -> Union[None, Unset, datetime.datetime]: + if data is None: + return data + if isinstance(data, Unset): + return data + try: + if not isinstance(data, str): + raise TypeError() + # } + _unembargo_dt_type_0 = data + unembargo_dt_type_0: datetime.datetime + if isinstance(_unembargo_dt_type_0, Unset): + unembargo_dt_type_0 = UNSET + else: + unembargo_dt_type_0 = isoparse(_unembargo_dt_type_0) + + return unembargo_dt_type_0 + except: # noqa: E722 + pass + return cast(Union[None, Unset, datetime.datetime], data) + + unembargo_dt = _parse_unembargo_dt(d.pop("unembargo_dt", UNSET)) def _parse_source(data: object) -> Union[BlankEnum, SourceBe0Enum, Unset]: if isinstance(data, Unset): @@ -592,8 +673,9 @@ def _parse_source(data: object) -> Union[BlankEnum, SourceBe0Enum, Unset]: try: if not isinstance(data, str): raise TypeError() + # } _source_type_0 = data - source_type_0: Union[Unset, SourceBe0Enum] + source_type_0: SourceBe0Enum if isinstance(_source_type_0, Unset): source_type_0 = UNSET else: @@ -604,8 +686,9 @@ def _parse_source(data: object) -> Union[BlankEnum, SourceBe0Enum, Unset]: pass if not isinstance(data, str): raise TypeError() + # } _source_type_1 = data - source_type_1: Union[Unset, BlankEnum] + source_type_1: BlankEnum if isinstance(_source_type_1, Unset): source_type_1 = UNSET else: @@ -615,41 +698,53 @@ def _parse_source(data: object) -> Union[BlankEnum, SourceBe0Enum, Unset]: source = _parse_source(d.pop("source", UNSET)) - _reported_dt = d.pop("reported_dt", UNSET) - reported_dt: Union[Unset, None, datetime.datetime] - if _reported_dt is None: - reported_dt = None - elif isinstance(_reported_dt, Unset): - reported_dt = UNSET - else: - reported_dt = isoparse(_reported_dt) + def _parse_reported_dt(data: object) -> Union[None, Unset, datetime.datetime]: + if data is None: + return data + if isinstance(data, Unset): + return data + try: + if not isinstance(data, str): + raise TypeError() + # } + _reported_dt_type_0 = data + reported_dt_type_0: datetime.datetime + if isinstance(_reported_dt_type_0, Unset): + reported_dt_type_0 = UNSET + else: + reported_dt_type_0 = isoparse(_reported_dt_type_0) + + return reported_dt_type_0 + except: # noqa: E722 + pass + return cast(Union[None, Unset, datetime.datetime], data) + + reported_dt = _parse_reported_dt(d.pop("reported_dt", UNSET)) mitigation = d.pop("mitigation", UNSET) - def _parse_major_incident_state( - data: object, - ) -> Union[BlankEnum, MajorIncidentStateEnum, Unset]: + def _parse_major_incident_state(data: object) -> Union[BlankEnum, MajorIncidentStateEnum, Unset]: if isinstance(data, Unset): return data try: if not isinstance(data, str): raise TypeError() + # } _major_incident_state_type_0 = data - major_incident_state_type_0: Union[Unset, MajorIncidentStateEnum] + 
major_incident_state_type_0: MajorIncidentStateEnum if isinstance(_major_incident_state_type_0, Unset): major_incident_state_type_0 = UNSET else: - major_incident_state_type_0 = MajorIncidentStateEnum( - _major_incident_state_type_0 - ) + major_incident_state_type_0 = MajorIncidentStateEnum(_major_incident_state_type_0) return major_incident_state_type_0 except: # noqa: E722 pass if not isinstance(data, str): raise TypeError() + # } _major_incident_state_type_1 = data - major_incident_state_type_1: Union[Unset, BlankEnum] + major_incident_state_type_1: BlankEnum if isinstance(_major_incident_state_type_1, Unset): major_incident_state_type_1 = UNSET else: @@ -657,43 +752,53 @@ def _parse_major_incident_state( return major_incident_state_type_1 - major_incident_state = _parse_major_incident_state( - d.pop("major_incident_state", UNSET) - ) + major_incident_state = _parse_major_incident_state(d.pop("major_incident_state", UNSET)) - _major_incident_start_dt = d.pop("major_incident_start_dt", UNSET) - major_incident_start_dt: Union[Unset, None, datetime.datetime] - if _major_incident_start_dt is None: - major_incident_start_dt = None - elif isinstance(_major_incident_start_dt, Unset): - major_incident_start_dt = UNSET - else: - major_incident_start_dt = isoparse(_major_incident_start_dt) + def _parse_major_incident_start_dt(data: object) -> Union[None, Unset, datetime.datetime]: + if data is None: + return data + if isinstance(data, Unset): + return data + try: + if not isinstance(data, str): + raise TypeError() + # } + _major_incident_start_dt_type_0 = data + major_incident_start_dt_type_0: datetime.datetime + if isinstance(_major_incident_start_dt_type_0, Unset): + major_incident_start_dt_type_0 = UNSET + else: + major_incident_start_dt_type_0 = isoparse(_major_incident_start_dt_type_0) - def _parse_nist_cvss_validation( - data: object, - ) -> Union[BlankEnum, NistCvssValidationEnum, Unset]: + return major_incident_start_dt_type_0 + except: # noqa: E722 + pass + return cast(Union[None, Unset, datetime.datetime], data) + + major_incident_start_dt = _parse_major_incident_start_dt(d.pop("major_incident_start_dt", UNSET)) + + def _parse_nist_cvss_validation(data: object) -> Union[BlankEnum, NistCvssValidationEnum, Unset]: if isinstance(data, Unset): return data try: if not isinstance(data, str): raise TypeError() + # } _nist_cvss_validation_type_0 = data - nist_cvss_validation_type_0: Union[Unset, NistCvssValidationEnum] + nist_cvss_validation_type_0: NistCvssValidationEnum if isinstance(_nist_cvss_validation_type_0, Unset): nist_cvss_validation_type_0 = UNSET else: - nist_cvss_validation_type_0 = NistCvssValidationEnum( - _nist_cvss_validation_type_0 - ) + nist_cvss_validation_type_0 = NistCvssValidationEnum(_nist_cvss_validation_type_0) return nist_cvss_validation_type_0 except: # noqa: E722 pass if not isinstance(data, str): raise TypeError() + # } _nist_cvss_validation_type_1 = data - nist_cvss_validation_type_1: Union[Unset, BlankEnum] + nist_cvss_validation_type_1: BlankEnum if isinstance(_nist_cvss_validation_type_1, Unset): nist_cvss_validation_type_1 = UNSET else: @@ -701,9 +806,7 @@ def _parse_nist_cvss_validation( return nist_cvss_validation_type_1 - nist_cvss_validation = _parse_nist_cvss_validation( - d.pop("nist_cvss_validation", UNSET) - ) + nist_cvss_validation = _parse_nist_cvss_validation(d.pop("nist_cvss_validation", UNSET)) group_key = d.pop("group_key", UNSET) @@ -713,6 +816,7 @@ def _parse_nist_cvss_validation( team_id = d.pop("team_id", UNSET) + # } _dt = d.pop("dt", UNSET) 
dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -772,34 +876,34 @@ def _parse_nist_cvss_validation( @staticmethod def get_fields(): return { - "uuid": str, + "uuid": UUID, "title": str, - "trackers": List[str], + "trackers": list[str], "comment_zero": str, - "affects": List[Affect], - "comments": List[Comment], - "package_versions": List[Package], - "acknowledgments": List[FlawAcknowledgment], - "references": List[FlawReference], - "cvss_scores": List[FlawCVSS], + "affects": list["Affect"], + "comments": list["Comment"], + "package_versions": list["Package"], + "acknowledgments": list["FlawAcknowledgment"], + "references": list["FlawReference"], + "cvss_scores": list["FlawCVSS"], "embargoed": bool, "created_dt": datetime.datetime, "updated_dt": datetime.datetime, "classification": FlawClassification, - "alerts": List[Alert], - "cve_id": str, + "alerts": list["Alert"], + "cve_id": Union[None, str], "impact": Union[BlankEnum, ImpactEnum], - "components": List[str], + "components": list[str], "cve_description": str, "requires_cve_description": Union[BlankEnum, RequiresCveDescriptionEnum], "statement": str, "cwe_id": str, - "unembargo_dt": datetime.datetime, + "unembargo_dt": Union[None, datetime.datetime], "source": Union[BlankEnum, SourceBe0Enum], - "reported_dt": datetime.datetime, + "reported_dt": Union[None, datetime.datetime], "mitigation": str, "major_incident_state": Union[BlankEnum, MajorIncidentStateEnum], - "major_incident_start_dt": datetime.datetime, + "major_incident_start_dt": Union[None, datetime.datetime], "nist_cvss_validation": Union[BlankEnum, NistCvssValidationEnum], "group_key": str, "owner": str, @@ -812,7 +916,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_update_response_200.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_update_response_200.py index 1673404..dff0068 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_update_response_200.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_flaws_update_response_200.py @@ -1,62 +1,106 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union, cast +from typing import TYPE_CHECKING, Any, TypeVar, Union, cast +from uuid import UUID -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.affect import Affect -from ..models.alert import Alert from ..models.blank_enum import BlankEnum -from ..models.comment import Comment -from ..models.flaw_acknowledgment import FlawAcknowledgment -from ..models.flaw_classification import FlawClassification -from ..models.flaw_cvss import FlawCVSS -from ..models.flaw_reference import FlawReference from ..models.impact_enum import ImpactEnum from ..models.major_incident_state_enum import MajorIncidentStateEnum from ..models.nist_cvss_validation_enum import NistCvssValidationEnum -from ..models.package import Package from ..models.requires_cve_description_enum import RequiresCveDescriptionEnum from ..models.source_be_0_enum import SourceBe0Enum from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.affect import Affect + from ..models.alert import Alert + from ..models.comment import Comment + from ..models.flaw_acknowledgment import 
FlawAcknowledgment + from ..models.flaw_classification import FlawClassification + from ..models.flaw_cvss import FlawCVSS + from ..models.flaw_reference import FlawReference + from ..models.package import Package + + T = TypeVar("T", bound="OsidbApiV1FlawsUpdateResponse200") -@attr.s(auto_attribs=True) +@_attrs_define class OsidbApiV1FlawsUpdateResponse200(OSIDBModel): - """ """ - - uuid: str + """ + Attributes: + uuid (UUID): + title (str): + trackers (list[str]): + comment_zero (str): + affects (list['Affect']): + comments (list['Comment']): + package_versions (list['Package']): + acknowledgments (list['FlawAcknowledgment']): + references (list['FlawReference']): + cvss_scores (list['FlawCVSS']): + embargoed (bool): The embargoed boolean attribute is technically read-only as it just indirectly modifies the + ACLs but is mandatory as it controls the access to the resource. + created_dt (datetime.datetime): + updated_dt (datetime.datetime): The updated_dt timestamp attribute is mandatory on update as it is used to + detect mit-air collisions. + classification (FlawClassification): + alerts (list['Alert']): + cve_id (Union[None, Unset, str]): + impact (Union[BlankEnum, ImpactEnum, Unset]): + components (Union[Unset, list[str]]): + cve_description (Union[Unset, str]): + requires_cve_description (Union[BlankEnum, RequiresCveDescriptionEnum, Unset]): + statement (Union[Unset, str]): + cwe_id (Union[Unset, str]): + unembargo_dt (Union[None, Unset, datetime.datetime]): + source (Union[BlankEnum, SourceBe0Enum, Unset]): + reported_dt (Union[None, Unset, datetime.datetime]): + mitigation (Union[Unset, str]): + major_incident_state (Union[BlankEnum, MajorIncidentStateEnum, Unset]): + major_incident_start_dt (Union[None, Unset, datetime.datetime]): + nist_cvss_validation (Union[BlankEnum, NistCvssValidationEnum, Unset]): + group_key (Union[Unset, str]): + owner (Union[Unset, str]): + task_key (Union[Unset, str]): + team_id (Union[Unset, str]): + dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ + + uuid: UUID title: str - trackers: List[str] + trackers: list[str] comment_zero: str - affects: List[Affect] - comments: List[Comment] - package_versions: List[Package] - acknowledgments: List[FlawAcknowledgment] - references: List[FlawReference] - cvss_scores: List[FlawCVSS] + affects: list["Affect"] + comments: list["Comment"] + package_versions: list["Package"] + acknowledgments: list["FlawAcknowledgment"] + references: list["FlawReference"] + cvss_scores: list["FlawCVSS"] embargoed: bool created_dt: datetime.datetime updated_dt: datetime.datetime - classification: FlawClassification - alerts: List[Alert] - cve_id: Union[Unset, None, str] = UNSET + classification: "FlawClassification" + alerts: list["Alert"] + cve_id: Union[None, Unset, str] = UNSET impact: Union[BlankEnum, ImpactEnum, Unset] = UNSET - components: Union[Unset, List[str]] = UNSET + components: Union[Unset, list[str]] = UNSET cve_description: Union[Unset, str] = UNSET - requires_cve_description: Union[ - BlankEnum, RequiresCveDescriptionEnum, Unset - ] = UNSET + requires_cve_description: Union[BlankEnum, RequiresCveDescriptionEnum, Unset] = UNSET statement: Union[Unset, str] = UNSET cwe_id: Union[Unset, str] = UNSET - unembargo_dt: Union[Unset, None, datetime.datetime] = UNSET + unembargo_dt: Union[None, Unset, datetime.datetime] = UNSET source: Union[BlankEnum, SourceBe0Enum, Unset] = UNSET - reported_dt: Union[Unset, None, datetime.datetime] = UNSET + 
reported_dt: Union[None, Unset, datetime.datetime] = UNSET mitigation: Union[Unset, str] = UNSET major_incident_state: Union[BlankEnum, MajorIncidentStateEnum, Unset] = UNSET - major_incident_start_dt: Union[Unset, None, datetime.datetime] = UNSET + major_incident_start_dt: Union[None, Unset, datetime.datetime] = UNSET nist_cvss_validation: Union[BlankEnum, NistCvssValidationEnum, Unset] = UNSET group_key: Union[Unset, str] = UNSET owner: Union[Unset, str] = UNSET @@ -66,77 +110,83 @@ class OsidbApiV1FlawsUpdateResponse200(OSIDBModel): env: Union[Unset, str] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + uuid: str = UNSET + if not isinstance(self.uuid, Unset): + uuid = str(self.uuid) - def to_dict(self) -> Dict[str, Any]: - uuid = self.uuid title = self.title - trackers: List[str] = UNSET + + trackers: list[str] = UNSET if not isinstance(self.trackers, Unset): trackers = self.trackers comment_zero = self.comment_zero - affects: List[Dict[str, Any]] = UNSET + + affects: list[dict[str, Any]] = UNSET if not isinstance(self.affects, Unset): affects = [] for affects_item_data in self.affects: - affects_item: Dict[str, Any] = UNSET + affects_item: dict[str, Any] = UNSET if not isinstance(affects_item_data, Unset): affects_item = affects_item_data.to_dict() affects.append(affects_item) - comments: List[Dict[str, Any]] = UNSET + comments: list[dict[str, Any]] = UNSET if not isinstance(self.comments, Unset): comments = [] for comments_item_data in self.comments: - comments_item: Dict[str, Any] = UNSET + comments_item: dict[str, Any] = UNSET if not isinstance(comments_item_data, Unset): comments_item = comments_item_data.to_dict() comments.append(comments_item) - package_versions: List[Dict[str, Any]] = UNSET + package_versions: list[dict[str, Any]] = UNSET if not isinstance(self.package_versions, Unset): package_versions = [] for package_versions_item_data in self.package_versions: - package_versions_item: Dict[str, Any] = UNSET + package_versions_item: dict[str, Any] = UNSET if not isinstance(package_versions_item_data, Unset): package_versions_item = package_versions_item_data.to_dict() package_versions.append(package_versions_item) - acknowledgments: List[Dict[str, Any]] = UNSET + acknowledgments: list[dict[str, Any]] = UNSET if not isinstance(self.acknowledgments, Unset): acknowledgments = [] for acknowledgments_item_data in self.acknowledgments: - acknowledgments_item: Dict[str, Any] = UNSET + acknowledgments_item: dict[str, Any] = UNSET if not isinstance(acknowledgments_item_data, Unset): acknowledgments_item = acknowledgments_item_data.to_dict() acknowledgments.append(acknowledgments_item) - references: List[Dict[str, Any]] = UNSET + references: list[dict[str, Any]] = UNSET if not isinstance(self.references, Unset): references = [] for references_item_data in self.references: - references_item: Dict[str, Any] = UNSET + references_item: dict[str, Any] = UNSET if not isinstance(references_item_data, Unset): references_item = references_item_data.to_dict() references.append(references_item) - cvss_scores: List[Dict[str, Any]] = UNSET + cvss_scores: list[dict[str, Any]] = UNSET if not isinstance(self.cvss_scores, Unset): cvss_scores = [] for cvss_scores_item_data in self.cvss_scores: - cvss_scores_item: Dict[str, Any] = UNSET + cvss_scores_item: dict[str, Any] = UNSET 
if not isinstance(cvss_scores_item_data, Unset): cvss_scores_item = cvss_scores_item_data.to_dict() cvss_scores.append(cvss_scores_item) embargoed = self.embargoed + created_dt: str = UNSET if not isinstance(self.created_dt, Unset): created_dt = self.created_dt.isoformat() @@ -145,65 +195,72 @@ def to_dict(self) -> Dict[str, Any]: if not isinstance(self.updated_dt, Unset): updated_dt = self.updated_dt.isoformat() - classification: Dict[str, Any] = UNSET + classification: dict[str, Any] = UNSET if not isinstance(self.classification, Unset): classification = self.classification.to_dict() - alerts: List[Dict[str, Any]] = UNSET + alerts: list[dict[str, Any]] = UNSET if not isinstance(self.alerts, Unset): alerts = [] for alerts_item_data in self.alerts: - alerts_item: Dict[str, Any] = UNSET + alerts_item: dict[str, Any] = UNSET if not isinstance(alerts_item_data, Unset): alerts_item = alerts_item_data.to_dict() alerts.append(alerts_item) - cve_id = self.cve_id + cve_id: Union[None, Unset, str] + if isinstance(self.cve_id, Unset): + cve_id = UNSET + else: + cve_id = self.cve_id + impact: Union[Unset, str] if isinstance(self.impact, Unset): impact = UNSET elif isinstance(self.impact, ImpactEnum): impact = UNSET if not isinstance(self.impact, Unset): - impact = ImpactEnum(self.impact).value else: impact = UNSET if not isinstance(self.impact, Unset): - impact = BlankEnum(self.impact).value - components: Union[Unset, List[str]] = UNSET + components: Union[Unset, list[str]] = UNSET if not isinstance(self.components, Unset): components = self.components cve_description = self.cve_description + requires_cve_description: Union[Unset, str] if isinstance(self.requires_cve_description, Unset): requires_cve_description = UNSET elif isinstance(self.requires_cve_description, RequiresCveDescriptionEnum): requires_cve_description = UNSET if not isinstance(self.requires_cve_description, Unset): - - requires_cve_description = RequiresCveDescriptionEnum( - self.requires_cve_description - ).value + requires_cve_description = RequiresCveDescriptionEnum(self.requires_cve_description).value else: requires_cve_description = UNSET if not isinstance(self.requires_cve_description, Unset): - - requires_cve_description = BlankEnum( - self.requires_cve_description - ).value + requires_cve_description = BlankEnum(self.requires_cve_description).value statement = self.statement + cwe_id = self.cwe_id - unembargo_dt: Union[Unset, None, str] = UNSET - if not isinstance(self.unembargo_dt, Unset): - unembargo_dt = self.unembargo_dt.isoformat() if self.unembargo_dt else None + + unembargo_dt: Union[None, Unset, str] + if isinstance(self.unembargo_dt, Unset): + unembargo_dt = UNSET + elif isinstance(self.unembargo_dt, datetime.datetime): + unembargo_dt = UNSET + if not isinstance(self.unembargo_dt, Unset): + unembargo_dt = self.unembargo_dt.isoformat() + + else: + unembargo_dt = self.unembargo_dt source: Union[Unset, str] if isinstance(self.source, Unset): @@ -211,44 +268,49 @@ def to_dict(self) -> Dict[str, Any]: elif isinstance(self.source, SourceBe0Enum): source = UNSET if not isinstance(self.source, Unset): - source = SourceBe0Enum(self.source).value else: source = UNSET if not isinstance(self.source, Unset): - source = BlankEnum(self.source).value - reported_dt: Union[Unset, None, str] = UNSET - if not isinstance(self.reported_dt, Unset): - reported_dt = self.reported_dt.isoformat() if self.reported_dt else None + reported_dt: Union[None, Unset, str] + if isinstance(self.reported_dt, Unset): + reported_dt = UNSET + elif 
isinstance(self.reported_dt, datetime.datetime): + reported_dt = UNSET + if not isinstance(self.reported_dt, Unset): + reported_dt = self.reported_dt.isoformat() + + else: + reported_dt = self.reported_dt mitigation = self.mitigation + major_incident_state: Union[Unset, str] if isinstance(self.major_incident_state, Unset): major_incident_state = UNSET elif isinstance(self.major_incident_state, MajorIncidentStateEnum): major_incident_state = UNSET if not isinstance(self.major_incident_state, Unset): - - major_incident_state = MajorIncidentStateEnum( - self.major_incident_state - ).value + major_incident_state = MajorIncidentStateEnum(self.major_incident_state).value else: major_incident_state = UNSET if not isinstance(self.major_incident_state, Unset): - major_incident_state = BlankEnum(self.major_incident_state).value - major_incident_start_dt: Union[Unset, None, str] = UNSET - if not isinstance(self.major_incident_start_dt, Unset): - major_incident_start_dt = ( - self.major_incident_start_dt.isoformat() - if self.major_incident_start_dt - else None - ) + major_incident_start_dt: Union[None, Unset, str] + if isinstance(self.major_incident_start_dt, Unset): + major_incident_start_dt = UNSET + elif isinstance(self.major_incident_start_dt, datetime.datetime): + major_incident_start_dt = UNSET + if not isinstance(self.major_incident_start_dt, Unset): + major_incident_start_dt = self.major_incident_start_dt.isoformat() + + else: + major_incident_start_dt = self.major_incident_start_dt nist_cvss_validation: Union[Unset, str] if isinstance(self.nist_cvss_validation, Unset): @@ -256,30 +318,32 @@ def to_dict(self) -> Dict[str, Any]: elif isinstance(self.nist_cvss_validation, NistCvssValidationEnum): nist_cvss_validation = UNSET if not isinstance(self.nist_cvss_validation, Unset): - - nist_cvss_validation = NistCvssValidationEnum( - self.nist_cvss_validation - ).value + nist_cvss_validation = NistCvssValidationEnum(self.nist_cvss_validation).value else: nist_cvss_validation = UNSET if not isinstance(self.nist_cvss_validation, Unset): - nist_cvss_validation = BlankEnum(self.nist_cvss_validation).value group_key = self.group_key + owner = self.owner + task_key = self.task_key + team_id = self.team_id + dt: Union[Unset, str] = UNSET if not isinstance(self.dt, Unset): dt = self.dt.isoformat() env = self.env + revision = self.revision + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(uuid, Unset): field_dict["uuid"] = uuid @@ -359,110 +423,112 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.affect import Affect + from ..models.alert import Alert + from ..models.comment import Comment + from ..models.flaw_acknowledgment import FlawAcknowledgment + from ..models.flaw_classification import FlawClassification + from ..models.flaw_cvss import FlawCVSS + from ..models.flaw_reference import FlawReference + from ..models.package import Package + d = src_dict.copy() - uuid = d.pop("uuid", UNSET) + # } + _uuid = d.pop("uuid", UNSET) + uuid: UUID + if isinstance(_uuid, Unset): + uuid = UNSET + else: + uuid = UUID(_uuid) title = d.pop("title", UNSET) - trackers = cast(List[str], d.pop("trackers", UNSET)) + trackers = cast(list[str], d.pop("trackers", UNSET)) comment_zero = d.pop("comment_zero", UNSET) affects = [] _affects = d.pop("affects", UNSET) - 
if _affects is UNSET: - affects = UNSET - else: - for affects_item_data in _affects or []: - _affects_item = affects_item_data - affects_item: Affect - if isinstance(_affects_item, Unset): - affects_item = UNSET - else: - affects_item = Affect.from_dict(_affects_item) + for affects_item_data in _affects or []: + # } + _affects_item = affects_item_data + affects_item: Affect + if isinstance(_affects_item, Unset): + affects_item = UNSET + else: + affects_item = Affect.from_dict(_affects_item) - affects.append(affects_item) + affects.append(affects_item) comments = [] _comments = d.pop("comments", UNSET) - if _comments is UNSET: - comments = UNSET - else: - for comments_item_data in _comments or []: - _comments_item = comments_item_data - comments_item: Comment - if isinstance(_comments_item, Unset): - comments_item = UNSET - else: - comments_item = Comment.from_dict(_comments_item) + for comments_item_data in _comments or []: + # } + _comments_item = comments_item_data + comments_item: Comment + if isinstance(_comments_item, Unset): + comments_item = UNSET + else: + comments_item = Comment.from_dict(_comments_item) - comments.append(comments_item) + comments.append(comments_item) package_versions = [] _package_versions = d.pop("package_versions", UNSET) - if _package_versions is UNSET: - package_versions = UNSET - else: - for package_versions_item_data in _package_versions or []: - _package_versions_item = package_versions_item_data - package_versions_item: Package - if isinstance(_package_versions_item, Unset): - package_versions_item = UNSET - else: - package_versions_item = Package.from_dict(_package_versions_item) + for package_versions_item_data in _package_versions or []: + # } + _package_versions_item = package_versions_item_data + package_versions_item: Package + if isinstance(_package_versions_item, Unset): + package_versions_item = UNSET + else: + package_versions_item = Package.from_dict(_package_versions_item) - package_versions.append(package_versions_item) + package_versions.append(package_versions_item) acknowledgments = [] _acknowledgments = d.pop("acknowledgments", UNSET) - if _acknowledgments is UNSET: - acknowledgments = UNSET - else: - for acknowledgments_item_data in _acknowledgments or []: - _acknowledgments_item = acknowledgments_item_data - acknowledgments_item: FlawAcknowledgment - if isinstance(_acknowledgments_item, Unset): - acknowledgments_item = UNSET - else: - acknowledgments_item = FlawAcknowledgment.from_dict( - _acknowledgments_item - ) + for acknowledgments_item_data in _acknowledgments or []: + # } + _acknowledgments_item = acknowledgments_item_data + acknowledgments_item: FlawAcknowledgment + if isinstance(_acknowledgments_item, Unset): + acknowledgments_item = UNSET + else: + acknowledgments_item = FlawAcknowledgment.from_dict(_acknowledgments_item) - acknowledgments.append(acknowledgments_item) + acknowledgments.append(acknowledgments_item) references = [] _references = d.pop("references", UNSET) - if _references is UNSET: - references = UNSET - else: - for references_item_data in _references or []: - _references_item = references_item_data - references_item: FlawReference - if isinstance(_references_item, Unset): - references_item = UNSET - else: - references_item = FlawReference.from_dict(_references_item) + for references_item_data in _references or []: + # } + _references_item = references_item_data + references_item: FlawReference + if isinstance(_references_item, Unset): + references_item = UNSET + else: + references_item = 
FlawReference.from_dict(_references_item) - references.append(references_item) + references.append(references_item) cvss_scores = [] _cvss_scores = d.pop("cvss_scores", UNSET) - if _cvss_scores is UNSET: - cvss_scores = UNSET - else: - for cvss_scores_item_data in _cvss_scores or []: - _cvss_scores_item = cvss_scores_item_data - cvss_scores_item: FlawCVSS - if isinstance(_cvss_scores_item, Unset): - cvss_scores_item = UNSET - else: - cvss_scores_item = FlawCVSS.from_dict(_cvss_scores_item) + for cvss_scores_item_data in _cvss_scores or []: + # } + _cvss_scores_item = cvss_scores_item_data + cvss_scores_item: FlawCVSS + if isinstance(_cvss_scores_item, Unset): + cvss_scores_item = UNSET + else: + cvss_scores_item = FlawCVSS.from_dict(_cvss_scores_item) - cvss_scores.append(cvss_scores_item) + cvss_scores.append(cvss_scores_item) embargoed = d.pop("embargoed", UNSET) + # } _created_dt = d.pop("created_dt", UNSET) created_dt: datetime.datetime if isinstance(_created_dt, Unset): @@ -470,6 +536,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: created_dt = isoparse(_created_dt) + # } _updated_dt = d.pop("updated_dt", UNSET) updated_dt: datetime.datetime if isinstance(_updated_dt, Unset): @@ -477,6 +544,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: updated_dt = isoparse(_updated_dt) + # } _classification = d.pop("classification", UNSET) classification: FlawClassification if isinstance(_classification, Unset): @@ -486,20 +554,25 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: alerts = [] _alerts = d.pop("alerts", UNSET) - if _alerts is UNSET: - alerts = UNSET - else: - for alerts_item_data in _alerts or []: - _alerts_item = alerts_item_data - alerts_item: Alert - if isinstance(_alerts_item, Unset): - alerts_item = UNSET - else: - alerts_item = Alert.from_dict(_alerts_item) + for alerts_item_data in _alerts or []: + # } + _alerts_item = alerts_item_data + alerts_item: Alert + if isinstance(_alerts_item, Unset): + alerts_item = UNSET + else: + alerts_item = Alert.from_dict(_alerts_item) - alerts.append(alerts_item) + alerts.append(alerts_item) - cve_id = d.pop("cve_id", UNSET) + def _parse_cve_id(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + cve_id = _parse_cve_id(d.pop("cve_id", UNSET)) def _parse_impact(data: object) -> Union[BlankEnum, ImpactEnum, Unset]: if isinstance(data, Unset): @@ -507,8 +580,9 @@ def _parse_impact(data: object) -> Union[BlankEnum, ImpactEnum, Unset]: try: if not isinstance(data, str): raise TypeError() + # } _impact_type_0 = data - impact_type_0: Union[Unset, ImpactEnum] + impact_type_0: ImpactEnum if isinstance(_impact_type_0, Unset): impact_type_0 = UNSET else: @@ -519,8 +593,9 @@ def _parse_impact(data: object) -> Union[BlankEnum, ImpactEnum, Unset]: pass if not isinstance(data, str): raise TypeError() + # } _impact_type_1 = data - impact_type_1: Union[Unset, BlankEnum] + impact_type_1: BlankEnum if isinstance(_impact_type_1, Unset): impact_type_1 = UNSET else: @@ -530,61 +605,67 @@ def _parse_impact(data: object) -> Union[BlankEnum, ImpactEnum, Unset]: impact = _parse_impact(d.pop("impact", UNSET)) - components = cast(List[str], d.pop("components", UNSET)) + components = cast(list[str], d.pop("components", UNSET)) cve_description = d.pop("cve_description", UNSET) - def _parse_requires_cve_description( - data: object, - ) -> Union[BlankEnum, RequiresCveDescriptionEnum, Unset]: + def 
_parse_requires_cve_description(data: object) -> Union[BlankEnum, RequiresCveDescriptionEnum, Unset]: if isinstance(data, Unset): return data try: if not isinstance(data, str): raise TypeError() + # } _requires_cve_description_type_0 = data - requires_cve_description_type_0: Union[ - Unset, RequiresCveDescriptionEnum - ] + requires_cve_description_type_0: RequiresCveDescriptionEnum if isinstance(_requires_cve_description_type_0, Unset): requires_cve_description_type_0 = UNSET else: - requires_cve_description_type_0 = RequiresCveDescriptionEnum( - _requires_cve_description_type_0 - ) + requires_cve_description_type_0 = RequiresCveDescriptionEnum(_requires_cve_description_type_0) return requires_cve_description_type_0 except: # noqa: E722 pass if not isinstance(data, str): raise TypeError() + # } _requires_cve_description_type_1 = data - requires_cve_description_type_1: Union[Unset, BlankEnum] + requires_cve_description_type_1: BlankEnum if isinstance(_requires_cve_description_type_1, Unset): requires_cve_description_type_1 = UNSET else: - requires_cve_description_type_1 = BlankEnum( - _requires_cve_description_type_1 - ) + requires_cve_description_type_1 = BlankEnum(_requires_cve_description_type_1) return requires_cve_description_type_1 - requires_cve_description = _parse_requires_cve_description( - d.pop("requires_cve_description", UNSET) - ) + requires_cve_description = _parse_requires_cve_description(d.pop("requires_cve_description", UNSET)) statement = d.pop("statement", UNSET) cwe_id = d.pop("cwe_id", UNSET) - _unembargo_dt = d.pop("unembargo_dt", UNSET) - unembargo_dt: Union[Unset, None, datetime.datetime] - if _unembargo_dt is None: - unembargo_dt = None - elif isinstance(_unembargo_dt, Unset): - unembargo_dt = UNSET - else: - unembargo_dt = isoparse(_unembargo_dt) + def _parse_unembargo_dt(data: object) -> Union[None, Unset, datetime.datetime]: + if data is None: + return data + if isinstance(data, Unset): + return data + try: + if not isinstance(data, str): + raise TypeError() + # } + _unembargo_dt_type_0 = data + unembargo_dt_type_0: datetime.datetime + if isinstance(_unembargo_dt_type_0, Unset): + unembargo_dt_type_0 = UNSET + else: + unembargo_dt_type_0 = isoparse(_unembargo_dt_type_0) + + return unembargo_dt_type_0 + except: # noqa: E722 + pass + return cast(Union[None, Unset, datetime.datetime], data) + + unembargo_dt = _parse_unembargo_dt(d.pop("unembargo_dt", UNSET)) def _parse_source(data: object) -> Union[BlankEnum, SourceBe0Enum, Unset]: if isinstance(data, Unset): @@ -592,8 +673,9 @@ def _parse_source(data: object) -> Union[BlankEnum, SourceBe0Enum, Unset]: try: if not isinstance(data, str): raise TypeError() + # } _source_type_0 = data - source_type_0: Union[Unset, SourceBe0Enum] + source_type_0: SourceBe0Enum if isinstance(_source_type_0, Unset): source_type_0 = UNSET else: @@ -604,8 +686,9 @@ def _parse_source(data: object) -> Union[BlankEnum, SourceBe0Enum, Unset]: pass if not isinstance(data, str): raise TypeError() + # } _source_type_1 = data - source_type_1: Union[Unset, BlankEnum] + source_type_1: BlankEnum if isinstance(_source_type_1, Unset): source_type_1 = UNSET else: @@ -615,41 +698,53 @@ def _parse_source(data: object) -> Union[BlankEnum, SourceBe0Enum, Unset]: source = _parse_source(d.pop("source", UNSET)) - _reported_dt = d.pop("reported_dt", UNSET) - reported_dt: Union[Unset, None, datetime.datetime] - if _reported_dt is None: - reported_dt = None - elif isinstance(_reported_dt, Unset): - reported_dt = UNSET - else: - reported_dt = 
isoparse(_reported_dt) + def _parse_reported_dt(data: object) -> Union[None, Unset, datetime.datetime]: + if data is None: + return data + if isinstance(data, Unset): + return data + try: + if not isinstance(data, str): + raise TypeError() + # } + _reported_dt_type_0 = data + reported_dt_type_0: datetime.datetime + if isinstance(_reported_dt_type_0, Unset): + reported_dt_type_0 = UNSET + else: + reported_dt_type_0 = isoparse(_reported_dt_type_0) + + return reported_dt_type_0 + except: # noqa: E722 + pass + return cast(Union[None, Unset, datetime.datetime], data) + + reported_dt = _parse_reported_dt(d.pop("reported_dt", UNSET)) mitigation = d.pop("mitigation", UNSET) - def _parse_major_incident_state( - data: object, - ) -> Union[BlankEnum, MajorIncidentStateEnum, Unset]: + def _parse_major_incident_state(data: object) -> Union[BlankEnum, MajorIncidentStateEnum, Unset]: if isinstance(data, Unset): return data try: if not isinstance(data, str): raise TypeError() + # } _major_incident_state_type_0 = data - major_incident_state_type_0: Union[Unset, MajorIncidentStateEnum] + major_incident_state_type_0: MajorIncidentStateEnum if isinstance(_major_incident_state_type_0, Unset): major_incident_state_type_0 = UNSET else: - major_incident_state_type_0 = MajorIncidentStateEnum( - _major_incident_state_type_0 - ) + major_incident_state_type_0 = MajorIncidentStateEnum(_major_incident_state_type_0) return major_incident_state_type_0 except: # noqa: E722 pass if not isinstance(data, str): raise TypeError() + # } _major_incident_state_type_1 = data - major_incident_state_type_1: Union[Unset, BlankEnum] + major_incident_state_type_1: BlankEnum if isinstance(_major_incident_state_type_1, Unset): major_incident_state_type_1 = UNSET else: @@ -657,43 +752,53 @@ def _parse_major_incident_state( return major_incident_state_type_1 - major_incident_state = _parse_major_incident_state( - d.pop("major_incident_state", UNSET) - ) + major_incident_state = _parse_major_incident_state(d.pop("major_incident_state", UNSET)) - _major_incident_start_dt = d.pop("major_incident_start_dt", UNSET) - major_incident_start_dt: Union[Unset, None, datetime.datetime] - if _major_incident_start_dt is None: - major_incident_start_dt = None - elif isinstance(_major_incident_start_dt, Unset): - major_incident_start_dt = UNSET - else: - major_incident_start_dt = isoparse(_major_incident_start_dt) + def _parse_major_incident_start_dt(data: object) -> Union[None, Unset, datetime.datetime]: + if data is None: + return data + if isinstance(data, Unset): + return data + try: + if not isinstance(data, str): + raise TypeError() + # } + _major_incident_start_dt_type_0 = data + major_incident_start_dt_type_0: datetime.datetime + if isinstance(_major_incident_start_dt_type_0, Unset): + major_incident_start_dt_type_0 = UNSET + else: + major_incident_start_dt_type_0 = isoparse(_major_incident_start_dt_type_0) - def _parse_nist_cvss_validation( - data: object, - ) -> Union[BlankEnum, NistCvssValidationEnum, Unset]: + return major_incident_start_dt_type_0 + except: # noqa: E722 + pass + return cast(Union[None, Unset, datetime.datetime], data) + + major_incident_start_dt = _parse_major_incident_start_dt(d.pop("major_incident_start_dt", UNSET)) + + def _parse_nist_cvss_validation(data: object) -> Union[BlankEnum, NistCvssValidationEnum, Unset]: if isinstance(data, Unset): return data try: if not isinstance(data, str): raise TypeError() + # } _nist_cvss_validation_type_0 = data - nist_cvss_validation_type_0: Union[Unset, NistCvssValidationEnum] + 
nist_cvss_validation_type_0: NistCvssValidationEnum if isinstance(_nist_cvss_validation_type_0, Unset): nist_cvss_validation_type_0 = UNSET else: - nist_cvss_validation_type_0 = NistCvssValidationEnum( - _nist_cvss_validation_type_0 - ) + nist_cvss_validation_type_0 = NistCvssValidationEnum(_nist_cvss_validation_type_0) return nist_cvss_validation_type_0 except: # noqa: E722 pass if not isinstance(data, str): raise TypeError() + # } _nist_cvss_validation_type_1 = data - nist_cvss_validation_type_1: Union[Unset, BlankEnum] + nist_cvss_validation_type_1: BlankEnum if isinstance(_nist_cvss_validation_type_1, Unset): nist_cvss_validation_type_1 = UNSET else: @@ -701,9 +806,7 @@ def _parse_nist_cvss_validation( return nist_cvss_validation_type_1 - nist_cvss_validation = _parse_nist_cvss_validation( - d.pop("nist_cvss_validation", UNSET) - ) + nist_cvss_validation = _parse_nist_cvss_validation(d.pop("nist_cvss_validation", UNSET)) group_key = d.pop("group_key", UNSET) @@ -713,6 +816,7 @@ def _parse_nist_cvss_validation( team_id = d.pop("team_id", UNSET) + # } _dt = d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -772,34 +876,34 @@ def _parse_nist_cvss_validation( @staticmethod def get_fields(): return { - "uuid": str, + "uuid": UUID, "title": str, - "trackers": List[str], + "trackers": list[str], "comment_zero": str, - "affects": List[Affect], - "comments": List[Comment], - "package_versions": List[Package], - "acknowledgments": List[FlawAcknowledgment], - "references": List[FlawReference], - "cvss_scores": List[FlawCVSS], + "affects": list["Affect"], + "comments": list["Comment"], + "package_versions": list["Package"], + "acknowledgments": list["FlawAcknowledgment"], + "references": list["FlawReference"], + "cvss_scores": list["FlawCVSS"], "embargoed": bool, "created_dt": datetime.datetime, "updated_dt": datetime.datetime, "classification": FlawClassification, - "alerts": List[Alert], - "cve_id": str, + "alerts": list["Alert"], + "cve_id": Union[None, str], "impact": Union[BlankEnum, ImpactEnum], - "components": List[str], + "components": list[str], "cve_description": str, "requires_cve_description": Union[BlankEnum, RequiresCveDescriptionEnum], "statement": str, "cwe_id": str, - "unembargo_dt": datetime.datetime, + "unembargo_dt": Union[None, datetime.datetime], "source": Union[BlankEnum, SourceBe0Enum], - "reported_dt": datetime.datetime, + "reported_dt": Union[None, datetime.datetime], "mitigation": str, "major_incident_state": Union[BlankEnum, MajorIncidentStateEnum], - "major_incident_start_dt": datetime.datetime, + "major_incident_start_dt": Union[None, datetime.datetime], "nist_cvss_validation": Union[BlankEnum, NistCvssValidationEnum], "group_key": str, "owner": str, @@ -812,7 +916,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_manifest_retrieve_response_200.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_manifest_retrieve_response_200.py index acc58be..a0733ab 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_manifest_retrieve_response_200.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_manifest_retrieve_response_200.py @@ -1,7 +1,8 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import Any, TypeVar, Union -import attr +from 
attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse from ..types import UNSET, OSIDBModel, Unset @@ -9,26 +10,34 @@ T = TypeVar("T", bound="OsidbApiV1ManifestRetrieveResponse200") -@attr.s(auto_attribs=True) +@_attrs_define class OsidbApiV1ManifestRetrieveResponse200(OSIDBModel): - """ """ + """ + Attributes: + dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ dt: Union[Unset, datetime.datetime] = UNSET env: Union[Unset, str] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: dt: Union[Unset, str] = UNSET if not isinstance(self.dt, Unset): dt = self.dt.isoformat() env = self.env + revision = self.revision + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(dt, Unset): field_dict["dt"] = dt @@ -42,8 +51,9 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: d = src_dict.copy() + # } _dt = d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -77,7 +87,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_schema_retrieve_response_200.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_schema_retrieve_response_200.py index 07da2d1..c5ac133 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_schema_retrieve_response_200.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_schema_retrieve_response_200.py @@ -1,83 +1,39 @@ -import datetime -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import Any, TypeVar -import attr -from dateutil.parser import isoparse +from attrs import define as _attrs_define +from attrs import field as _attrs_field -from ..types import UNSET, OSIDBModel, Unset +from ..types import OSIDBModel T = TypeVar("T", bound="OsidbApiV1SchemaRetrieveResponse200") -@attr.s(auto_attribs=True) +@_attrs_define class OsidbApiV1SchemaRetrieveResponse200(OSIDBModel): """ """ - dt: Union[Unset, datetime.datetime] = UNSET - env: Union[Unset, str] = UNSET - revision: Union[Unset, str] = UNSET - version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: - dt: Union[Unset, str] = UNSET - if not isinstance(self.dt, Unset): - dt = self.dt.isoformat() - - env = self.env - revision = self.revision - version = self.version - - field_dict: Dict[str, Any] = {} + def to_dict(self) -> dict[str, Any]: + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) - if not isinstance(dt, Unset): - field_dict["dt"] = dt - if not isinstance(env, Unset): - field_dict["env"] = env - if not isinstance(revision, Unset): - field_dict["revision"] = revision - if not 
isinstance(version, Unset): - field_dict["version"] = version return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: d = src_dict.copy() - _dt = d.pop("dt", UNSET) - dt: Union[Unset, datetime.datetime] - if isinstance(_dt, Unset): - dt = UNSET - else: - dt = isoparse(_dt) - - env = d.pop("env", UNSET) - - revision = d.pop("revision", UNSET) - - version = d.pop("version", UNSET) - - osidb_api_v1_schema_retrieve_response_200 = cls( - dt=dt, - env=env, - revision=revision, - version=version, - ) + osidb_api_v1_schema_retrieve_response_200 = cls() osidb_api_v1_schema_retrieve_response_200.additional_properties = d return osidb_api_v1_schema_retrieve_response_200 @staticmethod def get_fields(): - return { - "dt": datetime.datetime, - "env": str, - "revision": str, - "version": str, - } + return {} @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_status_retrieve_response_200.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_status_retrieve_response_200.py index b08286c..a2fc3ae 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_status_retrieve_response_200.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_status_retrieve_response_200.py @@ -1,50 +1,64 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.osidb_api_v1_status_retrieve_response_200_osidb_data import ( - OsidbApiV1StatusRetrieveResponse200OsidbData, -) -from ..models.osidb_api_v1_status_retrieve_response_200_osidb_service import ( - OsidbApiV1StatusRetrieveResponse200OsidbService, -) from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.osidb_api_v1_status_retrieve_response_200_osidb_data import ( + OsidbApiV1StatusRetrieveResponse200OsidbData, + ) + from ..models.osidb_api_v1_status_retrieve_response_200_osidb_service import ( + OsidbApiV1StatusRetrieveResponse200OsidbService, + ) + + T = TypeVar("T", bound="OsidbApiV1StatusRetrieveResponse200") -@attr.s(auto_attribs=True) +@_attrs_define class OsidbApiV1StatusRetrieveResponse200(OSIDBModel): - """ """ + """ + Attributes: + dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + osidb_data (Union[Unset, OsidbApiV1StatusRetrieveResponse200OsidbData]): + osidb_service (Union[Unset, OsidbApiV1StatusRetrieveResponse200OsidbService]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ dt: Union[Unset, datetime.datetime] = UNSET env: Union[Unset, str] = UNSET - osidb_data: Union[Unset, OsidbApiV1StatusRetrieveResponse200OsidbData] = UNSET - osidb_service: Union[Unset, OsidbApiV1StatusRetrieveResponse200OsidbService] = UNSET + osidb_data: Union[Unset, "OsidbApiV1StatusRetrieveResponse200OsidbData"] = UNSET + osidb_service: Union[Unset, "OsidbApiV1StatusRetrieveResponse200OsidbService"] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def 
to_dict(self) -> dict[str, Any]: dt: Union[Unset, str] = UNSET if not isinstance(self.dt, Unset): dt = self.dt.isoformat() env = self.env - osidb_data: Union[Unset, Dict[str, Any]] = UNSET + + osidb_data: Union[Unset, dict[str, Any]] = UNSET if not isinstance(self.osidb_data, Unset): osidb_data = self.osidb_data.to_dict() - osidb_service: Union[Unset, Dict[str, Any]] = UNSET + osidb_service: Union[Unset, dict[str, Any]] = UNSET if not isinstance(self.osidb_service, Unset): osidb_service = self.osidb_service.to_dict() revision = self.revision + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(dt, Unset): field_dict["dt"] = dt @@ -62,8 +76,16 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.osidb_api_v1_status_retrieve_response_200_osidb_data import ( + OsidbApiV1StatusRetrieveResponse200OsidbData, + ) + from ..models.osidb_api_v1_status_retrieve_response_200_osidb_service import ( + OsidbApiV1StatusRetrieveResponse200OsidbService, + ) + d = src_dict.copy() + # } _dt = d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -73,23 +95,21 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: env = d.pop("env", UNSET) + # } _osidb_data = d.pop("osidb_data", UNSET) osidb_data: Union[Unset, OsidbApiV1StatusRetrieveResponse200OsidbData] if isinstance(_osidb_data, Unset): osidb_data = UNSET else: - osidb_data = OsidbApiV1StatusRetrieveResponse200OsidbData.from_dict( - _osidb_data - ) + osidb_data = OsidbApiV1StatusRetrieveResponse200OsidbData.from_dict(_osidb_data) + # } _osidb_service = d.pop("osidb_service", UNSET) osidb_service: Union[Unset, OsidbApiV1StatusRetrieveResponse200OsidbService] if isinstance(_osidb_service, Unset): osidb_service = UNSET else: - osidb_service = OsidbApiV1StatusRetrieveResponse200OsidbService.from_dict( - _osidb_service - ) + osidb_service = OsidbApiV1StatusRetrieveResponse200OsidbService.from_dict(_osidb_service) revision = d.pop("revision", UNSET) @@ -119,7 +139,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_status_retrieve_response_200_osidb_data.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_status_retrieve_response_200_osidb_data.py index b90bf44..0a35d8f 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_status_retrieve_response_200_osidb_data.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_status_retrieve_response_200_osidb_data.py @@ -1,23 +1,27 @@ -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import Any, TypeVar, Union -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from ..types import UNSET, OSIDBModel, Unset T = TypeVar("T", bound="OsidbApiV1StatusRetrieveResponse200OsidbData") -@attr.s(auto_attribs=True) +@_attrs_define class OsidbApiV1StatusRetrieveResponse200OsidbData(OSIDBModel): - """ """ + """ + Attributes: + flaw_count (Union[Unset, int]): + """ flaw_count: Union[Unset, int] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = 
_attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: flaw_count = self.flaw_count - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(flaw_count, Unset): field_dict["flaw_count"] = flaw_count @@ -25,7 +29,7 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: d = src_dict.copy() flaw_count = d.pop("flaw_count", UNSET) @@ -43,7 +47,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_status_retrieve_response_200_osidb_service.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_status_retrieve_response_200_osidb_service.py index 991c6d7..c6b44ea 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_status_retrieve_response_200_osidb_service.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_status_retrieve_response_200_osidb_service.py @@ -1,33 +1,31 @@ -from typing import Any, Dict, List, Type, TypeVar +from typing import Any, TypeVar -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from ..types import OSIDBModel T = TypeVar("T", bound="OsidbApiV1StatusRetrieveResponse200OsidbService") -@attr.s(auto_attribs=True) +@_attrs_define class OsidbApiV1StatusRetrieveResponse200OsidbService(OSIDBModel): """ """ - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: - - field_dict: Dict[str, Any] = {} + def to_dict(self) -> dict[str, Any]: + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: d = src_dict.copy() osidb_api_v1_status_retrieve_response_200_osidb_service = cls() - osidb_api_v1_status_retrieve_response_200_osidb_service.additional_properties = ( - d - ) + osidb_api_v1_status_retrieve_response_200_osidb_service.additional_properties = d return osidb_api_v1_status_retrieve_response_200_osidb_service @staticmethod @@ -35,7 +33,7 @@ def get_fields(): return {} @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_trackers_create_response_201.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_trackers_create_response_201.py index e8fcf25..56c6579 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_trackers_create_response_201.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_trackers_create_response_201.py @@ -1,65 +1,98 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union, cast +from typing import TYPE_CHECKING, Any, TypeVar, Union +from uuid import UUID -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.alert import Alert 
-from ..models.erratum import Erratum from ..models.tracker_type import TrackerType from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.alert import Alert + from ..models.erratum import Erratum + + T = TypeVar("T", bound="OsidbApiV1TrackersCreateResponse201") -@attr.s(auto_attribs=True) +@_attrs_define class OsidbApiV1TrackersCreateResponse201(OSIDBModel): - """ """ - - errata: List[Erratum] + """ + Attributes: + errata (list['Erratum']): + ps_update_stream (str): + status (str): + resolution (str): + type_ (TrackerType): + uuid (UUID): + embargoed (bool): The embargoed boolean attribute is technically read-only as it just indirectly modifies the + ACLs but is mandatory as it controls the access to the resource. + alerts (list['Alert']): + created_dt (datetime.datetime): + updated_dt (datetime.datetime): The updated_dt timestamp attribute is mandatory on update as it is used to + detect mit-air collisions. + affects (Union[Unset, list[UUID]]): + sync_to_bz (Union[Unset, bool]): Setting sync_to_bz to false disables flaw sync with Bugzilla after this + operation. Use only as part of bulk actions and trigger a flaw bugzilla sync afterwards. Does nothing if BZ is + disabled. + dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ + + errata: list["Erratum"] ps_update_stream: str status: str resolution: str - type: TrackerType - uuid: str + type_: TrackerType + uuid: UUID embargoed: bool - alerts: List[Alert] + alerts: list["Alert"] created_dt: datetime.datetime updated_dt: datetime.datetime - affects: Union[Unset, List[str]] = UNSET + affects: Union[Unset, list[UUID]] = UNSET sync_to_bz: Union[Unset, bool] = UNSET dt: Union[Unset, datetime.datetime] = UNSET env: Union[Unset, str] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: - errata: List[Dict[str, Any]] = UNSET + def to_dict(self) -> dict[str, Any]: + errata: list[dict[str, Any]] = UNSET if not isinstance(self.errata, Unset): errata = [] for errata_item_data in self.errata: - errata_item: Dict[str, Any] = UNSET + errata_item: dict[str, Any] = UNSET if not isinstance(errata_item_data, Unset): errata_item = errata_item_data.to_dict() errata.append(errata_item) ps_update_stream = self.ps_update_stream + status = self.status + resolution = self.resolution - type: str = UNSET - if not isinstance(self.type, Unset): - type = TrackerType(self.type).value + type_: str = UNSET + if not isinstance(self.type_, Unset): + type_ = TrackerType(self.type_).value + + uuid: str = UNSET + if not isinstance(self.uuid, Unset): + uuid = str(self.uuid) - uuid = self.uuid embargoed = self.embargoed - alerts: List[Dict[str, Any]] = UNSET + + alerts: list[dict[str, Any]] = UNSET if not isinstance(self.alerts, Unset): alerts = [] for alerts_item_data in self.alerts: - alerts_item: Dict[str, Any] = UNSET + alerts_item: dict[str, Any] = UNSET if not isinstance(alerts_item_data, Unset): alerts_item = alerts_item_data.to_dict() @@ -73,20 +106,29 @@ def to_dict(self) -> Dict[str, Any]: if not isinstance(self.updated_dt, Unset): updated_dt = self.updated_dt.isoformat() - affects: Union[Unset, List[str]] = UNSET + affects: Union[Unset, list[str]] = UNSET if not isinstance(self.affects, Unset): - affects = self.affects + affects = [] + for 
affects_item_data in self.affects: + affects_item: str = UNSET + if not isinstance(affects_item_data, Unset): + affects_item = str(affects_item_data) + + affects.append(affects_item) sync_to_bz = self.sync_to_bz + dt: Union[Unset, str] = UNSET if not isinstance(self.dt, Unset): dt = self.dt.isoformat() env = self.env + revision = self.revision + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(errata, Unset): field_dict["errata"] = errata @@ -96,8 +138,8 @@ def to_dict(self) -> Dict[str, Any]: field_dict["status"] = status if not isinstance(resolution, Unset): field_dict["resolution"] = resolution - if not isinstance(type, Unset): - field_dict["type"] = type + if not isinstance(type_, Unset): + field_dict["type"] = type_ if not isinstance(uuid, Unset): field_dict["uuid"] = uuid if not isinstance(embargoed, Unset): @@ -124,22 +166,23 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.alert import Alert + from ..models.erratum import Erratum + d = src_dict.copy() errata = [] _errata = d.pop("errata", UNSET) - if _errata is UNSET: - errata = UNSET - else: - for errata_item_data in _errata or []: - _errata_item = errata_item_data - errata_item: Erratum - if isinstance(_errata_item, Unset): - errata_item = UNSET - else: - errata_item = Erratum.from_dict(_errata_item) + for errata_item_data in _errata or []: + # } + _errata_item = errata_item_data + errata_item: Erratum + if isinstance(_errata_item, Unset): + errata_item = UNSET + else: + errata_item = Erratum.from_dict(_errata_item) - errata.append(errata_item) + errata.append(errata_item) ps_update_stream = d.pop("ps_update_stream", UNSET) @@ -147,32 +190,38 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: resolution = d.pop("resolution", UNSET) - _type = d.pop("type", UNSET) - type: TrackerType - if isinstance(_type, Unset): - type = UNSET + # } + _type_ = d.pop("type", UNSET) + type_: TrackerType + if isinstance(_type_, Unset): + type_ = UNSET else: - type = TrackerType(_type) + type_ = TrackerType(_type_) - uuid = d.pop("uuid", UNSET) + # } + _uuid = d.pop("uuid", UNSET) + uuid: UUID + if isinstance(_uuid, Unset): + uuid = UNSET + else: + uuid = UUID(_uuid) embargoed = d.pop("embargoed", UNSET) alerts = [] _alerts = d.pop("alerts", UNSET) - if _alerts is UNSET: - alerts = UNSET - else: - for alerts_item_data in _alerts or []: - _alerts_item = alerts_item_data - alerts_item: Alert - if isinstance(_alerts_item, Unset): - alerts_item = UNSET - else: - alerts_item = Alert.from_dict(_alerts_item) - - alerts.append(alerts_item) - + for alerts_item_data in _alerts or []: + # } + _alerts_item = alerts_item_data + alerts_item: Alert + if isinstance(_alerts_item, Unset): + alerts_item = UNSET + else: + alerts_item = Alert.from_dict(_alerts_item) + + alerts.append(alerts_item) + + # } _created_dt = d.pop("created_dt", UNSET) created_dt: datetime.datetime if isinstance(_created_dt, Unset): @@ -180,6 +229,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: created_dt = isoparse(_created_dt) + # } _updated_dt = d.pop("updated_dt", UNSET) updated_dt: datetime.datetime if isinstance(_updated_dt, Unset): @@ -187,10 +237,22 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: updated_dt = isoparse(_updated_dt) - affects = cast(List[str], 
d.pop("affects", UNSET)) + affects = [] + _affects = d.pop("affects", UNSET) + for affects_item_data in _affects or []: + # } + _affects_item = affects_item_data + affects_item: UUID + if isinstance(_affects_item, Unset): + affects_item = UNSET + else: + affects_item = UUID(_affects_item) + + affects.append(affects_item) sync_to_bz = d.pop("sync_to_bz", UNSET) + # } _dt = d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -209,7 +271,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: ps_update_stream=ps_update_stream, status=status, resolution=resolution, - type=type, + type_=type_, uuid=uuid, embargoed=embargoed, alerts=alerts, @@ -229,17 +291,17 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: @staticmethod def get_fields(): return { - "errata": List[Erratum], + "errata": list["Erratum"], "ps_update_stream": str, "status": str, "resolution": str, "type": TrackerType, - "uuid": str, + "uuid": UUID, "embargoed": bool, - "alerts": List[Alert], + "alerts": list["Alert"], "created_dt": datetime.datetime, "updated_dt": datetime.datetime, - "affects": List[str], + "affects": list[UUID], "sync_to_bz": bool, "dt": datetime.datetime, "env": str, @@ -248,7 +310,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_trackers_list_affects_affectedness.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_trackers_list_affects_affectedness.py index e7a884d..078417d 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_trackers_list_affects_affectedness.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_trackers_list_affects_affectedness.py @@ -2,10 +2,10 @@ class OsidbApiV1TrackersListAffectsAffectedness(str, Enum): - VALUE_0 = "" AFFECTED = "AFFECTED" NEW = "NEW" NOTAFFECTED = "NOTAFFECTED" + VALUE_0 = "" def __str__(self) -> str: return str(self.value) diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_trackers_list_affects_flaw_impact.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_trackers_list_affects_flaw_impact.py index 0b4e95d..320e93c 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_trackers_list_affects_flaw_impact.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_trackers_list_affects_flaw_impact.py @@ -2,11 +2,11 @@ class OsidbApiV1TrackersListAffectsFlawImpact(str, Enum): - VALUE_0 = "" CRITICAL = "CRITICAL" IMPORTANT = "IMPORTANT" LOW = "LOW" MODERATE = "MODERATE" + VALUE_0 = "" def __str__(self) -> str: return str(self.value) diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_trackers_list_affects_flaw_source.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_trackers_list_affects_flaw_source.py index 1c320ba..a5b2b53 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_trackers_list_affects_flaw_source.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_trackers_list_affects_flaw_source.py @@ -2,7 +2,6 @@ class OsidbApiV1TrackersListAffectsFlawSource(str, Enum): - VALUE_0 = "" ADOBE = "ADOBE" APPLE = "APPLE" ASF = "ASF" @@ -86,6 +85,7 @@ class OsidbApiV1TrackersListAffectsFlawSource(str, Enum): TWITTER = "TWITTER" UBUNTU = "UBUNTU" UPSTREAM = "UPSTREAM" + VALUE_0 = "" VENDORSEC = "VENDORSEC" VULNWATCH = "VULNWATCH" WIRESHARK = 
"WIRESHARK" diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_trackers_list_affects_impact.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_trackers_list_affects_impact.py index 81c89a9..dee2253 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_trackers_list_affects_impact.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_trackers_list_affects_impact.py @@ -2,11 +2,11 @@ class OsidbApiV1TrackersListAffectsImpact(str, Enum): - VALUE_0 = "" CRITICAL = "CRITICAL" IMPORTANT = "IMPORTANT" LOW = "LOW" MODERATE = "MODERATE" + VALUE_0 = "" def __str__(self) -> str: return str(self.value) diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_trackers_list_affects_resolution.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_trackers_list_affects_resolution.py index 1f1d925..e878365 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_trackers_list_affects_resolution.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_trackers_list_affects_resolution.py @@ -2,11 +2,11 @@ class OsidbApiV1TrackersListAffectsResolution(str, Enum): - VALUE_0 = "" DEFER = "DEFER" DELEGATED = "DELEGATED" FIX = "FIX" OOSS = "OOSS" + VALUE_0 = "" WONTFIX = "WONTFIX" WONTREPORT = "WONTREPORT" diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_trackers_list_order_item.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_trackers_list_order_item.py index 556fff2..91bb85d 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_trackers_list_order_item.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_trackers_list_order_item.py @@ -2,35 +2,6 @@ class OsidbApiV1TrackersListOrderItem(str, Enum): - VALUE_0 = "-affects__affectedness" - VALUE_1 = "-affects__created_dt" - VALUE_2 = "-affects__embargoed" - VALUE_3 = "-affects__flaw__components" - VALUE_4 = "-affects__flaw__created_dt" - VALUE_5 = "-affects__flaw__cve_id" - VALUE_6 = "-affects__flaw__cwe_id" - VALUE_7 = "-affects__flaw__embargoed" - VALUE_8 = "-affects__flaw__impact" - VALUE_9 = "-affects__flaw__reported_dt" - VALUE_10 = "-affects__flaw__source" - VALUE_11 = "-affects__flaw__unembargo_dt" - VALUE_12 = "-affects__flaw__updated_dt" - VALUE_13 = "-affects__flaw__uuid" - VALUE_14 = "-affects__impact" - VALUE_15 = "-affects__ps_component" - VALUE_16 = "-affects__ps_module" - VALUE_17 = "-affects__resolution" - VALUE_18 = "-affects__updated_dt" - VALUE_19 = "-affects__uuid" - VALUE_20 = "-created_dt" - VALUE_21 = "-embargoed" - VALUE_22 = "-external_system_id" - VALUE_23 = "-ps_update_stream" - VALUE_24 = "-resolution" - VALUE_25 = "-status" - VALUE_26 = "-type" - VALUE_27 = "-updated_dt" - VALUE_28 = "-uuid" AFFECTS_AFFECTEDNESS = "affects__affectedness" AFFECTS_CREATED_DT = "affects__created_dt" AFFECTS_EMBARGOED = "affects__embargoed" @@ -60,6 +31,35 @@ class OsidbApiV1TrackersListOrderItem(str, Enum): TYPE = "type" UPDATED_DT = "updated_dt" UUID = "uuid" + VALUE_0 = "-affects__affectedness" + VALUE_1 = "-affects__created_dt" + VALUE_10 = "-affects__flaw__source" + VALUE_11 = "-affects__flaw__unembargo_dt" + VALUE_12 = "-affects__flaw__updated_dt" + VALUE_13 = "-affects__flaw__uuid" + VALUE_14 = "-affects__impact" + VALUE_15 = "-affects__ps_component" + VALUE_16 = "-affects__ps_module" + VALUE_17 = "-affects__resolution" + VALUE_18 = "-affects__updated_dt" + VALUE_19 = "-affects__uuid" + VALUE_2 = "-affects__embargoed" + VALUE_20 = "-created_dt" + VALUE_21 = "-embargoed" + 
VALUE_22 = "-external_system_id" + VALUE_23 = "-ps_update_stream" + VALUE_24 = "-resolution" + VALUE_25 = "-status" + VALUE_26 = "-type" + VALUE_27 = "-updated_dt" + VALUE_28 = "-uuid" + VALUE_3 = "-affects__flaw__components" + VALUE_4 = "-affects__flaw__created_dt" + VALUE_5 = "-affects__flaw__cve_id" + VALUE_6 = "-affects__flaw__cwe_id" + VALUE_7 = "-affects__flaw__embargoed" + VALUE_8 = "-affects__flaw__impact" + VALUE_9 = "-affects__flaw__reported_dt" def __str__(self) -> str: return str(self.value) diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_trackers_list_response_200.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_trackers_list_response_200.py index 5f79254..702e815 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_trackers_list_response_200.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_trackers_list_response_200.py @@ -1,52 +1,79 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union, cast -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.tracker import Tracker from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.tracker import Tracker + + T = TypeVar("T", bound="OsidbApiV1TrackersListResponse200") -@attr.s(auto_attribs=True) +@_attrs_define class OsidbApiV1TrackersListResponse200(OSIDBModel): - """ """ + """ + Attributes: + count (int): Example: 123. + results (list['Tracker']): + next_ (Union[None, Unset, str]): Example: http://api.example.org/accounts/?offset=400&limit=100. + previous (Union[None, Unset, str]): Example: http://api.example.org/accounts/?offset=200&limit=100. 
+ dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ count: int - results: List[Tracker] - next_: Union[Unset, None, str] = UNSET - previous: Union[Unset, None, str] = UNSET + results: list["Tracker"] + next_: Union[None, Unset, str] = UNSET + previous: Union[None, Unset, str] = UNSET dt: Union[Unset, datetime.datetime] = UNSET env: Union[Unset, str] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: count = self.count - results: List[Dict[str, Any]] = UNSET + + results: list[dict[str, Any]] = UNSET if not isinstance(self.results, Unset): results = [] for results_item_data in self.results: - results_item: Dict[str, Any] = UNSET + results_item: dict[str, Any] = UNSET if not isinstance(results_item_data, Unset): results_item = results_item_data.to_dict() results.append(results_item) - next_ = self.next_ - previous = self.previous + next_: Union[None, Unset, str] + if isinstance(self.next_, Unset): + next_ = UNSET + else: + next_ = self.next_ + + previous: Union[None, Unset, str] + if isinstance(self.previous, Unset): + previous = UNSET + else: + previous = self.previous + dt: Union[Unset, str] = UNSET if not isinstance(self.dt, Unset): dt = self.dt.isoformat() env = self.env + revision = self.revision + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(count, Unset): field_dict["count"] = count @@ -68,29 +95,44 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.tracker import Tracker + d = src_dict.copy() count = d.pop("count", UNSET) results = [] _results = d.pop("results", UNSET) - if _results is UNSET: - results = UNSET - else: - for results_item_data in _results or []: - _results_item = results_item_data - results_item: Tracker - if isinstance(_results_item, Unset): - results_item = UNSET - else: - results_item = Tracker.from_dict(_results_item) - - results.append(results_item) - - next_ = d.pop("next", UNSET) - - previous = d.pop("previous", UNSET) - + for results_item_data in _results or []: + # } + _results_item = results_item_data + results_item: Tracker + if isinstance(_results_item, Unset): + results_item = UNSET + else: + results_item = Tracker.from_dict(_results_item) + + results.append(results_item) + + def _parse_next_(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + next_ = _parse_next_(d.pop("next", UNSET)) + + def _parse_previous(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + previous = _parse_previous(d.pop("previous", UNSET)) + + # } _dt = d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -122,9 +164,9 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: def get_fields(): return { "count": int, - "results": List[Tracker], - "next": str, - "previous": str, + "results": list["Tracker"], + "next": 
Union[None, str], + "previous": Union[None, str], "dt": datetime.datetime, "env": str, "revision": str, @@ -132,7 +174,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_trackers_retrieve_response_200.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_trackers_retrieve_response_200.py index 160963a..e3d398c 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_trackers_retrieve_response_200.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_trackers_retrieve_response_200.py @@ -1,66 +1,100 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union, cast +from typing import TYPE_CHECKING, Any, TypeVar, Union +from uuid import UUID -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.alert import Alert -from ..models.erratum import Erratum from ..models.tracker_type import TrackerType from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.alert import Alert + from ..models.erratum import Erratum + + T = TypeVar("T", bound="OsidbApiV1TrackersRetrieveResponse200") -@attr.s(auto_attribs=True) +@_attrs_define class OsidbApiV1TrackersRetrieveResponse200(OSIDBModel): - """ """ - - errata: List[Erratum] + """ + Attributes: + errata (list['Erratum']): + external_system_id (str): + status (str): + resolution (str): + type_ (TrackerType): + uuid (UUID): + embargoed (bool): The embargoed boolean attribute is technically read-only as it just indirectly modifies the + ACLs but is mandatory as it controls the access to the resource. + alerts (list['Alert']): + created_dt (datetime.datetime): + updated_dt (datetime.datetime): The updated_dt timestamp attribute is mandatory on update as it is used to + detect mit-air collisions. + affects (Union[Unset, list[UUID]]): + ps_update_stream (Union[Unset, str]): + sync_to_bz (Union[Unset, bool]): Setting sync_to_bz to false disables flaw sync with Bugzilla after this + operation. Use only as part of bulk actions and trigger a flaw bugzilla sync afterwards. Does nothing if BZ is + disabled. 
+ dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ + + errata: list["Erratum"] external_system_id: str status: str resolution: str - type: TrackerType - uuid: str + type_: TrackerType + uuid: UUID embargoed: bool - alerts: List[Alert] + alerts: list["Alert"] created_dt: datetime.datetime updated_dt: datetime.datetime - affects: Union[Unset, List[str]] = UNSET + affects: Union[Unset, list[UUID]] = UNSET ps_update_stream: Union[Unset, str] = UNSET sync_to_bz: Union[Unset, bool] = UNSET dt: Union[Unset, datetime.datetime] = UNSET env: Union[Unset, str] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: - errata: List[Dict[str, Any]] = UNSET + def to_dict(self) -> dict[str, Any]: + errata: list[dict[str, Any]] = UNSET if not isinstance(self.errata, Unset): errata = [] for errata_item_data in self.errata: - errata_item: Dict[str, Any] = UNSET + errata_item: dict[str, Any] = UNSET if not isinstance(errata_item_data, Unset): errata_item = errata_item_data.to_dict() errata.append(errata_item) external_system_id = self.external_system_id + status = self.status + resolution = self.resolution - type: str = UNSET - if not isinstance(self.type, Unset): - type = TrackerType(self.type).value + type_: str = UNSET + if not isinstance(self.type_, Unset): + type_ = TrackerType(self.type_).value + + uuid: str = UNSET + if not isinstance(self.uuid, Unset): + uuid = str(self.uuid) - uuid = self.uuid embargoed = self.embargoed - alerts: List[Dict[str, Any]] = UNSET + + alerts: list[dict[str, Any]] = UNSET if not isinstance(self.alerts, Unset): alerts = [] for alerts_item_data in self.alerts: - alerts_item: Dict[str, Any] = UNSET + alerts_item: dict[str, Any] = UNSET if not isinstance(alerts_item_data, Unset): alerts_item = alerts_item_data.to_dict() @@ -74,21 +108,31 @@ def to_dict(self) -> Dict[str, Any]: if not isinstance(self.updated_dt, Unset): updated_dt = self.updated_dt.isoformat() - affects: Union[Unset, List[str]] = UNSET + affects: Union[Unset, list[str]] = UNSET if not isinstance(self.affects, Unset): - affects = self.affects + affects = [] + for affects_item_data in self.affects: + affects_item: str = UNSET + if not isinstance(affects_item_data, Unset): + affects_item = str(affects_item_data) + + affects.append(affects_item) ps_update_stream = self.ps_update_stream + sync_to_bz = self.sync_to_bz + dt: Union[Unset, str] = UNSET if not isinstance(self.dt, Unset): dt = self.dt.isoformat() env = self.env + revision = self.revision + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(errata, Unset): field_dict["errata"] = errata @@ -98,8 +142,8 @@ def to_dict(self) -> Dict[str, Any]: field_dict["status"] = status if not isinstance(resolution, Unset): field_dict["resolution"] = resolution - if not isinstance(type, Unset): - field_dict["type"] = type + if not isinstance(type_, Unset): + field_dict["type"] = type_ if not isinstance(uuid, Unset): field_dict["uuid"] = uuid if not isinstance(embargoed, Unset): @@ -128,22 +172,23 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: 
dict[str, Any]) -> T: + from ..models.alert import Alert + from ..models.erratum import Erratum + d = src_dict.copy() errata = [] _errata = d.pop("errata", UNSET) - if _errata is UNSET: - errata = UNSET - else: - for errata_item_data in _errata or []: - _errata_item = errata_item_data - errata_item: Erratum - if isinstance(_errata_item, Unset): - errata_item = UNSET - else: - errata_item = Erratum.from_dict(_errata_item) + for errata_item_data in _errata or []: + # } + _errata_item = errata_item_data + errata_item: Erratum + if isinstance(_errata_item, Unset): + errata_item = UNSET + else: + errata_item = Erratum.from_dict(_errata_item) - errata.append(errata_item) + errata.append(errata_item) external_system_id = d.pop("external_system_id", UNSET) @@ -151,32 +196,38 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: resolution = d.pop("resolution", UNSET) - _type = d.pop("type", UNSET) - type: TrackerType - if isinstance(_type, Unset): - type = UNSET + # } + _type_ = d.pop("type", UNSET) + type_: TrackerType + if isinstance(_type_, Unset): + type_ = UNSET else: - type = TrackerType(_type) + type_ = TrackerType(_type_) - uuid = d.pop("uuid", UNSET) + # } + _uuid = d.pop("uuid", UNSET) + uuid: UUID + if isinstance(_uuid, Unset): + uuid = UNSET + else: + uuid = UUID(_uuid) embargoed = d.pop("embargoed", UNSET) alerts = [] _alerts = d.pop("alerts", UNSET) - if _alerts is UNSET: - alerts = UNSET - else: - for alerts_item_data in _alerts or []: - _alerts_item = alerts_item_data - alerts_item: Alert - if isinstance(_alerts_item, Unset): - alerts_item = UNSET - else: - alerts_item = Alert.from_dict(_alerts_item) - - alerts.append(alerts_item) - + for alerts_item_data in _alerts or []: + # } + _alerts_item = alerts_item_data + alerts_item: Alert + if isinstance(_alerts_item, Unset): + alerts_item = UNSET + else: + alerts_item = Alert.from_dict(_alerts_item) + + alerts.append(alerts_item) + + # } _created_dt = d.pop("created_dt", UNSET) created_dt: datetime.datetime if isinstance(_created_dt, Unset): @@ -184,6 +235,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: created_dt = isoparse(_created_dt) + # } _updated_dt = d.pop("updated_dt", UNSET) updated_dt: datetime.datetime if isinstance(_updated_dt, Unset): @@ -191,12 +243,24 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: updated_dt = isoparse(_updated_dt) - affects = cast(List[str], d.pop("affects", UNSET)) + affects = [] + _affects = d.pop("affects", UNSET) + for affects_item_data in _affects or []: + # } + _affects_item = affects_item_data + affects_item: UUID + if isinstance(_affects_item, Unset): + affects_item = UNSET + else: + affects_item = UUID(_affects_item) + + affects.append(affects_item) ps_update_stream = d.pop("ps_update_stream", UNSET) sync_to_bz = d.pop("sync_to_bz", UNSET) + # } _dt = d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -215,7 +279,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: external_system_id=external_system_id, status=status, resolution=resolution, - type=type, + type_=type_, uuid=uuid, embargoed=embargoed, alerts=alerts, @@ -236,17 +300,17 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: @staticmethod def get_fields(): return { - "errata": List[Erratum], + "errata": list["Erratum"], "external_system_id": str, "status": str, "resolution": str, "type": TrackerType, - "uuid": str, + "uuid": UUID, "embargoed": bool, - "alerts": List[Alert], + "alerts": list["Alert"], "created_dt": 
datetime.datetime, "updated_dt": datetime.datetime, - "affects": List[str], + "affects": list[UUID], "ps_update_stream": str, "sync_to_bz": bool, "dt": datetime.datetime, @@ -256,7 +320,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/osidb_api_v1_trackers_update_response_200.py b/osidb_bindings/bindings/python_client/models/osidb_api_v1_trackers_update_response_200.py index fbfab9f..6635b23 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_api_v1_trackers_update_response_200.py +++ b/osidb_bindings/bindings/python_client/models/osidb_api_v1_trackers_update_response_200.py @@ -1,66 +1,100 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union, cast +from typing import TYPE_CHECKING, Any, TypeVar, Union +from uuid import UUID -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.alert import Alert -from ..models.erratum import Erratum from ..models.tracker_type import TrackerType from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.alert import Alert + from ..models.erratum import Erratum + + T = TypeVar("T", bound="OsidbApiV1TrackersUpdateResponse200") -@attr.s(auto_attribs=True) +@_attrs_define class OsidbApiV1TrackersUpdateResponse200(OSIDBModel): - """ """ - - errata: List[Erratum] + """ + Attributes: + errata (list['Erratum']): + external_system_id (str): + status (str): + resolution (str): + type_ (TrackerType): + uuid (UUID): + embargoed (bool): The embargoed boolean attribute is technically read-only as it just indirectly modifies the + ACLs but is mandatory as it controls the access to the resource. + alerts (list['Alert']): + created_dt (datetime.datetime): + updated_dt (datetime.datetime): The updated_dt timestamp attribute is mandatory on update as it is used to + detect mit-air collisions. + affects (Union[Unset, list[UUID]]): + ps_update_stream (Union[Unset, str]): + sync_to_bz (Union[Unset, bool]): Setting sync_to_bz to false disables flaw sync with Bugzilla after this + operation. Use only as part of bulk actions and trigger a flaw bugzilla sync afterwards. Does nothing if BZ is + disabled. 
+ dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ + + errata: list["Erratum"] external_system_id: str status: str resolution: str - type: TrackerType - uuid: str + type_: TrackerType + uuid: UUID embargoed: bool - alerts: List[Alert] + alerts: list["Alert"] created_dt: datetime.datetime updated_dt: datetime.datetime - affects: Union[Unset, List[str]] = UNSET + affects: Union[Unset, list[UUID]] = UNSET ps_update_stream: Union[Unset, str] = UNSET sync_to_bz: Union[Unset, bool] = UNSET dt: Union[Unset, datetime.datetime] = UNSET env: Union[Unset, str] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: - errata: List[Dict[str, Any]] = UNSET + def to_dict(self) -> dict[str, Any]: + errata: list[dict[str, Any]] = UNSET if not isinstance(self.errata, Unset): errata = [] for errata_item_data in self.errata: - errata_item: Dict[str, Any] = UNSET + errata_item: dict[str, Any] = UNSET if not isinstance(errata_item_data, Unset): errata_item = errata_item_data.to_dict() errata.append(errata_item) external_system_id = self.external_system_id + status = self.status + resolution = self.resolution - type: str = UNSET - if not isinstance(self.type, Unset): - type = TrackerType(self.type).value + type_: str = UNSET + if not isinstance(self.type_, Unset): + type_ = TrackerType(self.type_).value + + uuid: str = UNSET + if not isinstance(self.uuid, Unset): + uuid = str(self.uuid) - uuid = self.uuid embargoed = self.embargoed - alerts: List[Dict[str, Any]] = UNSET + + alerts: list[dict[str, Any]] = UNSET if not isinstance(self.alerts, Unset): alerts = [] for alerts_item_data in self.alerts: - alerts_item: Dict[str, Any] = UNSET + alerts_item: dict[str, Any] = UNSET if not isinstance(alerts_item_data, Unset): alerts_item = alerts_item_data.to_dict() @@ -74,21 +108,31 @@ def to_dict(self) -> Dict[str, Any]: if not isinstance(self.updated_dt, Unset): updated_dt = self.updated_dt.isoformat() - affects: Union[Unset, List[str]] = UNSET + affects: Union[Unset, list[str]] = UNSET if not isinstance(self.affects, Unset): - affects = self.affects + affects = [] + for affects_item_data in self.affects: + affects_item: str = UNSET + if not isinstance(affects_item_data, Unset): + affects_item = str(affects_item_data) + + affects.append(affects_item) ps_update_stream = self.ps_update_stream + sync_to_bz = self.sync_to_bz + dt: Union[Unset, str] = UNSET if not isinstance(self.dt, Unset): dt = self.dt.isoformat() env = self.env + revision = self.revision + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(errata, Unset): field_dict["errata"] = errata @@ -98,8 +142,8 @@ def to_dict(self) -> Dict[str, Any]: field_dict["status"] = status if not isinstance(resolution, Unset): field_dict["resolution"] = resolution - if not isinstance(type, Unset): - field_dict["type"] = type + if not isinstance(type_, Unset): + field_dict["type"] = type_ if not isinstance(uuid, Unset): field_dict["uuid"] = uuid if not isinstance(embargoed, Unset): @@ -128,22 +172,23 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: 
dict[str, Any]) -> T: + from ..models.alert import Alert + from ..models.erratum import Erratum + d = src_dict.copy() errata = [] _errata = d.pop("errata", UNSET) - if _errata is UNSET: - errata = UNSET - else: - for errata_item_data in _errata or []: - _errata_item = errata_item_data - errata_item: Erratum - if isinstance(_errata_item, Unset): - errata_item = UNSET - else: - errata_item = Erratum.from_dict(_errata_item) + for errata_item_data in _errata or []: + # } + _errata_item = errata_item_data + errata_item: Erratum + if isinstance(_errata_item, Unset): + errata_item = UNSET + else: + errata_item = Erratum.from_dict(_errata_item) - errata.append(errata_item) + errata.append(errata_item) external_system_id = d.pop("external_system_id", UNSET) @@ -151,32 +196,38 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: resolution = d.pop("resolution", UNSET) - _type = d.pop("type", UNSET) - type: TrackerType - if isinstance(_type, Unset): - type = UNSET + # } + _type_ = d.pop("type", UNSET) + type_: TrackerType + if isinstance(_type_, Unset): + type_ = UNSET else: - type = TrackerType(_type) + type_ = TrackerType(_type_) - uuid = d.pop("uuid", UNSET) + # } + _uuid = d.pop("uuid", UNSET) + uuid: UUID + if isinstance(_uuid, Unset): + uuid = UNSET + else: + uuid = UUID(_uuid) embargoed = d.pop("embargoed", UNSET) alerts = [] _alerts = d.pop("alerts", UNSET) - if _alerts is UNSET: - alerts = UNSET - else: - for alerts_item_data in _alerts or []: - _alerts_item = alerts_item_data - alerts_item: Alert - if isinstance(_alerts_item, Unset): - alerts_item = UNSET - else: - alerts_item = Alert.from_dict(_alerts_item) - - alerts.append(alerts_item) - + for alerts_item_data in _alerts or []: + # } + _alerts_item = alerts_item_data + alerts_item: Alert + if isinstance(_alerts_item, Unset): + alerts_item = UNSET + else: + alerts_item = Alert.from_dict(_alerts_item) + + alerts.append(alerts_item) + + # } _created_dt = d.pop("created_dt", UNSET) created_dt: datetime.datetime if isinstance(_created_dt, Unset): @@ -184,6 +235,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: created_dt = isoparse(_created_dt) + # } _updated_dt = d.pop("updated_dt", UNSET) updated_dt: datetime.datetime if isinstance(_updated_dt, Unset): @@ -191,12 +243,24 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: updated_dt = isoparse(_updated_dt) - affects = cast(List[str], d.pop("affects", UNSET)) + affects = [] + _affects = d.pop("affects", UNSET) + for affects_item_data in _affects or []: + # } + _affects_item = affects_item_data + affects_item: UUID + if isinstance(_affects_item, Unset): + affects_item = UNSET + else: + affects_item = UUID(_affects_item) + + affects.append(affects_item) ps_update_stream = d.pop("ps_update_stream", UNSET) sync_to_bz = d.pop("sync_to_bz", UNSET) + # } _dt = d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -215,7 +279,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: external_system_id=external_system_id, status=status, resolution=resolution, - type=type, + type_=type_, uuid=uuid, embargoed=embargoed, alerts=alerts, @@ -236,17 +300,17 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: @staticmethod def get_fields(): return { - "errata": List[Erratum], + "errata": list["Erratum"], "external_system_id": str, "status": str, "resolution": str, "type": TrackerType, - "uuid": str, + "uuid": UUID, "embargoed": bool, - "alerts": List[Alert], + "alerts": list["Alert"], "created_dt": 
datetime.datetime, "updated_dt": datetime.datetime, - "affects": List[str], + "affects": list[UUID], "ps_update_stream": str, "sync_to_bz": bool, "dt": datetime.datetime, @@ -256,7 +320,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/osidb_healthy_retrieve_response_200.py b/osidb_bindings/bindings/python_client/models/osidb_healthy_retrieve_response_200.py index 8f60a1e..4a0b87f 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_healthy_retrieve_response_200.py +++ b/osidb_bindings/bindings/python_client/models/osidb_healthy_retrieve_response_200.py @@ -1,7 +1,8 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import Any, TypeVar, Union -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse from ..types import UNSET, OSIDBModel, Unset @@ -9,26 +10,34 @@ T = TypeVar("T", bound="OsidbHealthyRetrieveResponse200") -@attr.s(auto_attribs=True) +@_attrs_define class OsidbHealthyRetrieveResponse200(OSIDBModel): - """ """ + """ + Attributes: + dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ dt: Union[Unset, datetime.datetime] = UNSET env: Union[Unset, str] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: dt: Union[Unset, str] = UNSET if not isinstance(self.dt, Unset): dt = self.dt.isoformat() env = self.env + revision = self.revision + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(dt, Unset): field_dict["dt"] = dt @@ -42,8 +51,9 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: d = src_dict.copy() + # } _dt = d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -77,7 +87,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/osidb_whoami_retrieve_response_200.py b/osidb_bindings/bindings/python_client/models/osidb_whoami_retrieve_response_200.py index 8af997a..672f9d8 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_whoami_retrieve_response_200.py +++ b/osidb_bindings/bindings/python_client/models/osidb_whoami_retrieve_response_200.py @@ -1,51 +1,69 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union, cast +from typing import TYPE_CHECKING, Any, TypeVar, Union, cast -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.osidb_whoami_retrieve_response_200_profile import ( - OsidbWhoamiRetrieveResponse200Profile, -) from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from 
..models.osidb_whoami_retrieve_response_200_profile import ( + OsidbWhoamiRetrieveResponse200Profile, + ) + + T = TypeVar("T", bound="OsidbWhoamiRetrieveResponse200") -@attr.s(auto_attribs=True) +@_attrs_define class OsidbWhoamiRetrieveResponse200(OSIDBModel): - """ """ + """ + Attributes: + dt (Union[Unset, datetime.datetime]): + email (Union[Unset, str]): + env (Union[Unset, str]): + groups (Union[Unset, list[str]]): + profile (Union[Unset, OsidbWhoamiRetrieveResponse200Profile]): + revision (Union[Unset, str]): + username (Union[Unset, str]): + version (Union[Unset, str]): + """ dt: Union[Unset, datetime.datetime] = UNSET email: Union[Unset, str] = UNSET env: Union[Unset, str] = UNSET - groups: Union[Unset, List[str]] = UNSET - profile: Union[Unset, OsidbWhoamiRetrieveResponse200Profile] = UNSET + groups: Union[Unset, list[str]] = UNSET + profile: Union[Unset, "OsidbWhoamiRetrieveResponse200Profile"] = UNSET revision: Union[Unset, str] = UNSET username: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: dt: Union[Unset, str] = UNSET if not isinstance(self.dt, Unset): dt = self.dt.isoformat() email = self.email + env = self.env - groups: Union[Unset, List[str]] = UNSET + + groups: Union[Unset, list[str]] = UNSET if not isinstance(self.groups, Unset): groups = self.groups - profile: Union[Unset, Dict[str, Any]] = UNSET + profile: Union[Unset, dict[str, Any]] = UNSET if not isinstance(self.profile, Unset): profile = self.profile.to_dict() revision = self.revision + username = self.username + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(dt, Unset): field_dict["dt"] = dt @@ -67,8 +85,13 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.osidb_whoami_retrieve_response_200_profile import ( + OsidbWhoamiRetrieveResponse200Profile, + ) + d = src_dict.copy() + # } _dt = d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -80,8 +103,9 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: env = d.pop("env", UNSET) - groups = cast(List[str], d.pop("groups", UNSET)) + groups = cast(list[str], d.pop("groups", UNSET)) + # } _profile = d.pop("profile", UNSET) profile: Union[Unset, OsidbWhoamiRetrieveResponse200Profile] if isinstance(_profile, Unset): @@ -115,7 +139,7 @@ def get_fields(): "dt": datetime.datetime, "email": str, "env": str, - "groups": List[str], + "groups": list[str], "profile": OsidbWhoamiRetrieveResponse200Profile, "revision": str, "username": str, @@ -123,7 +147,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/osidb_whoami_retrieve_response_200_profile.py b/osidb_bindings/bindings/python_client/models/osidb_whoami_retrieve_response_200_profile.py index a554e68..96176f2 100644 --- a/osidb_bindings/bindings/python_client/models/osidb_whoami_retrieve_response_200_profile.py +++ 
b/osidb_bindings/bindings/python_client/models/osidb_whoami_retrieve_response_200_profile.py @@ -1,25 +1,31 @@ -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import Any, TypeVar, Union -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from ..types import UNSET, OSIDBModel, Unset T = TypeVar("T", bound="OsidbWhoamiRetrieveResponse200Profile") -@attr.s(auto_attribs=True) +@_attrs_define class OsidbWhoamiRetrieveResponse200Profile(OSIDBModel): - """ """ + """ + Attributes: + bz_user_id (Union[Unset, str]): + jira_user_id (Union[Unset, str]): + """ bz_user_id: Union[Unset, str] = UNSET jira_user_id: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: bz_user_id = self.bz_user_id + jira_user_id = self.jira_user_id - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(bz_user_id, Unset): field_dict["bz_user_id"] = bz_user_id @@ -29,7 +35,7 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: d = src_dict.copy() bz_user_id = d.pop("bz_user_id", UNSET) @@ -51,7 +57,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/package.py b/osidb_bindings/bindings/python_client/models/package.py index 5762e46..ad3abdf 100644 --- a/osidb_bindings/bindings/python_client/models/package.py +++ b/osidb_bindings/bindings/python_client/models/package.py @@ -1,46 +1,57 @@ -from typing import Any, Dict, List, Type, TypeVar +from typing import TYPE_CHECKING, Any, TypeVar -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field -from ..models.alert import Alert -from ..models.package_ver import PackageVer from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.alert import Alert + from ..models.package_ver import PackageVer + + T = TypeVar("T", bound="Package") -@attr.s(auto_attribs=True) +@_attrs_define class Package(OSIDBModel): - """package_versions (Package model) serializer for read-only use in FlawSerializer.""" + """package_versions (Package model) serializer for read-only use in FlawSerializer. 
+ + Attributes: + package (str): + versions (list['PackageVer']): + alerts (list['Alert']): + """ package: str - versions: List[PackageVer] - alerts: List[Alert] - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + versions: list["PackageVer"] + alerts: list["Alert"] + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: package = self.package - versions: List[Dict[str, Any]] = UNSET + + versions: list[dict[str, Any]] = UNSET if not isinstance(self.versions, Unset): versions = [] for versions_item_data in self.versions: - versions_item: Dict[str, Any] = UNSET + versions_item: dict[str, Any] = UNSET if not isinstance(versions_item_data, Unset): versions_item = versions_item_data.to_dict() versions.append(versions_item) - alerts: List[Dict[str, Any]] = UNSET + alerts: list[dict[str, Any]] = UNSET if not isinstance(self.alerts, Unset): alerts = [] for alerts_item_data in self.alerts: - alerts_item: Dict[str, Any] = UNSET + alerts_item: dict[str, Any] = UNSET if not isinstance(alerts_item_data, Unset): alerts_item = alerts_item_data.to_dict() alerts.append(alerts_item) - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(package, Unset): field_dict["package"] = package @@ -52,39 +63,38 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.alert import Alert + from ..models.package_ver import PackageVer + d = src_dict.copy() package = d.pop("package", UNSET) versions = [] _versions = d.pop("versions", UNSET) - if _versions is UNSET: - versions = UNSET - else: - for versions_item_data in _versions or []: - _versions_item = versions_item_data - versions_item: PackageVer - if isinstance(_versions_item, Unset): - versions_item = UNSET - else: - versions_item = PackageVer.from_dict(_versions_item) + for versions_item_data in _versions or []: + # } + _versions_item = versions_item_data + versions_item: PackageVer + if isinstance(_versions_item, Unset): + versions_item = UNSET + else: + versions_item = PackageVer.from_dict(_versions_item) - versions.append(versions_item) + versions.append(versions_item) alerts = [] _alerts = d.pop("alerts", UNSET) - if _alerts is UNSET: - alerts = UNSET - else: - for alerts_item_data in _alerts or []: - _alerts_item = alerts_item_data - alerts_item: Alert - if isinstance(_alerts_item, Unset): - alerts_item = UNSET - else: - alerts_item = Alert.from_dict(_alerts_item) + for alerts_item_data in _alerts or []: + # } + _alerts_item = alerts_item_data + alerts_item: Alert + if isinstance(_alerts_item, Unset): + alerts_item = UNSET + else: + alerts_item = Alert.from_dict(_alerts_item) - alerts.append(alerts_item) + alerts.append(alerts_item) package = cls( package=package, @@ -99,12 +109,12 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: def get_fields(): return { "package": str, - "versions": List[PackageVer], - "alerts": List[Alert], + "versions": list["PackageVer"], + "alerts": list["Alert"], } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/package_ver.py 
b/osidb_bindings/bindings/python_client/models/package_ver.py index 03914fd..657e43a 100644 --- a/osidb_bindings/bindings/python_client/models/package_ver.py +++ b/osidb_bindings/bindings/python_client/models/package_ver.py @@ -1,26 +1,33 @@ -from typing import Any, Dict, List, Type, TypeVar +from typing import Any, TypeVar -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from ..types import UNSET, OSIDBModel, Unset T = TypeVar("T", bound="PackageVer") -@attr.s(auto_attribs=True) +@_attrs_define class PackageVer(OSIDBModel): """PackageVer model serializer for read-only use in FlawSerializer via - PackageVerSerializer.""" + PackageVerSerializer. + + Attributes: + version (str): + status (str): Default: 'UNAFFECTED'. + """ version: str status: str = "UNAFFECTED" - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: version = self.version + status = self.status - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(version, Unset): field_dict["version"] = version @@ -30,7 +37,7 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: d = src_dict.copy() version = d.pop("version", UNSET) @@ -52,7 +59,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/paginated_affect_cvss_list.py b/osidb_bindings/bindings/python_client/models/paginated_affect_cvss_list.py index 402cc33..2ba84bc 100644 --- a/osidb_bindings/bindings/python_client/models/paginated_affect_cvss_list.py +++ b/osidb_bindings/bindings/python_client/models/paginated_affect_cvss_list.py @@ -1,39 +1,59 @@ -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union, cast -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field -from ..models.affect_cvss import AffectCVSS from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.affect_cvss import AffectCVSS + + T = TypeVar("T", bound="PaginatedAffectCVSSList") -@attr.s(auto_attribs=True) +@_attrs_define class PaginatedAffectCVSSList(OSIDBModel): - """ """ + """ + Attributes: + count (int): Example: 123. + results (list['AffectCVSS']): + next_ (Union[None, Unset, str]): Example: http://api.example.org/accounts/?offset=400&limit=100. + previous (Union[None, Unset, str]): Example: http://api.example.org/accounts/?offset=200&limit=100. 
+ """ count: int - results: List[AffectCVSS] - next_: Union[Unset, None, str] = UNSET - previous: Union[Unset, None, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + results: list["AffectCVSS"] + next_: Union[None, Unset, str] = UNSET + previous: Union[None, Unset, str] = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: count = self.count - results: List[Dict[str, Any]] = UNSET + + results: list[dict[str, Any]] = UNSET if not isinstance(self.results, Unset): results = [] for results_item_data in self.results: - results_item: Dict[str, Any] = UNSET + results_item: dict[str, Any] = UNSET if not isinstance(results_item_data, Unset): results_item = results_item_data.to_dict() results.append(results_item) - next_ = self.next_ - previous = self.previous + next_: Union[None, Unset, str] + if isinstance(self.next_, Unset): + next_ = UNSET + else: + next_ = self.next_ + + previous: Union[None, Unset, str] + if isinstance(self.previous, Unset): + previous = UNSET + else: + previous = self.previous - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(count, Unset): field_dict["count"] = count @@ -47,28 +67,42 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.affect_cvss import AffectCVSS + d = src_dict.copy() count = d.pop("count", UNSET) results = [] _results = d.pop("results", UNSET) - if _results is UNSET: - results = UNSET - else: - for results_item_data in _results or []: - _results_item = results_item_data - results_item: AffectCVSS - if isinstance(_results_item, Unset): - results_item = UNSET - else: - results_item = AffectCVSS.from_dict(_results_item) - - results.append(results_item) - - next_ = d.pop("next", UNSET) - - previous = d.pop("previous", UNSET) + for results_item_data in _results or []: + # } + _results_item = results_item_data + results_item: AffectCVSS + if isinstance(_results_item, Unset): + results_item = UNSET + else: + results_item = AffectCVSS.from_dict(_results_item) + + results.append(results_item) + + def _parse_next_(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + next_ = _parse_next_(d.pop("next", UNSET)) + + def _parse_previous(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + previous = _parse_previous(d.pop("previous", UNSET)) paginated_affect_cvss_list = cls( count=count, @@ -84,13 +118,13 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: def get_fields(): return { "count": int, - "results": List[AffectCVSS], - "next": str, - "previous": str, + "results": list["AffectCVSS"], + "next": Union[None, str], + "previous": Union[None, str], } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/paginated_affect_list.py b/osidb_bindings/bindings/python_client/models/paginated_affect_list.py index b1a3e14..e6fd75a 100644 --- 
a/osidb_bindings/bindings/python_client/models/paginated_affect_list.py +++ b/osidb_bindings/bindings/python_client/models/paginated_affect_list.py @@ -1,39 +1,59 @@ -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union, cast -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field -from ..models.affect import Affect from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.affect import Affect + + T = TypeVar("T", bound="PaginatedAffectList") -@attr.s(auto_attribs=True) +@_attrs_define class PaginatedAffectList(OSIDBModel): - """ """ + """ + Attributes: + count (int): Example: 123. + results (list['Affect']): + next_ (Union[None, Unset, str]): Example: http://api.example.org/accounts/?offset=400&limit=100. + previous (Union[None, Unset, str]): Example: http://api.example.org/accounts/?offset=200&limit=100. + """ count: int - results: List[Affect] - next_: Union[Unset, None, str] = UNSET - previous: Union[Unset, None, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + results: list["Affect"] + next_: Union[None, Unset, str] = UNSET + previous: Union[None, Unset, str] = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: count = self.count - results: List[Dict[str, Any]] = UNSET + + results: list[dict[str, Any]] = UNSET if not isinstance(self.results, Unset): results = [] for results_item_data in self.results: - results_item: Dict[str, Any] = UNSET + results_item: dict[str, Any] = UNSET if not isinstance(results_item_data, Unset): results_item = results_item_data.to_dict() results.append(results_item) - next_ = self.next_ - previous = self.previous + next_: Union[None, Unset, str] + if isinstance(self.next_, Unset): + next_ = UNSET + else: + next_ = self.next_ + + previous: Union[None, Unset, str] + if isinstance(self.previous, Unset): + previous = UNSET + else: + previous = self.previous - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(count, Unset): field_dict["count"] = count @@ -47,28 +67,42 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.affect import Affect + d = src_dict.copy() count = d.pop("count", UNSET) results = [] _results = d.pop("results", UNSET) - if _results is UNSET: - results = UNSET - else: - for results_item_data in _results or []: - _results_item = results_item_data - results_item: Affect - if isinstance(_results_item, Unset): - results_item = UNSET - else: - results_item = Affect.from_dict(_results_item) - - results.append(results_item) - - next_ = d.pop("next", UNSET) - - previous = d.pop("previous", UNSET) + for results_item_data in _results or []: + # } + _results_item = results_item_data + results_item: Affect + if isinstance(_results_item, Unset): + results_item = UNSET + else: + results_item = Affect.from_dict(_results_item) + + results.append(results_item) + + def _parse_next_(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + next_ = _parse_next_(d.pop("next", UNSET)) + + def _parse_previous(data: object) -> Union[None, Unset, str]: + 
if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + previous = _parse_previous(d.pop("previous", UNSET)) paginated_affect_list = cls( count=count, @@ -84,13 +118,13 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: def get_fields(): return { "count": int, - "results": List[Affect], - "next": str, - "previous": str, + "results": list["Affect"], + "next": Union[None, str], + "previous": Union[None, str], } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/paginated_alert_list.py b/osidb_bindings/bindings/python_client/models/paginated_alert_list.py index 2fcb806..89c1ac4 100644 --- a/osidb_bindings/bindings/python_client/models/paginated_alert_list.py +++ b/osidb_bindings/bindings/python_client/models/paginated_alert_list.py @@ -1,39 +1,59 @@ -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union, cast -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field -from ..models.alert import Alert from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.alert import Alert + + T = TypeVar("T", bound="PaginatedAlertList") -@attr.s(auto_attribs=True) +@_attrs_define class PaginatedAlertList(OSIDBModel): - """ """ + """ + Attributes: + count (int): Example: 123. + results (list['Alert']): + next_ (Union[None, Unset, str]): Example: http://api.example.org/accounts/?offset=400&limit=100. + previous (Union[None, Unset, str]): Example: http://api.example.org/accounts/?offset=200&limit=100. 
+ """ count: int - results: List[Alert] - next_: Union[Unset, None, str] = UNSET - previous: Union[Unset, None, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + results: list["Alert"] + next_: Union[None, Unset, str] = UNSET + previous: Union[None, Unset, str] = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: count = self.count - results: List[Dict[str, Any]] = UNSET + + results: list[dict[str, Any]] = UNSET if not isinstance(self.results, Unset): results = [] for results_item_data in self.results: - results_item: Dict[str, Any] = UNSET + results_item: dict[str, Any] = UNSET if not isinstance(results_item_data, Unset): results_item = results_item_data.to_dict() results.append(results_item) - next_ = self.next_ - previous = self.previous + next_: Union[None, Unset, str] + if isinstance(self.next_, Unset): + next_ = UNSET + else: + next_ = self.next_ + + previous: Union[None, Unset, str] + if isinstance(self.previous, Unset): + previous = UNSET + else: + previous = self.previous - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(count, Unset): field_dict["count"] = count @@ -47,28 +67,42 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.alert import Alert + d = src_dict.copy() count = d.pop("count", UNSET) results = [] _results = d.pop("results", UNSET) - if _results is UNSET: - results = UNSET - else: - for results_item_data in _results or []: - _results_item = results_item_data - results_item: Alert - if isinstance(_results_item, Unset): - results_item = UNSET - else: - results_item = Alert.from_dict(_results_item) - - results.append(results_item) - - next_ = d.pop("next", UNSET) - - previous = d.pop("previous", UNSET) + for results_item_data in _results or []: + # } + _results_item = results_item_data + results_item: Alert + if isinstance(_results_item, Unset): + results_item = UNSET + else: + results_item = Alert.from_dict(_results_item) + + results.append(results_item) + + def _parse_next_(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + next_ = _parse_next_(d.pop("next", UNSET)) + + def _parse_previous(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + previous = _parse_previous(d.pop("previous", UNSET)) paginated_alert_list = cls( count=count, @@ -84,13 +118,13 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: def get_fields(): return { "count": int, - "results": List[Alert], - "next": str, - "previous": str, + "results": list["Alert"], + "next": Union[None, str], + "previous": Union[None, str], } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/paginated_audit_list.py b/osidb_bindings/bindings/python_client/models/paginated_audit_list.py index 5395aaa..ae262d6 100644 --- a/osidb_bindings/bindings/python_client/models/paginated_audit_list.py +++ 
b/osidb_bindings/bindings/python_client/models/paginated_audit_list.py @@ -1,39 +1,59 @@ -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union, cast -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field -from ..models.audit import Audit from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.audit import Audit + + T = TypeVar("T", bound="PaginatedAuditList") -@attr.s(auto_attribs=True) +@_attrs_define class PaginatedAuditList(OSIDBModel): - """ """ + """ + Attributes: + count (int): Example: 123. + results (list['Audit']): + next_ (Union[None, Unset, str]): Example: http://api.example.org/accounts/?offset=400&limit=100. + previous (Union[None, Unset, str]): Example: http://api.example.org/accounts/?offset=200&limit=100. + """ count: int - results: List[Audit] - next_: Union[Unset, None, str] = UNSET - previous: Union[Unset, None, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + results: list["Audit"] + next_: Union[None, Unset, str] = UNSET + previous: Union[None, Unset, str] = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: count = self.count - results: List[Dict[str, Any]] = UNSET + + results: list[dict[str, Any]] = UNSET if not isinstance(self.results, Unset): results = [] for results_item_data in self.results: - results_item: Dict[str, Any] = UNSET + results_item: dict[str, Any] = UNSET if not isinstance(results_item_data, Unset): results_item = results_item_data.to_dict() results.append(results_item) - next_ = self.next_ - previous = self.previous + next_: Union[None, Unset, str] + if isinstance(self.next_, Unset): + next_ = UNSET + else: + next_ = self.next_ + + previous: Union[None, Unset, str] + if isinstance(self.previous, Unset): + previous = UNSET + else: + previous = self.previous - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(count, Unset): field_dict["count"] = count @@ -47,28 +67,42 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.audit import Audit + d = src_dict.copy() count = d.pop("count", UNSET) results = [] _results = d.pop("results", UNSET) - if _results is UNSET: - results = UNSET - else: - for results_item_data in _results or []: - _results_item = results_item_data - results_item: Audit - if isinstance(_results_item, Unset): - results_item = UNSET - else: - results_item = Audit.from_dict(_results_item) - - results.append(results_item) - - next_ = d.pop("next", UNSET) - - previous = d.pop("previous", UNSET) + for results_item_data in _results or []: + # } + _results_item = results_item_data + results_item: Audit + if isinstance(_results_item, Unset): + results_item = UNSET + else: + results_item = Audit.from_dict(_results_item) + + results.append(results_item) + + def _parse_next_(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + next_ = _parse_next_(d.pop("next", UNSET)) + + def _parse_previous(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return 
cast(Union[None, Unset, str], data) + + previous = _parse_previous(d.pop("previous", UNSET)) paginated_audit_list = cls( count=count, @@ -84,13 +118,13 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: def get_fields(): return { "count": int, - "results": List[Audit], - "next": str, - "previous": str, + "results": list["Audit"], + "next": Union[None, str], + "previous": Union[None, str], } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/paginated_epss_list.py b/osidb_bindings/bindings/python_client/models/paginated_epss_list.py index de02a9d..5daf72a 100644 --- a/osidb_bindings/bindings/python_client/models/paginated_epss_list.py +++ b/osidb_bindings/bindings/python_client/models/paginated_epss_list.py @@ -1,39 +1,59 @@ -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union, cast -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field -from ..models.epss import EPSS from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.epss import EPSS + + T = TypeVar("T", bound="PaginatedEPSSList") -@attr.s(auto_attribs=True) +@_attrs_define class PaginatedEPSSList(OSIDBModel): - """ """ + """ + Attributes: + count (int): Example: 123. + results (list['EPSS']): + next_ (Union[None, Unset, str]): Example: http://api.example.org/accounts/?offset=400&limit=100. + previous (Union[None, Unset, str]): Example: http://api.example.org/accounts/?offset=200&limit=100. + """ count: int - results: List[EPSS] - next_: Union[Unset, None, str] = UNSET - previous: Union[Unset, None, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + results: list["EPSS"] + next_: Union[None, Unset, str] = UNSET + previous: Union[None, Unset, str] = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: count = self.count - results: List[Dict[str, Any]] = UNSET + + results: list[dict[str, Any]] = UNSET if not isinstance(self.results, Unset): results = [] for results_item_data in self.results: - results_item: Dict[str, Any] = UNSET + results_item: dict[str, Any] = UNSET if not isinstance(results_item_data, Unset): results_item = results_item_data.to_dict() results.append(results_item) - next_ = self.next_ - previous = self.previous + next_: Union[None, Unset, str] + if isinstance(self.next_, Unset): + next_ = UNSET + else: + next_ = self.next_ + + previous: Union[None, Unset, str] + if isinstance(self.previous, Unset): + previous = UNSET + else: + previous = self.previous - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(count, Unset): field_dict["count"] = count @@ -47,28 +67,42 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.epss import EPSS + d = src_dict.copy() count = d.pop("count", UNSET) results = [] _results = d.pop("results", UNSET) - if _results is UNSET: - results = UNSET - else: - for results_item_data in _results or []: - _results_item = results_item_data - results_item: EPSS - if isinstance(_results_item, Unset): - 
results_item = UNSET - else: - results_item = EPSS.from_dict(_results_item) - - results.append(results_item) - - next_ = d.pop("next", UNSET) - - previous = d.pop("previous", UNSET) + for results_item_data in _results or []: + # } + _results_item = results_item_data + results_item: EPSS + if isinstance(_results_item, Unset): + results_item = UNSET + else: + results_item = EPSS.from_dict(_results_item) + + results.append(results_item) + + def _parse_next_(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + next_ = _parse_next_(d.pop("next", UNSET)) + + def _parse_previous(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + previous = _parse_previous(d.pop("previous", UNSET)) paginated_epss_list = cls( count=count, @@ -84,13 +118,13 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: def get_fields(): return { "count": int, - "results": List[EPSS], - "next": str, - "previous": str, + "results": list["EPSS"], + "next": Union[None, str], + "previous": Union[None, str], } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/paginated_exploit_only_report_data_list.py b/osidb_bindings/bindings/python_client/models/paginated_exploit_only_report_data_list.py index 99e23e4..edcff9c 100644 --- a/osidb_bindings/bindings/python_client/models/paginated_exploit_only_report_data_list.py +++ b/osidb_bindings/bindings/python_client/models/paginated_exploit_only_report_data_list.py @@ -1,39 +1,59 @@ -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union, cast -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field -from ..models.exploit_only_report_data import ExploitOnlyReportData from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.exploit_only_report_data import ExploitOnlyReportData + + T = TypeVar("T", bound="PaginatedExploitOnlyReportDataList") -@attr.s(auto_attribs=True) +@_attrs_define class PaginatedExploitOnlyReportDataList(OSIDBModel): - """ """ + """ + Attributes: + count (int): Example: 123. + results (list['ExploitOnlyReportData']): + next_ (Union[None, Unset, str]): Example: http://api.example.org/accounts/?offset=400&limit=100. + previous (Union[None, Unset, str]): Example: http://api.example.org/accounts/?offset=200&limit=100. 
+ """ count: int - results: List[ExploitOnlyReportData] - next_: Union[Unset, None, str] = UNSET - previous: Union[Unset, None, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + results: list["ExploitOnlyReportData"] + next_: Union[None, Unset, str] = UNSET + previous: Union[None, Unset, str] = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: count = self.count - results: List[Dict[str, Any]] = UNSET + + results: list[dict[str, Any]] = UNSET if not isinstance(self.results, Unset): results = [] for results_item_data in self.results: - results_item: Dict[str, Any] = UNSET + results_item: dict[str, Any] = UNSET if not isinstance(results_item_data, Unset): results_item = results_item_data.to_dict() results.append(results_item) - next_ = self.next_ - previous = self.previous + next_: Union[None, Unset, str] + if isinstance(self.next_, Unset): + next_ = UNSET + else: + next_ = self.next_ + + previous: Union[None, Unset, str] + if isinstance(self.previous, Unset): + previous = UNSET + else: + previous = self.previous - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(count, Unset): field_dict["count"] = count @@ -47,28 +67,42 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.exploit_only_report_data import ExploitOnlyReportData + d = src_dict.copy() count = d.pop("count", UNSET) results = [] _results = d.pop("results", UNSET) - if _results is UNSET: - results = UNSET - else: - for results_item_data in _results or []: - _results_item = results_item_data - results_item: ExploitOnlyReportData - if isinstance(_results_item, Unset): - results_item = UNSET - else: - results_item = ExploitOnlyReportData.from_dict(_results_item) - - results.append(results_item) - - next_ = d.pop("next", UNSET) - - previous = d.pop("previous", UNSET) + for results_item_data in _results or []: + # } + _results_item = results_item_data + results_item: ExploitOnlyReportData + if isinstance(_results_item, Unset): + results_item = UNSET + else: + results_item = ExploitOnlyReportData.from_dict(_results_item) + + results.append(results_item) + + def _parse_next_(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + next_ = _parse_next_(d.pop("next", UNSET)) + + def _parse_previous(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + previous = _parse_previous(d.pop("previous", UNSET)) paginated_exploit_only_report_data_list = cls( count=count, @@ -84,13 +118,13 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: def get_fields(): return { "count": int, - "results": List[ExploitOnlyReportData], - "next": str, - "previous": str, + "results": list["ExploitOnlyReportData"], + "next": Union[None, str], + "previous": Union[None, str], } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/paginated_flaw_acknowledgment_list.py 
b/osidb_bindings/bindings/python_client/models/paginated_flaw_acknowledgment_list.py index a5f2169..f476d73 100644 --- a/osidb_bindings/bindings/python_client/models/paginated_flaw_acknowledgment_list.py +++ b/osidb_bindings/bindings/python_client/models/paginated_flaw_acknowledgment_list.py @@ -1,39 +1,59 @@ -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union, cast -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field -from ..models.flaw_acknowledgment import FlawAcknowledgment from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.flaw_acknowledgment import FlawAcknowledgment + + T = TypeVar("T", bound="PaginatedFlawAcknowledgmentList") -@attr.s(auto_attribs=True) +@_attrs_define class PaginatedFlawAcknowledgmentList(OSIDBModel): - """ """ + """ + Attributes: + count (int): Example: 123. + results (list['FlawAcknowledgment']): + next_ (Union[None, Unset, str]): Example: http://api.example.org/accounts/?offset=400&limit=100. + previous (Union[None, Unset, str]): Example: http://api.example.org/accounts/?offset=200&limit=100. + """ count: int - results: List[FlawAcknowledgment] - next_: Union[Unset, None, str] = UNSET - previous: Union[Unset, None, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + results: list["FlawAcknowledgment"] + next_: Union[None, Unset, str] = UNSET + previous: Union[None, Unset, str] = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: count = self.count - results: List[Dict[str, Any]] = UNSET + + results: list[dict[str, Any]] = UNSET if not isinstance(self.results, Unset): results = [] for results_item_data in self.results: - results_item: Dict[str, Any] = UNSET + results_item: dict[str, Any] = UNSET if not isinstance(results_item_data, Unset): results_item = results_item_data.to_dict() results.append(results_item) - next_ = self.next_ - previous = self.previous + next_: Union[None, Unset, str] + if isinstance(self.next_, Unset): + next_ = UNSET + else: + next_ = self.next_ + + previous: Union[None, Unset, str] + if isinstance(self.previous, Unset): + previous = UNSET + else: + previous = self.previous - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(count, Unset): field_dict["count"] = count @@ -47,28 +67,42 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.flaw_acknowledgment import FlawAcknowledgment + d = src_dict.copy() count = d.pop("count", UNSET) results = [] _results = d.pop("results", UNSET) - if _results is UNSET: - results = UNSET - else: - for results_item_data in _results or []: - _results_item = results_item_data - results_item: FlawAcknowledgment - if isinstance(_results_item, Unset): - results_item = UNSET - else: - results_item = FlawAcknowledgment.from_dict(_results_item) - - results.append(results_item) - - next_ = d.pop("next", UNSET) - - previous = d.pop("previous", UNSET) + for results_item_data in _results or []: + # } + _results_item = results_item_data + results_item: FlawAcknowledgment + if isinstance(_results_item, Unset): + results_item = UNSET + else: + results_item = FlawAcknowledgment.from_dict(_results_item) + + 
results.append(results_item) + + def _parse_next_(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + next_ = _parse_next_(d.pop("next", UNSET)) + + def _parse_previous(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + previous = _parse_previous(d.pop("previous", UNSET)) paginated_flaw_acknowledgment_list = cls( count=count, @@ -84,13 +118,13 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: def get_fields(): return { "count": int, - "results": List[FlawAcknowledgment], - "next": str, - "previous": str, + "results": list["FlawAcknowledgment"], + "next": Union[None, str], + "previous": Union[None, str], } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/paginated_flaw_comment_list.py b/osidb_bindings/bindings/python_client/models/paginated_flaw_comment_list.py index 4fffa4d..b92e6b3 100644 --- a/osidb_bindings/bindings/python_client/models/paginated_flaw_comment_list.py +++ b/osidb_bindings/bindings/python_client/models/paginated_flaw_comment_list.py @@ -1,39 +1,59 @@ -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union, cast -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field -from ..models.flaw_comment import FlawComment from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.flaw_comment import FlawComment + + T = TypeVar("T", bound="PaginatedFlawCommentList") -@attr.s(auto_attribs=True) +@_attrs_define class PaginatedFlawCommentList(OSIDBModel): - """ """ + """ + Attributes: + count (int): Example: 123. + results (list['FlawComment']): + next_ (Union[None, Unset, str]): Example: http://api.example.org/accounts/?offset=400&limit=100. + previous (Union[None, Unset, str]): Example: http://api.example.org/accounts/?offset=200&limit=100. 
+ """ count: int - results: List[FlawComment] - next_: Union[Unset, None, str] = UNSET - previous: Union[Unset, None, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + results: list["FlawComment"] + next_: Union[None, Unset, str] = UNSET + previous: Union[None, Unset, str] = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: count = self.count - results: List[Dict[str, Any]] = UNSET + + results: list[dict[str, Any]] = UNSET if not isinstance(self.results, Unset): results = [] for results_item_data in self.results: - results_item: Dict[str, Any] = UNSET + results_item: dict[str, Any] = UNSET if not isinstance(results_item_data, Unset): results_item = results_item_data.to_dict() results.append(results_item) - next_ = self.next_ - previous = self.previous + next_: Union[None, Unset, str] + if isinstance(self.next_, Unset): + next_ = UNSET + else: + next_ = self.next_ + + previous: Union[None, Unset, str] + if isinstance(self.previous, Unset): + previous = UNSET + else: + previous = self.previous - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(count, Unset): field_dict["count"] = count @@ -47,28 +67,42 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.flaw_comment import FlawComment + d = src_dict.copy() count = d.pop("count", UNSET) results = [] _results = d.pop("results", UNSET) - if _results is UNSET: - results = UNSET - else: - for results_item_data in _results or []: - _results_item = results_item_data - results_item: FlawComment - if isinstance(_results_item, Unset): - results_item = UNSET - else: - results_item = FlawComment.from_dict(_results_item) - - results.append(results_item) - - next_ = d.pop("next", UNSET) - - previous = d.pop("previous", UNSET) + for results_item_data in _results or []: + # } + _results_item = results_item_data + results_item: FlawComment + if isinstance(_results_item, Unset): + results_item = UNSET + else: + results_item = FlawComment.from_dict(_results_item) + + results.append(results_item) + + def _parse_next_(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + next_ = _parse_next_(d.pop("next", UNSET)) + + def _parse_previous(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + previous = _parse_previous(d.pop("previous", UNSET)) paginated_flaw_comment_list = cls( count=count, @@ -84,13 +118,13 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: def get_fields(): return { "count": int, - "results": List[FlawComment], - "next": str, - "previous": str, + "results": list["FlawComment"], + "next": Union[None, str], + "previous": Union[None, str], } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/paginated_flaw_cvss_list.py b/osidb_bindings/bindings/python_client/models/paginated_flaw_cvss_list.py index 8ae6976..931384b 100644 --- 
a/osidb_bindings/bindings/python_client/models/paginated_flaw_cvss_list.py +++ b/osidb_bindings/bindings/python_client/models/paginated_flaw_cvss_list.py @@ -1,39 +1,59 @@ -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union, cast -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field -from ..models.flaw_cvss import FlawCVSS from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.flaw_cvss import FlawCVSS + + T = TypeVar("T", bound="PaginatedFlawCVSSList") -@attr.s(auto_attribs=True) +@_attrs_define class PaginatedFlawCVSSList(OSIDBModel): - """ """ + """ + Attributes: + count (int): Example: 123. + results (list['FlawCVSS']): + next_ (Union[None, Unset, str]): Example: http://api.example.org/accounts/?offset=400&limit=100. + previous (Union[None, Unset, str]): Example: http://api.example.org/accounts/?offset=200&limit=100. + """ count: int - results: List[FlawCVSS] - next_: Union[Unset, None, str] = UNSET - previous: Union[Unset, None, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + results: list["FlawCVSS"] + next_: Union[None, Unset, str] = UNSET + previous: Union[None, Unset, str] = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: count = self.count - results: List[Dict[str, Any]] = UNSET + + results: list[dict[str, Any]] = UNSET if not isinstance(self.results, Unset): results = [] for results_item_data in self.results: - results_item: Dict[str, Any] = UNSET + results_item: dict[str, Any] = UNSET if not isinstance(results_item_data, Unset): results_item = results_item_data.to_dict() results.append(results_item) - next_ = self.next_ - previous = self.previous + next_: Union[None, Unset, str] + if isinstance(self.next_, Unset): + next_ = UNSET + else: + next_ = self.next_ + + previous: Union[None, Unset, str] + if isinstance(self.previous, Unset): + previous = UNSET + else: + previous = self.previous - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(count, Unset): field_dict["count"] = count @@ -47,28 +67,42 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.flaw_cvss import FlawCVSS + d = src_dict.copy() count = d.pop("count", UNSET) results = [] _results = d.pop("results", UNSET) - if _results is UNSET: - results = UNSET - else: - for results_item_data in _results or []: - _results_item = results_item_data - results_item: FlawCVSS - if isinstance(_results_item, Unset): - results_item = UNSET - else: - results_item = FlawCVSS.from_dict(_results_item) - - results.append(results_item) - - next_ = d.pop("next", UNSET) - - previous = d.pop("previous", UNSET) + for results_item_data in _results or []: + # } + _results_item = results_item_data + results_item: FlawCVSS + if isinstance(_results_item, Unset): + results_item = UNSET + else: + results_item = FlawCVSS.from_dict(_results_item) + + results.append(results_item) + + def _parse_next_(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + next_ = _parse_next_(d.pop("next", UNSET)) + + def _parse_previous(data: 
object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + previous = _parse_previous(d.pop("previous", UNSET)) paginated_flaw_cvss_list = cls( count=count, @@ -84,13 +118,13 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: def get_fields(): return { "count": int, - "results": List[FlawCVSS], - "next": str, - "previous": str, + "results": list["FlawCVSS"], + "next": Union[None, str], + "previous": Union[None, str], } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/paginated_flaw_list.py b/osidb_bindings/bindings/python_client/models/paginated_flaw_list.py index bb041b9..4a3b024 100644 --- a/osidb_bindings/bindings/python_client/models/paginated_flaw_list.py +++ b/osidb_bindings/bindings/python_client/models/paginated_flaw_list.py @@ -1,39 +1,59 @@ -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union, cast -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field -from ..models.flaw import Flaw from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.flaw import Flaw + + T = TypeVar("T", bound="PaginatedFlawList") -@attr.s(auto_attribs=True) +@_attrs_define class PaginatedFlawList(OSIDBModel): - """ """ + """ + Attributes: + count (int): Example: 123. + results (list['Flaw']): + next_ (Union[None, Unset, str]): Example: http://api.example.org/accounts/?offset=400&limit=100. + previous (Union[None, Unset, str]): Example: http://api.example.org/accounts/?offset=200&limit=100. 
+ """ count: int - results: List[Flaw] - next_: Union[Unset, None, str] = UNSET - previous: Union[Unset, None, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + results: list["Flaw"] + next_: Union[None, Unset, str] = UNSET + previous: Union[None, Unset, str] = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: count = self.count - results: List[Dict[str, Any]] = UNSET + + results: list[dict[str, Any]] = UNSET if not isinstance(self.results, Unset): results = [] for results_item_data in self.results: - results_item: Dict[str, Any] = UNSET + results_item: dict[str, Any] = UNSET if not isinstance(results_item_data, Unset): results_item = results_item_data.to_dict() results.append(results_item) - next_ = self.next_ - previous = self.previous + next_: Union[None, Unset, str] + if isinstance(self.next_, Unset): + next_ = UNSET + else: + next_ = self.next_ + + previous: Union[None, Unset, str] + if isinstance(self.previous, Unset): + previous = UNSET + else: + previous = self.previous - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(count, Unset): field_dict["count"] = count @@ -47,28 +67,42 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.flaw import Flaw + d = src_dict.copy() count = d.pop("count", UNSET) results = [] _results = d.pop("results", UNSET) - if _results is UNSET: - results = UNSET - else: - for results_item_data in _results or []: - _results_item = results_item_data - results_item: Flaw - if isinstance(_results_item, Unset): - results_item = UNSET - else: - results_item = Flaw.from_dict(_results_item) - - results.append(results_item) - - next_ = d.pop("next", UNSET) - - previous = d.pop("previous", UNSET) + for results_item_data in _results or []: + # } + _results_item = results_item_data + results_item: Flaw + if isinstance(_results_item, Unset): + results_item = UNSET + else: + results_item = Flaw.from_dict(_results_item) + + results.append(results_item) + + def _parse_next_(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + next_ = _parse_next_(d.pop("next", UNSET)) + + def _parse_previous(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + previous = _parse_previous(d.pop("previous", UNSET)) paginated_flaw_list = cls( count=count, @@ -84,13 +118,13 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: def get_fields(): return { "count": int, - "results": List[Flaw], - "next": str, - "previous": str, + "results": list["Flaw"], + "next": Union[None, str], + "previous": Union[None, str], } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/paginated_flaw_package_version_list.py b/osidb_bindings/bindings/python_client/models/paginated_flaw_package_version_list.py index 1effbe8..4593f74 100644 --- 
a/osidb_bindings/bindings/python_client/models/paginated_flaw_package_version_list.py +++ b/osidb_bindings/bindings/python_client/models/paginated_flaw_package_version_list.py @@ -1,39 +1,59 @@ -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union, cast -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field -from ..models.flaw_package_version import FlawPackageVersion from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.flaw_package_version import FlawPackageVersion + + T = TypeVar("T", bound="PaginatedFlawPackageVersionList") -@attr.s(auto_attribs=True) +@_attrs_define class PaginatedFlawPackageVersionList(OSIDBModel): - """ """ + """ + Attributes: + count (int): Example: 123. + results (list['FlawPackageVersion']): + next_ (Union[None, Unset, str]): Example: http://api.example.org/accounts/?offset=400&limit=100. + previous (Union[None, Unset, str]): Example: http://api.example.org/accounts/?offset=200&limit=100. + """ count: int - results: List[FlawPackageVersion] - next_: Union[Unset, None, str] = UNSET - previous: Union[Unset, None, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + results: list["FlawPackageVersion"] + next_: Union[None, Unset, str] = UNSET + previous: Union[None, Unset, str] = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: count = self.count - results: List[Dict[str, Any]] = UNSET + + results: list[dict[str, Any]] = UNSET if not isinstance(self.results, Unset): results = [] for results_item_data in self.results: - results_item: Dict[str, Any] = UNSET + results_item: dict[str, Any] = UNSET if not isinstance(results_item_data, Unset): results_item = results_item_data.to_dict() results.append(results_item) - next_ = self.next_ - previous = self.previous + next_: Union[None, Unset, str] + if isinstance(self.next_, Unset): + next_ = UNSET + else: + next_ = self.next_ + + previous: Union[None, Unset, str] + if isinstance(self.previous, Unset): + previous = UNSET + else: + previous = self.previous - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(count, Unset): field_dict["count"] = count @@ -47,28 +67,42 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.flaw_package_version import FlawPackageVersion + d = src_dict.copy() count = d.pop("count", UNSET) results = [] _results = d.pop("results", UNSET) - if _results is UNSET: - results = UNSET - else: - for results_item_data in _results or []: - _results_item = results_item_data - results_item: FlawPackageVersion - if isinstance(_results_item, Unset): - results_item = UNSET - else: - results_item = FlawPackageVersion.from_dict(_results_item) - - results.append(results_item) - - next_ = d.pop("next", UNSET) - - previous = d.pop("previous", UNSET) + for results_item_data in _results or []: + # } + _results_item = results_item_data + results_item: FlawPackageVersion + if isinstance(_results_item, Unset): + results_item = UNSET + else: + results_item = FlawPackageVersion.from_dict(_results_item) + + results.append(results_item) + + def _parse_next_(data: object) -> Union[None, Unset, str]: + if data is None: + 
return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + next_ = _parse_next_(d.pop("next", UNSET)) + + def _parse_previous(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + previous = _parse_previous(d.pop("previous", UNSET)) paginated_flaw_package_version_list = cls( count=count, @@ -84,13 +118,13 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: def get_fields(): return { "count": int, - "results": List[FlawPackageVersion], - "next": str, - "previous": str, + "results": list["FlawPackageVersion"], + "next": Union[None, str], + "previous": Union[None, str], } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/paginated_flaw_reference_list.py b/osidb_bindings/bindings/python_client/models/paginated_flaw_reference_list.py index 0e4a633..a0af094 100644 --- a/osidb_bindings/bindings/python_client/models/paginated_flaw_reference_list.py +++ b/osidb_bindings/bindings/python_client/models/paginated_flaw_reference_list.py @@ -1,39 +1,59 @@ -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union, cast -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field -from ..models.flaw_reference import FlawReference from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.flaw_reference import FlawReference + + T = TypeVar("T", bound="PaginatedFlawReferenceList") -@attr.s(auto_attribs=True) +@_attrs_define class PaginatedFlawReferenceList(OSIDBModel): - """ """ + """ + Attributes: + count (int): Example: 123. + results (list['FlawReference']): + next_ (Union[None, Unset, str]): Example: http://api.example.org/accounts/?offset=400&limit=100. + previous (Union[None, Unset, str]): Example: http://api.example.org/accounts/?offset=200&limit=100. 
+ """ count: int - results: List[FlawReference] - next_: Union[Unset, None, str] = UNSET - previous: Union[Unset, None, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + results: list["FlawReference"] + next_: Union[None, Unset, str] = UNSET + previous: Union[None, Unset, str] = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: count = self.count - results: List[Dict[str, Any]] = UNSET + + results: list[dict[str, Any]] = UNSET if not isinstance(self.results, Unset): results = [] for results_item_data in self.results: - results_item: Dict[str, Any] = UNSET + results_item: dict[str, Any] = UNSET if not isinstance(results_item_data, Unset): results_item = results_item_data.to_dict() results.append(results_item) - next_ = self.next_ - previous = self.previous + next_: Union[None, Unset, str] + if isinstance(self.next_, Unset): + next_ = UNSET + else: + next_ = self.next_ + + previous: Union[None, Unset, str] + if isinstance(self.previous, Unset): + previous = UNSET + else: + previous = self.previous - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(count, Unset): field_dict["count"] = count @@ -47,28 +67,42 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.flaw_reference import FlawReference + d = src_dict.copy() count = d.pop("count", UNSET) results = [] _results = d.pop("results", UNSET) - if _results is UNSET: - results = UNSET - else: - for results_item_data in _results or []: - _results_item = results_item_data - results_item: FlawReference - if isinstance(_results_item, Unset): - results_item = UNSET - else: - results_item = FlawReference.from_dict(_results_item) - - results.append(results_item) - - next_ = d.pop("next", UNSET) - - previous = d.pop("previous", UNSET) + for results_item_data in _results or []: + # } + _results_item = results_item_data + results_item: FlawReference + if isinstance(_results_item, Unset): + results_item = UNSET + else: + results_item = FlawReference.from_dict(_results_item) + + results.append(results_item) + + def _parse_next_(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + next_ = _parse_next_(d.pop("next", UNSET)) + + def _parse_previous(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + previous = _parse_previous(d.pop("previous", UNSET)) paginated_flaw_reference_list = cls( count=count, @@ -84,13 +118,13 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: def get_fields(): return { "count": int, - "results": List[FlawReference], - "next": str, - "previous": str, + "results": list["FlawReference"], + "next": Union[None, str], + "previous": Union[None, str], } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/paginated_flaw_report_data_list.py b/osidb_bindings/bindings/python_client/models/paginated_flaw_report_data_list.py index 
321cd55..5adf316 100644 --- a/osidb_bindings/bindings/python_client/models/paginated_flaw_report_data_list.py +++ b/osidb_bindings/bindings/python_client/models/paginated_flaw_report_data_list.py @@ -1,39 +1,59 @@ -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union, cast -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field -from ..models.flaw_report_data import FlawReportData from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.flaw_report_data import FlawReportData + + T = TypeVar("T", bound="PaginatedFlawReportDataList") -@attr.s(auto_attribs=True) +@_attrs_define class PaginatedFlawReportDataList(OSIDBModel): - """ """ + """ + Attributes: + count (int): Example: 123. + results (list['FlawReportData']): + next_ (Union[None, Unset, str]): Example: http://api.example.org/accounts/?offset=400&limit=100. + previous (Union[None, Unset, str]): Example: http://api.example.org/accounts/?offset=200&limit=100. + """ count: int - results: List[FlawReportData] - next_: Union[Unset, None, str] = UNSET - previous: Union[Unset, None, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + results: list["FlawReportData"] + next_: Union[None, Unset, str] = UNSET + previous: Union[None, Unset, str] = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: count = self.count - results: List[Dict[str, Any]] = UNSET + + results: list[dict[str, Any]] = UNSET if not isinstance(self.results, Unset): results = [] for results_item_data in self.results: - results_item: Dict[str, Any] = UNSET + results_item: dict[str, Any] = UNSET if not isinstance(results_item_data, Unset): results_item = results_item_data.to_dict() results.append(results_item) - next_ = self.next_ - previous = self.previous + next_: Union[None, Unset, str] + if isinstance(self.next_, Unset): + next_ = UNSET + else: + next_ = self.next_ + + previous: Union[None, Unset, str] + if isinstance(self.previous, Unset): + previous = UNSET + else: + previous = self.previous - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(count, Unset): field_dict["count"] = count @@ -47,28 +67,42 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.flaw_report_data import FlawReportData + d = src_dict.copy() count = d.pop("count", UNSET) results = [] _results = d.pop("results", UNSET) - if _results is UNSET: - results = UNSET - else: - for results_item_data in _results or []: - _results_item = results_item_data - results_item: FlawReportData - if isinstance(_results_item, Unset): - results_item = UNSET - else: - results_item = FlawReportData.from_dict(_results_item) - - results.append(results_item) - - next_ = d.pop("next", UNSET) - - previous = d.pop("previous", UNSET) + for results_item_data in _results or []: + # } + _results_item = results_item_data + results_item: FlawReportData + if isinstance(_results_item, Unset): + results_item = UNSET + else: + results_item = FlawReportData.from_dict(_results_item) + + results.append(results_item) + + def _parse_next_(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): 
+ return data + return cast(Union[None, Unset, str], data) + + next_ = _parse_next_(d.pop("next", UNSET)) + + def _parse_previous(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + previous = _parse_previous(d.pop("previous", UNSET)) paginated_flaw_report_data_list = cls( count=count, @@ -84,13 +118,13 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: def get_fields(): return { "count": int, - "results": List[FlawReportData], - "next": str, - "previous": str, + "results": list["FlawReportData"], + "next": Union[None, str], + "previous": Union[None, str], } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/paginated_supported_products_list.py b/osidb_bindings/bindings/python_client/models/paginated_supported_products_list.py index 394a9e6..2dbc027 100644 --- a/osidb_bindings/bindings/python_client/models/paginated_supported_products_list.py +++ b/osidb_bindings/bindings/python_client/models/paginated_supported_products_list.py @@ -1,39 +1,59 @@ -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union, cast -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field -from ..models.supported_products import SupportedProducts from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.supported_products import SupportedProducts + + T = TypeVar("T", bound="PaginatedSupportedProductsList") -@attr.s(auto_attribs=True) +@_attrs_define class PaginatedSupportedProductsList(OSIDBModel): - """ """ + """ + Attributes: + count (int): Example: 123. + results (list['SupportedProducts']): + next_ (Union[None, Unset, str]): Example: http://api.example.org/accounts/?offset=400&limit=100. + previous (Union[None, Unset, str]): Example: http://api.example.org/accounts/?offset=200&limit=100. 
+ """ count: int - results: List[SupportedProducts] - next_: Union[Unset, None, str] = UNSET - previous: Union[Unset, None, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + results: list["SupportedProducts"] + next_: Union[None, Unset, str] = UNSET + previous: Union[None, Unset, str] = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: count = self.count - results: List[Dict[str, Any]] = UNSET + + results: list[dict[str, Any]] = UNSET if not isinstance(self.results, Unset): results = [] for results_item_data in self.results: - results_item: Dict[str, Any] = UNSET + results_item: dict[str, Any] = UNSET if not isinstance(results_item_data, Unset): results_item = results_item_data.to_dict() results.append(results_item) - next_ = self.next_ - previous = self.previous + next_: Union[None, Unset, str] + if isinstance(self.next_, Unset): + next_ = UNSET + else: + next_ = self.next_ + + previous: Union[None, Unset, str] + if isinstance(self.previous, Unset): + previous = UNSET + else: + previous = self.previous - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(count, Unset): field_dict["count"] = count @@ -47,28 +67,42 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.supported_products import SupportedProducts + d = src_dict.copy() count = d.pop("count", UNSET) results = [] _results = d.pop("results", UNSET) - if _results is UNSET: - results = UNSET - else: - for results_item_data in _results or []: - _results_item = results_item_data - results_item: SupportedProducts - if isinstance(_results_item, Unset): - results_item = UNSET - else: - results_item = SupportedProducts.from_dict(_results_item) - - results.append(results_item) - - next_ = d.pop("next", UNSET) - - previous = d.pop("previous", UNSET) + for results_item_data in _results or []: + # } + _results_item = results_item_data + results_item: SupportedProducts + if isinstance(_results_item, Unset): + results_item = UNSET + else: + results_item = SupportedProducts.from_dict(_results_item) + + results.append(results_item) + + def _parse_next_(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + next_ = _parse_next_(d.pop("next", UNSET)) + + def _parse_previous(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + previous = _parse_previous(d.pop("previous", UNSET)) paginated_supported_products_list = cls( count=count, @@ -84,13 +118,13 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: def get_fields(): return { "count": int, - "results": List[SupportedProducts], - "next": str, - "previous": str, + "results": list["SupportedProducts"], + "next": Union[None, str], + "previous": Union[None, str], } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/paginated_tracker_list.py 
b/osidb_bindings/bindings/python_client/models/paginated_tracker_list.py index fc25d98..53afaf2 100644 --- a/osidb_bindings/bindings/python_client/models/paginated_tracker_list.py +++ b/osidb_bindings/bindings/python_client/models/paginated_tracker_list.py @@ -1,39 +1,59 @@ -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union, cast -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field -from ..models.tracker import Tracker from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.tracker import Tracker + + T = TypeVar("T", bound="PaginatedTrackerList") -@attr.s(auto_attribs=True) +@_attrs_define class PaginatedTrackerList(OSIDBModel): - """ """ + """ + Attributes: + count (int): Example: 123. + results (list['Tracker']): + next_ (Union[None, Unset, str]): Example: http://api.example.org/accounts/?offset=400&limit=100. + previous (Union[None, Unset, str]): Example: http://api.example.org/accounts/?offset=200&limit=100. + """ count: int - results: List[Tracker] - next_: Union[Unset, None, str] = UNSET - previous: Union[Unset, None, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + results: list["Tracker"] + next_: Union[None, Unset, str] = UNSET + previous: Union[None, Unset, str] = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: count = self.count - results: List[Dict[str, Any]] = UNSET + + results: list[dict[str, Any]] = UNSET if not isinstance(self.results, Unset): results = [] for results_item_data in self.results: - results_item: Dict[str, Any] = UNSET + results_item: dict[str, Any] = UNSET if not isinstance(results_item_data, Unset): results_item = results_item_data.to_dict() results.append(results_item) - next_ = self.next_ - previous = self.previous + next_: Union[None, Unset, str] + if isinstance(self.next_, Unset): + next_ = UNSET + else: + next_ = self.next_ + + previous: Union[None, Unset, str] + if isinstance(self.previous, Unset): + previous = UNSET + else: + previous = self.previous - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(count, Unset): field_dict["count"] = count @@ -47,28 +67,42 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.tracker import Tracker + d = src_dict.copy() count = d.pop("count", UNSET) results = [] _results = d.pop("results", UNSET) - if _results is UNSET: - results = UNSET - else: - for results_item_data in _results or []: - _results_item = results_item_data - results_item: Tracker - if isinstance(_results_item, Unset): - results_item = UNSET - else: - results_item = Tracker.from_dict(_results_item) - - results.append(results_item) - - next_ = d.pop("next", UNSET) - - previous = d.pop("previous", UNSET) + for results_item_data in _results or []: + # } + _results_item = results_item_data + results_item: Tracker + if isinstance(_results_item, Unset): + results_item = UNSET + else: + results_item = Tracker.from_dict(_results_item) + + results.append(results_item) + + def _parse_next_(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, 
str], data) + + next_ = _parse_next_(d.pop("next", UNSET)) + + def _parse_previous(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + previous = _parse_previous(d.pop("previous", UNSET)) paginated_tracker_list = cls( count=count, @@ -84,13 +118,13 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: def get_fields(): return { "count": int, - "results": List[Tracker], - "next": str, - "previous": str, + "results": list["Tracker"], + "next": Union[None, str], + "previous": Union[None, str], } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/ps_stream_selection.py b/osidb_bindings/bindings/python_client/models/ps_stream_selection.py index 8eb4555..f2965b8 100644 --- a/osidb_bindings/bindings/python_client/models/ps_stream_selection.py +++ b/osidb_bindings/bindings/python_client/models/ps_stream_selection.py @@ -1,31 +1,43 @@ -from typing import Any, Dict, List, Type, TypeVar +from typing import Any, TypeVar -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from ..types import UNSET, OSIDBModel, Unset T = TypeVar("T", bound="PsStreamSelection") -@attr.s(auto_attribs=True) +@_attrs_define class PsStreamSelection(OSIDBModel): - """ """ + """ + Attributes: + ps_update_stream (str): + selected (bool): + acked (bool): + eus (bool): + aus (bool): + """ ps_update_stream: str selected: bool acked: bool eus: bool aus: bool - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: ps_update_stream = self.ps_update_stream + selected = self.selected + acked = self.acked + eus = self.eus + aus = self.aus - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(ps_update_stream, Unset): field_dict["ps_update_stream"] = ps_update_stream @@ -41,7 +53,7 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: d = src_dict.copy() ps_update_stream = d.pop("ps_update_stream", UNSET) @@ -75,7 +87,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/reject.py b/osidb_bindings/bindings/python_client/models/reject.py index 8739613..ba8d1ff 100644 --- a/osidb_bindings/bindings/python_client/models/reject.py +++ b/osidb_bindings/bindings/python_client/models/reject.py @@ -1,50 +1,48 @@ -from typing import Any, Dict, List, Type, TypeVar +from typing import Any, TypeVar -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from ..types import UNSET, OSIDBModel, Unset T = TypeVar("T", bound="Reject") -@attr.s(auto_attribs=True) +@_attrs_define class Reject(OSIDBModel): - """Task rejection serializer""" + """Task rejection serializer + + Attributes: + reason (str): + """ reason: str - additional_properties: Dict[str, Any] = 
attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: reason = self.reason - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(reason, Unset): field_dict["reason"] = reason return field_dict - def to_multipart(self) -> Dict[str, Any]: - reason = ( - self.reason - if self.reason is UNSET - else (None, str(self.reason), "text/plain") - ) + def to_multipart(self) -> dict[str, Any]: + reason = (None, str(self.reason).encode(), "text/plain") + + field_dict: dict[str, Any] = {} + for prop_name, prop in self.additional_properties.items(): + field_dict[prop_name] = (None, str(prop).encode(), "text/plain") - field_dict: Dict[str, Any] = {} - field_dict.update( - { - key: (None, str(value), "text/plain") - for key, value in self.additional_properties.items() - } - ) if not isinstance(reason, Unset): field_dict["reason"] = reason return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: d = src_dict.copy() reason = d.pop("reason", UNSET) @@ -62,7 +60,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/requires_cve_description_enum.py b/osidb_bindings/bindings/python_client/models/requires_cve_description_enum.py index 7df54fc..ab3b333 100644 --- a/osidb_bindings/bindings/python_client/models/requires_cve_description_enum.py +++ b/osidb_bindings/bindings/python_client/models/requires_cve_description_enum.py @@ -2,9 +2,9 @@ class RequiresCveDescriptionEnum(str, Enum): - REQUESTED = "REQUESTED" APPROVED = "APPROVED" REJECTED = "REJECTED" + REQUESTED = "REQUESTED" def __str__(self) -> str: return str(self.value) diff --git a/osidb_bindings/bindings/python_client/models/resolution_enum.py b/osidb_bindings/bindings/python_client/models/resolution_enum.py index 8b71524..972dcd5 100644 --- a/osidb_bindings/bindings/python_client/models/resolution_enum.py +++ b/osidb_bindings/bindings/python_client/models/resolution_enum.py @@ -2,11 +2,11 @@ class ResolutionEnum(str, Enum): - FIX = "FIX" DEFER = "DEFER" - WONTFIX = "WONTFIX" - OOSS = "OOSS" DELEGATED = "DELEGATED" + FIX = "FIX" + OOSS = "OOSS" + WONTFIX = "WONTFIX" WONTREPORT = "WONTREPORT" def __str__(self) -> str: diff --git a/osidb_bindings/bindings/python_client/models/supported_products.py b/osidb_bindings/bindings/python_client/models/supported_products.py index 0c50e1b..33927d4 100644 --- a/osidb_bindings/bindings/python_client/models/supported_products.py +++ b/osidb_bindings/bindings/python_client/models/supported_products.py @@ -1,23 +1,27 @@ -from typing import Any, Dict, List, Type, TypeVar +from typing import Any, TypeVar -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from ..types import UNSET, OSIDBModel, Unset T = TypeVar("T", bound="SupportedProducts") -@attr.s(auto_attribs=True) +@_attrs_define class SupportedProducts(OSIDBModel): - """ """ + """ + Attributes: + name (str): + """ name: str - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, 
Any]: + def to_dict(self) -> dict[str, Any]: name = self.name - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(name, Unset): field_dict["name"] = name @@ -25,7 +29,7 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: d = src_dict.copy() name = d.pop("name", UNSET) @@ -43,7 +47,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/token_obtain_pair.py b/osidb_bindings/bindings/python_client/models/token_obtain_pair.py index 478d876..8170e18 100644 --- a/osidb_bindings/bindings/python_client/models/token_obtain_pair.py +++ b/osidb_bindings/bindings/python_client/models/token_obtain_pair.py @@ -1,29 +1,39 @@ -from typing import Any, Dict, List, Type, TypeVar +from typing import Any, TypeVar -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from ..types import UNSET, OSIDBModel, Unset T = TypeVar("T", bound="TokenObtainPair") -@attr.s(auto_attribs=True) +@_attrs_define class TokenObtainPair(OSIDBModel): - """ """ + """ + Attributes: + username (str): + password (str): + access (str): + refresh (str): + """ username: str password: str access: str refresh: str - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: username = self.username + password = self.password + access = self.access + refresh = self.refresh - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(username, Unset): field_dict["username"] = username @@ -36,35 +46,19 @@ def to_dict(self) -> Dict[str, Any]: return field_dict - def to_multipart(self) -> Dict[str, Any]: - username = ( - self.username - if self.username is UNSET - else (None, str(self.username), "text/plain") - ) - password = ( - self.password - if self.password is UNSET - else (None, str(self.password), "text/plain") - ) - access = ( - self.access - if self.access is UNSET - else (None, str(self.access), "text/plain") - ) - refresh = ( - self.refresh - if self.refresh is UNSET - else (None, str(self.refresh), "text/plain") - ) + def to_multipart(self) -> dict[str, Any]: + username = (None, str(self.username).encode(), "text/plain") + + password = (None, str(self.password).encode(), "text/plain") + + access = (None, str(self.access).encode(), "text/plain") + + refresh = (None, str(self.refresh).encode(), "text/plain") + + field_dict: dict[str, Any] = {} + for prop_name, prop in self.additional_properties.items(): + field_dict[prop_name] = (None, str(prop).encode(), "text/plain") - field_dict: Dict[str, Any] = {} - field_dict.update( - { - key: (None, str(value), "text/plain") - for key, value in self.additional_properties.items() - } - ) if not isinstance(username, Unset): field_dict["username"] = username if not isinstance(password, Unset): @@ -77,7 +71,7 @@ def to_multipart(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: 
dict[str, Any]) -> T: d = src_dict.copy() username = d.pop("username", UNSET) @@ -107,7 +101,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/token_refresh.py b/osidb_bindings/bindings/python_client/models/token_refresh.py index f714d53..08e80b0 100644 --- a/osidb_bindings/bindings/python_client/models/token_refresh.py +++ b/osidb_bindings/bindings/python_client/models/token_refresh.py @@ -1,25 +1,31 @@ -from typing import Any, Dict, List, Type, TypeVar +from typing import Any, TypeVar -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from ..types import UNSET, OSIDBModel, Unset T = TypeVar("T", bound="TokenRefresh") -@attr.s(auto_attribs=True) +@_attrs_define class TokenRefresh(OSIDBModel): - """ """ + """ + Attributes: + access (str): + refresh (str): + """ access: str refresh: str - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: access = self.access + refresh = self.refresh - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(access, Unset): field_dict["access"] = access @@ -28,25 +34,15 @@ def to_dict(self) -> Dict[str, Any]: return field_dict - def to_multipart(self) -> Dict[str, Any]: - access = ( - self.access - if self.access is UNSET - else (None, str(self.access), "text/plain") - ) - refresh = ( - self.refresh - if self.refresh is UNSET - else (None, str(self.refresh), "text/plain") - ) + def to_multipart(self) -> dict[str, Any]: + access = (None, str(self.access).encode(), "text/plain") + + refresh = (None, str(self.refresh).encode(), "text/plain") + + field_dict: dict[str, Any] = {} + for prop_name, prop in self.additional_properties.items(): + field_dict[prop_name] = (None, str(prop).encode(), "text/plain") - field_dict: Dict[str, Any] = {} - field_dict.update( - { - key: (None, str(value), "text/plain") - for key, value in self.additional_properties.items() - } - ) if not isinstance(access, Unset): field_dict["access"] = access if not isinstance(refresh, Unset): @@ -55,7 +51,7 @@ def to_multipart(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: d = src_dict.copy() access = d.pop("access", UNSET) @@ -77,7 +73,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/token_verify.py b/osidb_bindings/bindings/python_client/models/token_verify.py index 90316e3..f632fcd 100644 --- a/osidb_bindings/bindings/python_client/models/token_verify.py +++ b/osidb_bindings/bindings/python_client/models/token_verify.py @@ -1,48 +1,47 @@ -from typing import Any, Dict, List, Type, TypeVar +from typing import Any, TypeVar -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from ..types import UNSET, OSIDBModel, Unset T = TypeVar("T", bound="TokenVerify") -@attr.s(auto_attribs=True) +@_attrs_define class 
TokenVerify(OSIDBModel): - """ """ + """ + Attributes: + token (str): + """ token: str - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: token = self.token - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(token, Unset): field_dict["token"] = token return field_dict - def to_multipart(self) -> Dict[str, Any]: - token = ( - self.token if self.token is UNSET else (None, str(self.token), "text/plain") - ) + def to_multipart(self) -> dict[str, Any]: + token = (None, str(self.token).encode(), "text/plain") + + field_dict: dict[str, Any] = {} + for prop_name, prop in self.additional_properties.items(): + field_dict[prop_name] = (None, str(prop).encode(), "text/plain") - field_dict: Dict[str, Any] = {} - field_dict.update( - { - key: (None, str(value), "text/plain") - for key, value in self.additional_properties.items() - } - ) if not isinstance(token, Unset): field_dict["token"] = token return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: d = src_dict.copy() token = d.pop("token", UNSET) @@ -60,7 +59,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/tracker.py b/osidb_bindings/bindings/python_client/models/tracker.py index 0a99894..b73e571 100644 --- a/osidb_bindings/bindings/python_client/models/tracker.py +++ b/osidb_bindings/bindings/python_client/models/tracker.py @@ -1,63 +1,94 @@ import datetime import json -from typing import Any, Dict, List, Tuple, Type, TypeVar, Union, cast +from typing import TYPE_CHECKING, Any, TypeVar, Union +from uuid import UUID -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.alert import Alert -from ..models.erratum import Erratum from ..models.tracker_type import TrackerType from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.alert import Alert + from ..models.erratum import Erratum + + T = TypeVar("T", bound="Tracker") -@attr.s(auto_attribs=True) +@_attrs_define class Tracker(OSIDBModel): - """Tracker serializer""" - - errata: List[Erratum] + """Tracker serializer + + Attributes: + errata (list['Erratum']): + external_system_id (str): + status (str): + resolution (str): + type_ (TrackerType): + uuid (UUID): + embargoed (bool): The embargoed boolean attribute is technically read-only as it just indirectly modifies the + ACLs but is mandatory as it controls the access to the resource. + alerts (list['Alert']): + created_dt (datetime.datetime): + updated_dt (datetime.datetime): The updated_dt timestamp attribute is mandatory on update as it is used to + detect mit-air collisions. + affects (Union[Unset, list[UUID]]): + ps_update_stream (Union[Unset, str]): + sync_to_bz (Union[Unset, bool]): Setting sync_to_bz to false disables flaw sync with Bugzilla after this + operation. Use only as part of bulk actions and trigger a flaw bugzilla sync afterwards. Does nothing if BZ is + disabled. 
+ """ + + errata: list["Erratum"] external_system_id: str status: str resolution: str - type: TrackerType - uuid: str + type_: TrackerType + uuid: UUID embargoed: bool - alerts: List[Alert] + alerts: list["Alert"] created_dt: datetime.datetime updated_dt: datetime.datetime - affects: Union[Unset, List[str]] = UNSET + affects: Union[Unset, list[UUID]] = UNSET ps_update_stream: Union[Unset, str] = UNSET sync_to_bz: Union[Unset, bool] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: - errata: List[Dict[str, Any]] = UNSET + def to_dict(self) -> dict[str, Any]: + errata: list[dict[str, Any]] = UNSET if not isinstance(self.errata, Unset): errata = [] for errata_item_data in self.errata: - errata_item: Dict[str, Any] = UNSET + errata_item: dict[str, Any] = UNSET if not isinstance(errata_item_data, Unset): errata_item = errata_item_data.to_dict() errata.append(errata_item) external_system_id = self.external_system_id + status = self.status + resolution = self.resolution - type: str = UNSET - if not isinstance(self.type, Unset): - type = TrackerType(self.type).value + type_: str = UNSET + if not isinstance(self.type_, Unset): + type_ = TrackerType(self.type_).value + + uuid: str = UNSET + if not isinstance(self.uuid, Unset): + uuid = str(self.uuid) - uuid = self.uuid embargoed = self.embargoed - alerts: List[Dict[str, Any]] = UNSET + + alerts: list[dict[str, Any]] = UNSET if not isinstance(self.alerts, Unset): alerts = [] for alerts_item_data in self.alerts: - alerts_item: Dict[str, Any] = UNSET + alerts_item: dict[str, Any] = UNSET if not isinstance(alerts_item_data, Unset): alerts_item = alerts_item_data.to_dict() @@ -71,14 +102,21 @@ def to_dict(self) -> Dict[str, Any]: if not isinstance(self.updated_dt, Unset): updated_dt = self.updated_dt.isoformat() - affects: Union[Unset, List[str]] = UNSET + affects: Union[Unset, list[str]] = UNSET if not isinstance(self.affects, Unset): - affects = self.affects + affects = [] + for affects_item_data in self.affects: + affects_item: str = UNSET + if not isinstance(affects_item_data, Unset): + affects_item = str(affects_item_data) + + affects.append(affects_item) ps_update_stream = self.ps_update_stream + sync_to_bz = self.sync_to_bz - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(errata, Unset): field_dict["errata"] = errata @@ -88,8 +126,8 @@ def to_dict(self) -> Dict[str, Any]: field_dict["status"] = status if not isinstance(resolution, Unset): field_dict["resolution"] = resolution - if not isinstance(type, Unset): - field_dict["type"] = type + if not isinstance(type_, Unset): + field_dict["type"] = type_ if not isinstance(uuid, Unset): field_dict["uuid"] = uuid if not isinstance(embargoed, Unset): @@ -109,86 +147,81 @@ def to_dict(self) -> Dict[str, Any]: return field_dict - def to_multipart(self) -> Dict[str, Any]: - errata: Union[Unset, Tuple[None, str, str]] = UNSET + def to_multipart(self) -> dict[str, Any]: + errata: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.errata, Unset): _temp_errata = [] for errata_item_data in self.errata: - errata_item: Dict[str, Any] = UNSET + errata_item: dict[str, Any] = UNSET if not isinstance(errata_item_data, Unset): errata_item = errata_item_data.to_dict() _temp_errata.append(errata_item) - errata = (None, json.dumps(_temp_errata), "application/json") + 
errata = (None, json.dumps(_temp_errata).encode(), "application/json") - external_system_id = ( - self.external_system_id - if self.external_system_id is UNSET - else (None, str(self.external_system_id), "text/plain") - ) - status = ( - self.status - if self.status is UNSET - else (None, str(self.status), "text/plain") - ) - resolution = ( - self.resolution - if self.resolution is UNSET - else (None, str(self.resolution), "text/plain") - ) - type: Union[Unset, Tuple[None, str, str]] = UNSET - if not isinstance(self.type, Unset): + external_system_id = (None, str(self.external_system_id).encode(), "text/plain") - type = TrackerType(self.type).value + status = (None, str(self.status).encode(), "text/plain") - uuid = self.uuid if self.uuid is UNSET else (None, str(self.uuid), "text/plain") - embargoed = ( - self.embargoed - if self.embargoed is UNSET - else (None, str(self.embargoed), "text/plain") - ) - alerts: Union[Unset, Tuple[None, str, str]] = UNSET + resolution = (None, str(self.resolution).encode(), "text/plain") + + type_: Union[Unset, tuple[None, bytes, str]] = UNSET + if not isinstance(self.type_, Unset): + type_ = (None, str(self.type_.value).encode(), "text/plain") + # CHANGE END (3) #} + + uuid: bytes = UNSET + if not isinstance(self.uuid, Unset): + uuid = str(self.uuid) + + embargoed = (None, str(self.embargoed).encode(), "text/plain") + + alerts: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.alerts, Unset): _temp_alerts = [] for alerts_item_data in self.alerts: - alerts_item: Dict[str, Any] = UNSET + alerts_item: dict[str, Any] = UNSET if not isinstance(alerts_item_data, Unset): alerts_item = alerts_item_data.to_dict() _temp_alerts.append(alerts_item) - alerts = (None, json.dumps(_temp_alerts), "application/json") + alerts = (None, json.dumps(_temp_alerts).encode(), "application/json") - created_dt: str = UNSET + created_dt: bytes = UNSET if not isinstance(self.created_dt, Unset): - created_dt = self.created_dt.isoformat() + created_dt = self.created_dt.isoformat().encode() - updated_dt: str = UNSET + updated_dt: bytes = UNSET if not isinstance(self.updated_dt, Unset): - updated_dt = self.updated_dt.isoformat() + updated_dt = self.updated_dt.isoformat().encode() - affects: Union[Unset, Tuple[None, str, str]] = UNSET + affects: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.affects, Unset): - _temp_affects = self.affects - affects = (None, json.dumps(_temp_affects), "application/json") + _temp_affects = [] + for affects_item_data in self.affects: + affects_item: str = UNSET + if not isinstance(affects_item_data, Unset): + affects_item = str(affects_item_data) + + _temp_affects.append(affects_item) + affects = (None, json.dumps(_temp_affects).encode(), "application/json") ps_update_stream = ( self.ps_update_stream - if self.ps_update_stream is UNSET - else (None, str(self.ps_update_stream), "text/plain") + if isinstance(self.ps_update_stream, Unset) + else (None, str(self.ps_update_stream).encode(), "text/plain") ) + sync_to_bz = ( self.sync_to_bz - if self.sync_to_bz is UNSET - else (None, str(self.sync_to_bz), "text/plain") + if isinstance(self.sync_to_bz, Unset) + else (None, str(self.sync_to_bz).encode(), "text/plain") ) - field_dict: Dict[str, Any] = {} - field_dict.update( - { - key: (None, str(value), "text/plain") - for key, value in self.additional_properties.items() - } - ) + field_dict: dict[str, Any] = {} + for prop_name, prop in self.additional_properties.items(): + field_dict[prop_name] = (None, str(prop).encode(), 
"text/plain") + if not isinstance(errata, Unset): field_dict["errata"] = errata if not isinstance(external_system_id, Unset): @@ -197,8 +230,8 @@ def to_multipart(self) -> Dict[str, Any]: field_dict["status"] = status if not isinstance(resolution, Unset): field_dict["resolution"] = resolution - if not isinstance(type, Unset): - field_dict["type"] = type + if not isinstance(type_, Unset): + field_dict["type"] = type_ if not isinstance(uuid, Unset): field_dict["uuid"] = uuid if not isinstance(embargoed, Unset): @@ -219,22 +252,23 @@ def to_multipart(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.alert import Alert + from ..models.erratum import Erratum + d = src_dict.copy() errata = [] _errata = d.pop("errata", UNSET) - if _errata is UNSET: - errata = UNSET - else: - for errata_item_data in _errata or []: - _errata_item = errata_item_data - errata_item: Erratum - if isinstance(_errata_item, Unset): - errata_item = UNSET - else: - errata_item = Erratum.from_dict(_errata_item) + for errata_item_data in _errata or []: + # } + _errata_item = errata_item_data + errata_item: Erratum + if isinstance(_errata_item, Unset): + errata_item = UNSET + else: + errata_item = Erratum.from_dict(_errata_item) - errata.append(errata_item) + errata.append(errata_item) external_system_id = d.pop("external_system_id", UNSET) @@ -242,32 +276,38 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: resolution = d.pop("resolution", UNSET) - _type = d.pop("type", UNSET) - type: TrackerType - if isinstance(_type, Unset): - type = UNSET + # } + _type_ = d.pop("type", UNSET) + type_: TrackerType + if isinstance(_type_, Unset): + type_ = UNSET else: - type = TrackerType(_type) + type_ = TrackerType(_type_) - uuid = d.pop("uuid", UNSET) + # } + _uuid = d.pop("uuid", UNSET) + uuid: UUID + if isinstance(_uuid, Unset): + uuid = UNSET + else: + uuid = UUID(_uuid) embargoed = d.pop("embargoed", UNSET) alerts = [] _alerts = d.pop("alerts", UNSET) - if _alerts is UNSET: - alerts = UNSET - else: - for alerts_item_data in _alerts or []: - _alerts_item = alerts_item_data - alerts_item: Alert - if isinstance(_alerts_item, Unset): - alerts_item = UNSET - else: - alerts_item = Alert.from_dict(_alerts_item) - - alerts.append(alerts_item) - + for alerts_item_data in _alerts or []: + # } + _alerts_item = alerts_item_data + alerts_item: Alert + if isinstance(_alerts_item, Unset): + alerts_item = UNSET + else: + alerts_item = Alert.from_dict(_alerts_item) + + alerts.append(alerts_item) + + # } _created_dt = d.pop("created_dt", UNSET) created_dt: datetime.datetime if isinstance(_created_dt, Unset): @@ -275,6 +315,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: created_dt = isoparse(_created_dt) + # } _updated_dt = d.pop("updated_dt", UNSET) updated_dt: datetime.datetime if isinstance(_updated_dt, Unset): @@ -282,7 +323,18 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: updated_dt = isoparse(_updated_dt) - affects = cast(List[str], d.pop("affects", UNSET)) + affects = [] + _affects = d.pop("affects", UNSET) + for affects_item_data in _affects or []: + # } + _affects_item = affects_item_data + affects_item: UUID + if isinstance(_affects_item, Unset): + affects_item = UNSET + else: + affects_item = UUID(_affects_item) + + affects.append(affects_item) ps_update_stream = d.pop("ps_update_stream", UNSET) @@ -293,7 +345,7 @@ def from_dict(cls: 
Type[T], src_dict: Dict[str, Any]) -> T: external_system_id=external_system_id, status=status, resolution=resolution, - type=type, + type_=type_, uuid=uuid, embargoed=embargoed, alerts=alerts, @@ -310,23 +362,23 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: @staticmethod def get_fields(): return { - "errata": List[Erratum], + "errata": list["Erratum"], "external_system_id": str, "status": str, "resolution": str, "type": TrackerType, - "uuid": str, + "uuid": UUID, "embargoed": bool, - "alerts": List[Alert], + "alerts": list["Alert"], "created_dt": datetime.datetime, "updated_dt": datetime.datetime, - "affects": List[str], + "affects": list[UUID], "ps_update_stream": str, "sync_to_bz": bool, } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/tracker_post.py b/osidb_bindings/bindings/python_client/models/tracker_post.py index c70968c..409a321 100644 --- a/osidb_bindings/bindings/python_client/models/tracker_post.py +++ b/osidb_bindings/bindings/python_client/models/tracker_post.py @@ -1,62 +1,92 @@ import datetime import json -from typing import Any, Dict, List, Tuple, Type, TypeVar, Union, cast +from typing import TYPE_CHECKING, Any, TypeVar, Union +from uuid import UUID -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.alert import Alert -from ..models.erratum import Erratum from ..models.tracker_type import TrackerType from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.alert import Alert + from ..models.erratum import Erratum + + T = TypeVar("T", bound="TrackerPost") -@attr.s(auto_attribs=True) +@_attrs_define class TrackerPost(OSIDBModel): - """Tracker serializer""" - - errata: List[Erratum] + """Tracker serializer + + Attributes: + errata (list['Erratum']): + ps_update_stream (str): + status (str): + resolution (str): + type_ (TrackerType): + uuid (UUID): + embargoed (bool): The embargoed boolean attribute is technically read-only as it just indirectly modifies the + ACLs but is mandatory as it controls the access to the resource. + alerts (list['Alert']): + created_dt (datetime.datetime): + updated_dt (datetime.datetime): The updated_dt timestamp attribute is mandatory on update as it is used to + detect mit-air collisions. + affects (Union[Unset, list[UUID]]): + sync_to_bz (Union[Unset, bool]): Setting sync_to_bz to false disables flaw sync with Bugzilla after this + operation. Use only as part of bulk actions and trigger a flaw bugzilla sync afterwards. Does nothing if BZ is + disabled. 
+ """ + + errata: list["Erratum"] ps_update_stream: str status: str resolution: str - type: TrackerType - uuid: str + type_: TrackerType + uuid: UUID embargoed: bool - alerts: List[Alert] + alerts: list["Alert"] created_dt: datetime.datetime updated_dt: datetime.datetime - affects: Union[Unset, List[str]] = UNSET + affects: Union[Unset, list[UUID]] = UNSET sync_to_bz: Union[Unset, bool] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: - errata: List[Dict[str, Any]] = UNSET + def to_dict(self) -> dict[str, Any]: + errata: list[dict[str, Any]] = UNSET if not isinstance(self.errata, Unset): errata = [] for errata_item_data in self.errata: - errata_item: Dict[str, Any] = UNSET + errata_item: dict[str, Any] = UNSET if not isinstance(errata_item_data, Unset): errata_item = errata_item_data.to_dict() errata.append(errata_item) ps_update_stream = self.ps_update_stream + status = self.status + resolution = self.resolution - type: str = UNSET - if not isinstance(self.type, Unset): - type = TrackerType(self.type).value + type_: str = UNSET + if not isinstance(self.type_, Unset): + type_ = TrackerType(self.type_).value + + uuid: str = UNSET + if not isinstance(self.uuid, Unset): + uuid = str(self.uuid) - uuid = self.uuid embargoed = self.embargoed - alerts: List[Dict[str, Any]] = UNSET + + alerts: list[dict[str, Any]] = UNSET if not isinstance(self.alerts, Unset): alerts = [] for alerts_item_data in self.alerts: - alerts_item: Dict[str, Any] = UNSET + alerts_item: dict[str, Any] = UNSET if not isinstance(alerts_item_data, Unset): alerts_item = alerts_item_data.to_dict() @@ -70,13 +100,19 @@ def to_dict(self) -> Dict[str, Any]: if not isinstance(self.updated_dt, Unset): updated_dt = self.updated_dt.isoformat() - affects: Union[Unset, List[str]] = UNSET + affects: Union[Unset, list[str]] = UNSET if not isinstance(self.affects, Unset): - affects = self.affects + affects = [] + for affects_item_data in self.affects: + affects_item: str = UNSET + if not isinstance(affects_item_data, Unset): + affects_item = str(affects_item_data) + + affects.append(affects_item) sync_to_bz = self.sync_to_bz - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(errata, Unset): field_dict["errata"] = errata @@ -86,8 +122,8 @@ def to_dict(self) -> Dict[str, Any]: field_dict["status"] = status if not isinstance(resolution, Unset): field_dict["resolution"] = resolution - if not isinstance(type, Unset): - field_dict["type"] = type + if not isinstance(type_, Unset): + field_dict["type"] = type_ if not isinstance(uuid, Unset): field_dict["uuid"] = uuid if not isinstance(embargoed, Unset): @@ -105,81 +141,75 @@ def to_dict(self) -> Dict[str, Any]: return field_dict - def to_multipart(self) -> Dict[str, Any]: - errata: Union[Unset, Tuple[None, str, str]] = UNSET + def to_multipart(self) -> dict[str, Any]: + errata: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.errata, Unset): _temp_errata = [] for errata_item_data in self.errata: - errata_item: Dict[str, Any] = UNSET + errata_item: dict[str, Any] = UNSET if not isinstance(errata_item_data, Unset): errata_item = errata_item_data.to_dict() _temp_errata.append(errata_item) - errata = (None, json.dumps(_temp_errata), "application/json") + errata = (None, json.dumps(_temp_errata).encode(), "application/json") - ps_update_stream = ( - 
self.ps_update_stream - if self.ps_update_stream is UNSET - else (None, str(self.ps_update_stream), "text/plain") - ) - status = ( - self.status - if self.status is UNSET - else (None, str(self.status), "text/plain") - ) - resolution = ( - self.resolution - if self.resolution is UNSET - else (None, str(self.resolution), "text/plain") - ) - type: Union[Unset, Tuple[None, str, str]] = UNSET - if not isinstance(self.type, Unset): + ps_update_stream = (None, str(self.ps_update_stream).encode(), "text/plain") - type = TrackerType(self.type).value + status = (None, str(self.status).encode(), "text/plain") - uuid = self.uuid if self.uuid is UNSET else (None, str(self.uuid), "text/plain") - embargoed = ( - self.embargoed - if self.embargoed is UNSET - else (None, str(self.embargoed), "text/plain") - ) - alerts: Union[Unset, Tuple[None, str, str]] = UNSET + resolution = (None, str(self.resolution).encode(), "text/plain") + + type_: Union[Unset, tuple[None, bytes, str]] = UNSET + if not isinstance(self.type_, Unset): + type_ = (None, str(self.type_.value).encode(), "text/plain") + # CHANGE END (3) #} + + uuid: bytes = UNSET + if not isinstance(self.uuid, Unset): + uuid = str(self.uuid) + + embargoed = (None, str(self.embargoed).encode(), "text/plain") + + alerts: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.alerts, Unset): _temp_alerts = [] for alerts_item_data in self.alerts: - alerts_item: Dict[str, Any] = UNSET + alerts_item: dict[str, Any] = UNSET if not isinstance(alerts_item_data, Unset): alerts_item = alerts_item_data.to_dict() _temp_alerts.append(alerts_item) - alerts = (None, json.dumps(_temp_alerts), "application/json") + alerts = (None, json.dumps(_temp_alerts).encode(), "application/json") - created_dt: str = UNSET + created_dt: bytes = UNSET if not isinstance(self.created_dt, Unset): - created_dt = self.created_dt.isoformat() + created_dt = self.created_dt.isoformat().encode() - updated_dt: str = UNSET + updated_dt: bytes = UNSET if not isinstance(self.updated_dt, Unset): - updated_dt = self.updated_dt.isoformat() + updated_dt = self.updated_dt.isoformat().encode() - affects: Union[Unset, Tuple[None, str, str]] = UNSET + affects: Union[Unset, tuple[None, bytes, str]] = UNSET if not isinstance(self.affects, Unset): - _temp_affects = self.affects - affects = (None, json.dumps(_temp_affects), "application/json") + _temp_affects = [] + for affects_item_data in self.affects: + affects_item: str = UNSET + if not isinstance(affects_item_data, Unset): + affects_item = str(affects_item_data) + + _temp_affects.append(affects_item) + affects = (None, json.dumps(_temp_affects).encode(), "application/json") sync_to_bz = ( self.sync_to_bz - if self.sync_to_bz is UNSET - else (None, str(self.sync_to_bz), "text/plain") + if isinstance(self.sync_to_bz, Unset) + else (None, str(self.sync_to_bz).encode(), "text/plain") ) - field_dict: Dict[str, Any] = {} - field_dict.update( - { - key: (None, str(value), "text/plain") - for key, value in self.additional_properties.items() - } - ) + field_dict: dict[str, Any] = {} + for prop_name, prop in self.additional_properties.items(): + field_dict[prop_name] = (None, str(prop).encode(), "text/plain") + if not isinstance(errata, Unset): field_dict["errata"] = errata if not isinstance(ps_update_stream, Unset): @@ -188,8 +218,8 @@ def to_multipart(self) -> Dict[str, Any]: field_dict["status"] = status if not isinstance(resolution, Unset): field_dict["resolution"] = resolution - if not isinstance(type, Unset): - field_dict["type"] = type + if not 
isinstance(type_, Unset): + field_dict["type"] = type_ if not isinstance(uuid, Unset): field_dict["uuid"] = uuid if not isinstance(embargoed, Unset): @@ -208,22 +238,23 @@ def to_multipart(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.alert import Alert + from ..models.erratum import Erratum + d = src_dict.copy() errata = [] _errata = d.pop("errata", UNSET) - if _errata is UNSET: - errata = UNSET - else: - for errata_item_data in _errata or []: - _errata_item = errata_item_data - errata_item: Erratum - if isinstance(_errata_item, Unset): - errata_item = UNSET - else: - errata_item = Erratum.from_dict(_errata_item) + for errata_item_data in _errata or []: + # } + _errata_item = errata_item_data + errata_item: Erratum + if isinstance(_errata_item, Unset): + errata_item = UNSET + else: + errata_item = Erratum.from_dict(_errata_item) - errata.append(errata_item) + errata.append(errata_item) ps_update_stream = d.pop("ps_update_stream", UNSET) @@ -231,32 +262,38 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: resolution = d.pop("resolution", UNSET) - _type = d.pop("type", UNSET) - type: TrackerType - if isinstance(_type, Unset): - type = UNSET + # } + _type_ = d.pop("type", UNSET) + type_: TrackerType + if isinstance(_type_, Unset): + type_ = UNSET else: - type = TrackerType(_type) + type_ = TrackerType(_type_) - uuid = d.pop("uuid", UNSET) + # } + _uuid = d.pop("uuid", UNSET) + uuid: UUID + if isinstance(_uuid, Unset): + uuid = UNSET + else: + uuid = UUID(_uuid) embargoed = d.pop("embargoed", UNSET) alerts = [] _alerts = d.pop("alerts", UNSET) - if _alerts is UNSET: - alerts = UNSET - else: - for alerts_item_data in _alerts or []: - _alerts_item = alerts_item_data - alerts_item: Alert - if isinstance(_alerts_item, Unset): - alerts_item = UNSET - else: - alerts_item = Alert.from_dict(_alerts_item) - - alerts.append(alerts_item) - + for alerts_item_data in _alerts or []: + # } + _alerts_item = alerts_item_data + alerts_item: Alert + if isinstance(_alerts_item, Unset): + alerts_item = UNSET + else: + alerts_item = Alert.from_dict(_alerts_item) + + alerts.append(alerts_item) + + # } _created_dt = d.pop("created_dt", UNSET) created_dt: datetime.datetime if isinstance(_created_dt, Unset): @@ -264,6 +301,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: created_dt = isoparse(_created_dt) + # } _updated_dt = d.pop("updated_dt", UNSET) updated_dt: datetime.datetime if isinstance(_updated_dt, Unset): @@ -271,7 +309,18 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: updated_dt = isoparse(_updated_dt) - affects = cast(List[str], d.pop("affects", UNSET)) + affects = [] + _affects = d.pop("affects", UNSET) + for affects_item_data in _affects or []: + # } + _affects_item = affects_item_data + affects_item: UUID + if isinstance(_affects_item, Unset): + affects_item = UNSET + else: + affects_item = UUID(_affects_item) + + affects.append(affects_item) sync_to_bz = d.pop("sync_to_bz", UNSET) @@ -280,7 +329,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: ps_update_stream=ps_update_stream, status=status, resolution=resolution, - type=type, + type_=type_, uuid=uuid, embargoed=embargoed, alerts=alerts, @@ -296,22 +345,22 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: @staticmethod def get_fields(): return { - "errata": List[Erratum], + "errata": list["Erratum"], "ps_update_stream": 
str, "status": str, "resolution": str, "type": TrackerType, - "uuid": str, + "uuid": UUID, "embargoed": bool, - "alerts": List[Alert], + "alerts": list["Alert"], "created_dt": datetime.datetime, "updated_dt": datetime.datetime, - "affects": List[str], + "affects": list[UUID], "sync_to_bz": bool, } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/tracker_report_data.py b/osidb_bindings/bindings/python_client/models/tracker_report_data.py index af83ebf..436a282 100644 --- a/osidb_bindings/bindings/python_client/models/tracker_report_data.py +++ b/osidb_bindings/bindings/python_client/models/tracker_report_data.py @@ -1,6 +1,7 @@ -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import Any, TypeVar, Union -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from ..models.tracker_type import TrackerType from ..types import UNSET, OSIDBModel, Unset @@ -8,30 +9,37 @@ T = TypeVar("T", bound="TrackerReportData") -@attr.s(auto_attribs=True) +@_attrs_define class TrackerReportData(OSIDBModel): - """ """ - - type: TrackerType + """ + Attributes: + type_ (TrackerType): + external_system_id (str): + status (Union[Unset, str]): + resolution (Union[Unset, str]): + """ + + type_: TrackerType external_system_id: str status: Union[Unset, str] = UNSET resolution: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) - - def to_dict(self) -> Dict[str, Any]: - type: str = UNSET - if not isinstance(self.type, Unset): + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - type = TrackerType(self.type).value + def to_dict(self) -> dict[str, Any]: + type_: str = UNSET + if not isinstance(self.type_, Unset): + type_ = TrackerType(self.type_).value external_system_id = self.external_system_id + status = self.status + resolution = self.resolution - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) - if not isinstance(type, Unset): - field_dict["type"] = type + if not isinstance(type_, Unset): + field_dict["type"] = type_ if not isinstance(external_system_id, Unset): field_dict["external_system_id"] = external_system_id if not isinstance(status, Unset): @@ -42,14 +50,15 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: d = src_dict.copy() - _type = d.pop("type", UNSET) - type: TrackerType - if isinstance(_type, Unset): - type = UNSET + # } + _type_ = d.pop("type", UNSET) + type_: TrackerType + if isinstance(_type_, Unset): + type_ = UNSET else: - type = TrackerType(_type) + type_ = TrackerType(_type_) external_system_id = d.pop("external_system_id", UNSET) @@ -58,7 +67,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: resolution = d.pop("resolution", UNSET) tracker_report_data = cls( - type=type, + type_=type_, external_system_id=external_system_id, status=status, resolution=resolution, @@ -77,7 +86,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git 
a/osidb_bindings/bindings/python_client/models/tracker_suggestion.py b/osidb_bindings/bindings/python_client/models/tracker_suggestion.py index 434e81d..0767c4b 100644 --- a/osidb_bindings/bindings/python_client/models/tracker_suggestion.py +++ b/osidb_bindings/bindings/python_client/models/tracker_suggestion.py @@ -1,44 +1,52 @@ -from typing import Any, Dict, List, Type, TypeVar +from typing import TYPE_CHECKING, Any, TypeVar -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field -from ..models.affect import Affect -from ..models.module_component import ModuleComponent from ..types import UNSET, OSIDBModel, Unset -T = TypeVar("T", bound="TrackerSuggestion") +if TYPE_CHECKING: + from ..models.affect import Affect + from ..models.module_component import ModuleComponent -@attr.s(auto_attribs=True) -class TrackerSuggestion(OSIDBModel): - """ """ +T = TypeVar("T", bound="TrackerSuggestion") - modules_components: List[ModuleComponent] - not_applicable: List[Affect] - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: - modules_components: List[Dict[str, Any]] = UNSET +@_attrs_define +class TrackerSuggestion(OSIDBModel): + """ + Attributes: + modules_components (list['ModuleComponent']): + not_applicable (list['Affect']): + """ + + modules_components: list["ModuleComponent"] + not_applicable: list["Affect"] + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + modules_components: list[dict[str, Any]] = UNSET if not isinstance(self.modules_components, Unset): modules_components = [] for modules_components_item_data in self.modules_components: - modules_components_item: Dict[str, Any] = UNSET + modules_components_item: dict[str, Any] = UNSET if not isinstance(modules_components_item_data, Unset): modules_components_item = modules_components_item_data.to_dict() modules_components.append(modules_components_item) - not_applicable: List[Dict[str, Any]] = UNSET + not_applicable: list[dict[str, Any]] = UNSET if not isinstance(self.not_applicable, Unset): not_applicable = [] for not_applicable_item_data in self.not_applicable: - not_applicable_item: Dict[str, Any] = UNSET + not_applicable_item: dict[str, Any] = UNSET if not isinstance(not_applicable_item_data, Unset): not_applicable_item = not_applicable_item_data.to_dict() not_applicable.append(not_applicable_item) - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(modules_components, Unset): field_dict["modules_components"] = modules_components @@ -48,39 +56,36 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.affect import Affect + from ..models.module_component import ModuleComponent + d = src_dict.copy() modules_components = [] _modules_components = d.pop("modules_components", UNSET) - if _modules_components is UNSET: - modules_components = UNSET - else: - for modules_components_item_data in _modules_components or []: - _modules_components_item = modules_components_item_data - modules_components_item: ModuleComponent - if isinstance(_modules_components_item, Unset): - modules_components_item = UNSET - else: - modules_components_item = ModuleComponent.from_dict( - _modules_components_item - ) + for modules_components_item_data in 
_modules_components or []: + # } + _modules_components_item = modules_components_item_data + modules_components_item: ModuleComponent + if isinstance(_modules_components_item, Unset): + modules_components_item = UNSET + else: + modules_components_item = ModuleComponent.from_dict(_modules_components_item) - modules_components.append(modules_components_item) + modules_components.append(modules_components_item) not_applicable = [] _not_applicable = d.pop("not_applicable", UNSET) - if _not_applicable is UNSET: - not_applicable = UNSET - else: - for not_applicable_item_data in _not_applicable or []: - _not_applicable_item = not_applicable_item_data - not_applicable_item: Affect - if isinstance(_not_applicable_item, Unset): - not_applicable_item = UNSET - else: - not_applicable_item = Affect.from_dict(_not_applicable_item) + for not_applicable_item_data in _not_applicable or []: + # } + _not_applicable_item = not_applicable_item_data + not_applicable_item: Affect + if isinstance(_not_applicable_item, Unset): + not_applicable_item = UNSET + else: + not_applicable_item = Affect.from_dict(_not_applicable_item) - not_applicable.append(not_applicable_item) + not_applicable.append(not_applicable_item) tracker_suggestion = cls( modules_components=modules_components, @@ -93,12 +98,12 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: @staticmethod def get_fields(): return { - "modules_components": List[ModuleComponent], - "not_applicable": List[Affect], + "modules_components": list["ModuleComponent"], + "not_applicable": list["Affect"], } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/tracker_type.py b/osidb_bindings/bindings/python_client/models/tracker_type.py index b5c51b6..1017c8e 100644 --- a/osidb_bindings/bindings/python_client/models/tracker_type.py +++ b/osidb_bindings/bindings/python_client/models/tracker_type.py @@ -2,8 +2,8 @@ class TrackerType(str, Enum): - JIRA = "JIRA" BUGZILLA = "BUGZILLA" + JIRA = "JIRA" def __str__(self) -> str: return str(self.value) diff --git a/osidb_bindings/bindings/python_client/models/trackers_api_v1_file_create_response_200.py b/osidb_bindings/bindings/python_client/models/trackers_api_v1_file_create_response_200.py index 6b03a8c..fd12afa 100644 --- a/osidb_bindings/bindings/python_client/models/trackers_api_v1_file_create_response_200.py +++ b/osidb_bindings/bindings/python_client/models/trackers_api_v1_file_create_response_200.py @@ -1,44 +1,56 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar, Union -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse -from ..models.affect import Affect -from ..models.module_component import ModuleComponent from ..types import UNSET, OSIDBModel, Unset +if TYPE_CHECKING: + from ..models.affect import Affect + from ..models.module_component import ModuleComponent + + T = TypeVar("T", bound="TrackersApiV1FileCreateResponse200") -@attr.s(auto_attribs=True) +@_attrs_define class TrackersApiV1FileCreateResponse200(OSIDBModel): - """ """ - - modules_components: List[ModuleComponent] - not_applicable: List[Affect] + """ + Attributes: + modules_components (list['ModuleComponent']): + not_applicable (list['Affect']): + dt (Union[Unset, datetime.datetime]): + env (Union[Unset, 
str]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ + + modules_components: list["ModuleComponent"] + not_applicable: list["Affect"] dt: Union[Unset, datetime.datetime] = UNSET env: Union[Unset, str] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: - modules_components: List[Dict[str, Any]] = UNSET + def to_dict(self) -> dict[str, Any]: + modules_components: list[dict[str, Any]] = UNSET if not isinstance(self.modules_components, Unset): modules_components = [] for modules_components_item_data in self.modules_components: - modules_components_item: Dict[str, Any] = UNSET + modules_components_item: dict[str, Any] = UNSET if not isinstance(modules_components_item_data, Unset): modules_components_item = modules_components_item_data.to_dict() modules_components.append(modules_components_item) - not_applicable: List[Dict[str, Any]] = UNSET + not_applicable: list[dict[str, Any]] = UNSET if not isinstance(self.not_applicable, Unset): not_applicable = [] for not_applicable_item_data in self.not_applicable: - not_applicable_item: Dict[str, Any] = UNSET + not_applicable_item: dict[str, Any] = UNSET if not isinstance(not_applicable_item_data, Unset): not_applicable_item = not_applicable_item_data.to_dict() @@ -49,10 +61,12 @@ def to_dict(self) -> Dict[str, Any]: dt = self.dt.isoformat() env = self.env + revision = self.revision + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(modules_components, Unset): field_dict["modules_components"] = modules_components @@ -70,40 +84,38 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: + from ..models.affect import Affect + from ..models.module_component import ModuleComponent + d = src_dict.copy() modules_components = [] _modules_components = d.pop("modules_components", UNSET) - if _modules_components is UNSET: - modules_components = UNSET - else: - for modules_components_item_data in _modules_components or []: - _modules_components_item = modules_components_item_data - modules_components_item: ModuleComponent - if isinstance(_modules_components_item, Unset): - modules_components_item = UNSET - else: - modules_components_item = ModuleComponent.from_dict( - _modules_components_item - ) + for modules_components_item_data in _modules_components or []: + # } + _modules_components_item = modules_components_item_data + modules_components_item: ModuleComponent + if isinstance(_modules_components_item, Unset): + modules_components_item = UNSET + else: + modules_components_item = ModuleComponent.from_dict(_modules_components_item) - modules_components.append(modules_components_item) + modules_components.append(modules_components_item) not_applicable = [] _not_applicable = d.pop("not_applicable", UNSET) - if _not_applicable is UNSET: - not_applicable = UNSET - else: - for not_applicable_item_data in _not_applicable or []: - _not_applicable_item = not_applicable_item_data - not_applicable_item: Affect - if isinstance(_not_applicable_item, Unset): - not_applicable_item = UNSET - else: - not_applicable_item = Affect.from_dict(_not_applicable_item) - - 
not_applicable.append(not_applicable_item) - + for not_applicable_item_data in _not_applicable or []: + # } + _not_applicable_item = not_applicable_item_data + not_applicable_item: Affect + if isinstance(_not_applicable_item, Unset): + not_applicable_item = UNSET + else: + not_applicable_item = Affect.from_dict(_not_applicable_item) + + not_applicable.append(not_applicable_item) + + # } _dt = d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -132,8 +144,8 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: @staticmethod def get_fields(): return { - "modules_components": List[ModuleComponent], - "not_applicable": List[Affect], + "modules_components": list["ModuleComponent"], + "not_applicable": list["Affect"], "dt": datetime.datetime, "env": str, "revision": str, @@ -141,7 +153,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/workflows_api_v1_workflows_adjust_create_response_200.py b/osidb_bindings/bindings/python_client/models/workflows_api_v1_workflows_adjust_create_response_200.py index f420865..803d8ba 100644 --- a/osidb_bindings/bindings/python_client/models/workflows_api_v1_workflows_adjust_create_response_200.py +++ b/osidb_bindings/bindings/python_client/models/workflows_api_v1_workflows_adjust_create_response_200.py @@ -1,7 +1,8 @@ import datetime -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import Any, TypeVar, Union -import attr +from attrs import define as _attrs_define +from attrs import field as _attrs_field from dateutil.parser import isoparse from ..types import UNSET, OSIDBModel, Unset @@ -9,26 +10,34 @@ T = TypeVar("T", bound="WorkflowsApiV1WorkflowsAdjustCreateResponse200") -@attr.s(auto_attribs=True) +@_attrs_define class WorkflowsApiV1WorkflowsAdjustCreateResponse200(OSIDBModel): - """ """ + """ + Attributes: + dt (Union[Unset, datetime.datetime]): + env (Union[Unset, str]): + revision (Union[Unset, str]): + version (Union[Unset, str]): + """ dt: Union[Unset, datetime.datetime] = UNSET env: Union[Unset, str] = UNSET revision: Union[Unset, str] = UNSET version: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: dt: Union[Unset, str] = UNSET if not isinstance(self.dt, Unset): dt = self.dt.isoformat() env = self.env + revision = self.revision + version = self.version - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) if not isinstance(dt, Unset): field_dict["dt"] = dt @@ -42,8 +51,9 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T: d = src_dict.copy() + # } _dt = d.pop("dt", UNSET) dt: Union[Unset, datetime.datetime] if isinstance(_dt, Unset): @@ -77,7 +87,7 @@ def get_fields(): } @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/osidb_bindings/bindings/python_client/models/workflows_api_v1_workflows_retrieve_2_response_200.py 
diff --git a/osidb_bindings/bindings/python_client/models/workflows_api_v1_workflows_retrieve_2_response_200.py b/osidb_bindings/bindings/python_client/models/workflows_api_v1_workflows_retrieve_2_response_200.py
index b559b9d..2fbcba5 100644
--- a/osidb_bindings/bindings/python_client/models/workflows_api_v1_workflows_retrieve_2_response_200.py
+++ b/osidb_bindings/bindings/python_client/models/workflows_api_v1_workflows_retrieve_2_response_200.py
@@ -1,7 +1,8 @@
 import datetime
-from typing import Any, Dict, List, Type, TypeVar, Union
+from typing import Any, TypeVar, Union
 
-import attr
+from attrs import define as _attrs_define
+from attrs import field as _attrs_field
 from dateutil.parser import isoparse
 
 from ..types import UNSET, OSIDBModel, Unset
@@ -9,26 +10,34 @@
 T = TypeVar("T", bound="WorkflowsApiV1WorkflowsRetrieve2Response200")
 
 
-@attr.s(auto_attribs=True)
+@_attrs_define
 class WorkflowsApiV1WorkflowsRetrieve2Response200(OSIDBModel):
-    """ """
+    """
+    Attributes:
+        dt (Union[Unset, datetime.datetime]):
+        env (Union[Unset, str]):
+        revision (Union[Unset, str]):
+        version (Union[Unset, str]):
+    """
 
     dt: Union[Unset, datetime.datetime] = UNSET
     env: Union[Unset, str] = UNSET
     revision: Union[Unset, str] = UNSET
     version: Union[Unset, str] = UNSET
-    additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
+    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)
 
-    def to_dict(self) -> Dict[str, Any]:
+    def to_dict(self) -> dict[str, Any]:
         dt: Union[Unset, str] = UNSET
         if not isinstance(self.dt, Unset):
             dt = self.dt.isoformat()
 
         env = self.env
+
         revision = self.revision
+
         version = self.version
 
-        field_dict: Dict[str, Any] = {}
+        field_dict: dict[str, Any] = {}
         field_dict.update(self.additional_properties)
         if not isinstance(dt, Unset):
             field_dict["dt"] = dt
@@ -42,8 +51,9 @@ def to_dict(self) -> Dict[str, Any]:
         return field_dict
 
     @classmethod
-    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
+    def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T:
         d = src_dict.copy()
+        # }
         _dt = d.pop("dt", UNSET)
         dt: Union[Unset, datetime.datetime]
         if isinstance(_dt, Unset):
@@ -77,7 +87,7 @@ def get_fields():
         }
 
     @property
-    def additional_keys(self) -> List[str]:
+    def additional_keys(self) -> list[str]:
         return list(self.additional_properties.keys())
 
     def __getitem__(self, key: str) -> Any:
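Editorial note, not part of the patch: throughout the regenerated files, `typing.Dict`, `typing.List` and `typing.Type` give way to the PEP 585 built-in generics (`dict`, `list`, `type`), which raises the effective minimum to Python 3.9. A sketch of the before/after spelling on a `from_dict`-style classmethod, using a hypothetical class name:

from typing import Any, Dict, Type, TypeVar

T = TypeVar("T", bound="ExampleModel")


class ExampleModel:  # hypothetical; stands in for any generated model
    def __init__(self) -> None:
        self.raw: dict[str, Any] = {}

    # Pre-patch spelling: typing aliases everywhere.
    @classmethod
    def from_dict_old(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        instance = cls()
        instance.raw = dict(src_dict)
        return instance

    # Post-patch spelling: PEP 585 built-in generics (Python 3.9+).
    @classmethod
    def from_dict_new(cls: type[T], src_dict: dict[str, Any]) -> T:
        instance = cls()
        instance.raw = dict(src_dict)
        return instance


keys: list[str] = list(ExampleModel.from_dict_new({"env": "stage"}).raw)
assert keys == ["env"]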
diff --git a/osidb_bindings/bindings/python_client/models/workflows_api_v1_workflows_retrieve_response_200.py b/osidb_bindings/bindings/python_client/models/workflows_api_v1_workflows_retrieve_response_200.py
index b0c51c5..1dc5b3f 100644
--- a/osidb_bindings/bindings/python_client/models/workflows_api_v1_workflows_retrieve_response_200.py
+++ b/osidb_bindings/bindings/python_client/models/workflows_api_v1_workflows_retrieve_response_200.py
@@ -1,7 +1,8 @@
 import datetime
-from typing import Any, Dict, List, Type, TypeVar, Union
+from typing import Any, TypeVar, Union
 
-import attr
+from attrs import define as _attrs_define
+from attrs import field as _attrs_field
 from dateutil.parser import isoparse
 
 from ..types import UNSET, OSIDBModel, Unset
@@ -9,26 +10,34 @@
 T = TypeVar("T", bound="WorkflowsApiV1WorkflowsRetrieveResponse200")
 
 
-@attr.s(auto_attribs=True)
+@_attrs_define
 class WorkflowsApiV1WorkflowsRetrieveResponse200(OSIDBModel):
-    """ """
+    """
+    Attributes:
+        dt (Union[Unset, datetime.datetime]):
+        env (Union[Unset, str]):
+        revision (Union[Unset, str]):
+        version (Union[Unset, str]):
+    """
 
     dt: Union[Unset, datetime.datetime] = UNSET
     env: Union[Unset, str] = UNSET
     revision: Union[Unset, str] = UNSET
     version: Union[Unset, str] = UNSET
-    additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
+    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)
 
-    def to_dict(self) -> Dict[str, Any]:
+    def to_dict(self) -> dict[str, Any]:
         dt: Union[Unset, str] = UNSET
         if not isinstance(self.dt, Unset):
             dt = self.dt.isoformat()
 
         env = self.env
+
         revision = self.revision
+
         version = self.version
 
-        field_dict: Dict[str, Any] = {}
+        field_dict: dict[str, Any] = {}
         field_dict.update(self.additional_properties)
         if not isinstance(dt, Unset):
             field_dict["dt"] = dt
@@ -42,8 +51,9 @@ def to_dict(self) -> Dict[str, Any]:
         return field_dict
 
     @classmethod
-    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
+    def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T:
         d = src_dict.copy()
+        # }
         _dt = d.pop("dt", UNSET)
         dt: Union[Unset, datetime.datetime]
         if isinstance(_dt, Unset):
@@ -77,7 +87,7 @@ def get_fields():
         }
 
     @property
-    def additional_keys(self) -> List[str]:
+    def additional_keys(self) -> list[str]:
         return list(self.additional_properties.keys())
 
     def __getitem__(self, key: str) -> Any:
diff --git a/osidb_bindings/bindings/python_client/models/workflows_healthy_retrieve_response_200.py b/osidb_bindings/bindings/python_client/models/workflows_healthy_retrieve_response_200.py
index b0ee479..9e2ab58 100644
--- a/osidb_bindings/bindings/python_client/models/workflows_healthy_retrieve_response_200.py
+++ b/osidb_bindings/bindings/python_client/models/workflows_healthy_retrieve_response_200.py
@@ -1,7 +1,8 @@
 import datetime
-from typing import Any, Dict, List, Type, TypeVar, Union
+from typing import Any, TypeVar, Union
 
-import attr
+from attrs import define as _attrs_define
+from attrs import field as _attrs_field
 from dateutil.parser import isoparse
 
 from ..types import UNSET, OSIDBModel, Unset
@@ -9,26 +10,34 @@
 T = TypeVar("T", bound="WorkflowsHealthyRetrieveResponse200")
 
 
-@attr.s(auto_attribs=True)
+@_attrs_define
 class WorkflowsHealthyRetrieveResponse200(OSIDBModel):
-    """ """
+    """
+    Attributes:
+        dt (Union[Unset, datetime.datetime]):
+        env (Union[Unset, str]):
+        revision (Union[Unset, str]):
+        version (Union[Unset, str]):
+    """
 
     dt: Union[Unset, datetime.datetime] = UNSET
     env: Union[Unset, str] = UNSET
     revision: Union[Unset, str] = UNSET
     version: Union[Unset, str] = UNSET
-    additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
+    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)
 
-    def to_dict(self) -> Dict[str, Any]:
+    def to_dict(self) -> dict[str, Any]:
         dt: Union[Unset, str] = UNSET
         if not isinstance(self.dt, Unset):
             dt = self.dt.isoformat()
 
         env = self.env
+
         revision = self.revision
+
         version = self.version
 
-        field_dict: Dict[str, Any] = {}
+        field_dict: dict[str, Any] = {}
         field_dict.update(self.additional_properties)
         if not isinstance(dt, Unset):
             field_dict["dt"] = dt
@@ -42,8 +51,9 @@ def to_dict(self) -> Dict[str, Any]:
         return field_dict
 
     @classmethod
-    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
+    def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T:
         d = src_dict.copy()
+        # }
         _dt = d.pop("dt", UNSET)
         dt: Union[Unset, datetime.datetime]
         if isinstance(_dt, Unset):
@@ -77,7 +87,7 @@ def get_fields():
         }
 
     @property
-    def additional_keys(self) -> List[str]:
+    def additional_keys(self) -> list[str]:
         return list(self.additional_properties.keys())
 
     def __getitem__(self, key: str) -> Any:
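Editorial note, not part of the patch: the regenerated `from_dict` / `to_dict` pair keeps the established round-trip behaviour, where keys missing from the input stay `UNSET` and are then omitted again on serialization. A small usage sketch against the model above; the import path assumes the package layout shown in this patch and may differ in a released build.

from osidb_bindings.bindings.python_client.models import (
    WorkflowsHealthyRetrieveResponse200,
)

# Build a model from a partial payload; "dt" is intentionally absent.
status = WorkflowsHealthyRetrieveResponse200.from_dict(
    {"env": "stage", "revision": "abc123", "version": "4.0.0"}
)
assert status.env == "stage"

payload = status.to_dict()
# "dt" was never supplied, so it remained UNSET and is left out of the output.
assert "dt" not in payload
assert payload["env"] == "stage"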
diff --git a/osidb_bindings/bindings/python_client/models/workflows_retrieve_response_200.py b/osidb_bindings/bindings/python_client/models/workflows_retrieve_response_200.py
index ac58f7c..ab284e6 100644
--- a/osidb_bindings/bindings/python_client/models/workflows_retrieve_response_200.py
+++ b/osidb_bindings/bindings/python_client/models/workflows_retrieve_response_200.py
@@ -1,7 +1,8 @@
 import datetime
-from typing import Any, Dict, List, Type, TypeVar, Union
+from typing import Any, TypeVar, Union
 
-import attr
+from attrs import define as _attrs_define
+from attrs import field as _attrs_field
 from dateutil.parser import isoparse
 
 from ..types import UNSET, OSIDBModel, Unset
@@ -9,26 +10,34 @@
 T = TypeVar("T", bound="WorkflowsRetrieveResponse200")
 
 
-@attr.s(auto_attribs=True)
+@_attrs_define
 class WorkflowsRetrieveResponse200(OSIDBModel):
-    """ """
+    """
+    Attributes:
+        dt (Union[Unset, datetime.datetime]):
+        env (Union[Unset, str]):
+        revision (Union[Unset, str]):
+        version (Union[Unset, str]):
+    """
 
     dt: Union[Unset, datetime.datetime] = UNSET
     env: Union[Unset, str] = UNSET
    revision: Union[Unset, str] = UNSET
     version: Union[Unset, str] = UNSET
-    additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
+    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)
 
-    def to_dict(self) -> Dict[str, Any]:
+    def to_dict(self) -> dict[str, Any]:
         dt: Union[Unset, str] = UNSET
         if not isinstance(self.dt, Unset):
             dt = self.dt.isoformat()
 
         env = self.env
+
         revision = self.revision
+
         version = self.version
 
-        field_dict: Dict[str, Any] = {}
+        field_dict: dict[str, Any] = {}
         field_dict.update(self.additional_properties)
         if not isinstance(dt, Unset):
             field_dict["dt"] = dt
@@ -42,8 +51,9 @@ def to_dict(self) -> Dict[str, Any]:
         return field_dict
 
     @classmethod
-    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
+    def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T:
         d = src_dict.copy()
+        # }
         _dt = d.pop("dt", UNSET)
         dt: Union[Unset, datetime.datetime]
         if isinstance(_dt, Unset):
@@ -77,7 +87,7 @@ def get_fields():
         }
 
     @property
-    def additional_keys(self) -> List[str]:
+    def additional_keys(self) -> list[str]:
         return list(self.additional_properties.keys())
 
     def __getitem__(self, key: str) -> Any:
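Editorial note, not part of the patch: the mapping-style helpers (`additional_keys`, `__getitem__`) and the `field_dict.update(self.additional_properties)` call survive the regeneration, so keys outside the schema can still ride along with a response model. An illustrative sketch, assuming the same import path as above and the usual `additional_properties` passthrough in the elided parts of `from_dict` and `__getitem__`:

from osidb_bindings.bindings.python_client.models import WorkflowsRetrieveResponse200

resp = WorkflowsRetrieveResponse200.from_dict({"env": "prod"})

# Anything stored in additional_properties is exposed via the mapping helpers
# and merged back into the serialized payload by to_dict().
resp.additional_properties["managed_by"] = "workflows-service"
assert resp.additional_keys == ["managed_by"]
assert resp["managed_by"] == "workflows-service"
assert resp.to_dict()["managed_by"] == "workflows-service"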
diff --git a/osidb_bindings/bindings/python_client/types.py b/osidb_bindings/bindings/python_client/types.py
index 0e4898a..9f96cf1 100644
--- a/osidb_bindings/bindings/python_client/types.py
+++ b/osidb_bindings/bindings/python_client/types.py
@@ -1,33 +1,27 @@
-""" Contains some shared types for properties """
-from typing import (
-    BinaryIO,
-    Generic,
-    MutableMapping,
-    Optional,
-    TextIO,
-    Tuple,
-    TypeVar,
-    Union,
-)
+"""Contains some shared types for properties"""
 
-import attr
+from collections.abc import MutableMapping
+from http import HTTPStatus
+from typing import BinaryIO, Generic, Literal, Optional, TypeVar
+
+from attrs import define
 
 
 class Unset:
-    def __bool__(self) -> bool:
+    def __bool__(self) -> Literal[False]:
         return False
 
 
 UNSET: Unset = Unset()
 
-FileJsonType = Tuple[Optional[str], Union[BinaryIO, TextIO], Optional[str]]
+FileJsonType = tuple[Optional[str], BinaryIO, Optional[str]]
 
 
-@attr.s(auto_attribs=True)
+@define
 class File:
     """Contains information for file uploads"""
 
-    payload: Union[BinaryIO, TextIO]
+    payload: BinaryIO
     file_name: Optional[str] = None
    mime_type: Optional[str] = None
 
@@ -39,18 +33,20 @@ def to_tuple(self) -> FileJsonType:
 T = TypeVar("T")
 
 
-@attr.s(auto_attribs=True)
+@define
 class Response(Generic[T]):
     """A response from an endpoint"""
 
-    status_code: int
+    status_code: HTTPStatus
     content: bytes
     headers: MutableMapping[str, str]
     parsed: Optional[T]
 
 
-__all__ = ["File", "Response", "FileJsonType"]
-
-
 class OSIDBModel:
     """Base class for all 'non-primitive' and 'non-enum' models"""
+
+    pass
+
+
+__all__ = ["UNSET", "File", "FileJsonType", "OSIDBModel", "Response", "Unset"]
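Editorial note, not part of the patch: the reworked shared types tighten a few contracts. `Unset.__bool__` is now typed as `Literal[False]`, `File.payload` only accepts binary streams, and `Response.status_code` becomes an `http.HTTPStatus`. A hedged usage sketch follows; the file name and payload bytes are made up for illustration.

import io
from http import HTTPStatus

from osidb_bindings.bindings.python_client.types import UNSET, File, Response, Unset

# UNSET stays falsy, so "or"-style defaulting keeps working.
limit = UNSET
assert isinstance(limit, Unset)
assert (limit or 50) == 50

# File payloads are binary-only now; TextIO is no longer part of the contract.
upload = File(payload=io.BytesIO(b"cve,score\n"), file_name="epss.csv", mime_type="text/csv")
print(upload.to_tuple())

# Response.status_code is an HTTPStatus, which still compares equal to the raw int.
resp: Response[None] = Response(
    status_code=HTTPStatus.OK, content=b"{}", headers={}, parsed=None
)
assert resp.status_code == 200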