diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 67fc4d6c..d9f5e2bb 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -7,7 +7,7 @@ repos: - id: trailing-whitespace - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. - rev: v0.6.0 + rev: v0.8.1 hooks: # Run the linter. - id: ruff diff --git a/docs/source/conf.py b/docs/source/conf.py index e1e0845a..890a6e40 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -125,9 +125,7 @@ class PatchedPythonDomain(PythonDomain): def resolve_xref(self, env, fromdocname, builder, typ, target, node, contnode): if 'refspecific' in node: del node['refspecific'] - return super(PatchedPythonDomain, self).resolve_xref( - env, fromdocname, builder, typ, target, node, contnode - ) + return super().resolve_xref(env, fromdocname, builder, typ, target, node, contnode) def skip(app, what, name, obj, would_skip, options): diff --git a/podman/__init__.py b/podman/__init__.py index 3f259ea2..8e342bd9 100644 --- a/podman/__init__.py +++ b/podman/__init__.py @@ -1,9 +1,5 @@ """Podman client module.""" -import sys - -assert sys.version_info >= (3, 6), "Python 3.6 or greater is required." - from podman.client import PodmanClient, from_env from podman.version import __version__ diff --git a/podman/api/__init__.py b/podman/api/__init__.py index 8ad99451..0079d0d2 100644 --- a/podman/api/__init__.py +++ b/podman/api/__init__.py @@ -17,20 +17,12 @@ DEFAULT_CHUNK_SIZE = 2 * 1024 * 1024 -try: - from typing import Literal -except (ImportError, ModuleNotFoundError): - try: - from typing_extensions import Literal - except (ImportError, ModuleNotFoundError): - from podman.api.typing_extensions import Literal # pylint: disable=ungrouped-imports # isort: unique-list __all__ = [ 'APIClient', 'COMPATIBLE_VERSION', 'DEFAULT_CHUNK_SIZE', - 'Literal', 'VERSION', 'cached_property', 'create_tar', diff --git a/podman/api/adapter_utils.py b/podman/api/adapter_utils.py index 2ec7cf15..b5a92f48 100644 --- a/podman/api/adapter_utils.py +++ b/podman/api/adapter_utils.py @@ -1,6 +1,7 @@ """Utility functions for working with Adapters.""" -from typing import NamedTuple, Mapping +from typing import NamedTuple +from collections.abc import Mapping def _key_normalizer(key_class: NamedTuple, request_context: Mapping) -> Mapping: diff --git a/podman/api/client.py b/podman/api/client.py index 062c3cad..3fa26ff3 100644 --- a/podman/api/client.py +++ b/podman/api/client.py @@ -3,7 +3,14 @@ import json import warnings import urllib.parse -from typing import Any, ClassVar, IO, Iterable, List, Mapping, Optional, Tuple, Type, Union +from typing import ( + Any, + ClassVar, + IO, + Optional, + Union, +) +from collections.abc import Iterable, Mapping import requests from requests.adapters import HTTPAdapter @@ -20,12 +27,12 @@ str, bytes, Mapping[str, Any], - Iterable[Tuple[str, Optional[str]]], + Iterable[tuple[str, Optional[str]]], IO, ] """Type alias for request data parameter.""" -_Timeout = Union[None, float, Tuple[float, float], Tuple[float, None]] +_Timeout = Union[None, float, tuple[float, float], tuple[float, None]] """Type alias for request timeout parameter.""" @@ -58,7 +65,7 @@ def __getattr__(self, item: str): """Forward any query for an attribute not defined in this proxy class to wrapped class.""" return getattr(self._response, item) - def raise_for_status(self, not_found: Type[APIError] = NotFound) -> None: + def raise_for_status(self, not_found: type[APIError] = NotFound) -> None: """Raises exception when Podman service reports 
one.""" if self.status_code < 400: return @@ -81,7 +88,7 @@ class APIClient(requests.Session): # Abstract methods (delete,get,head,post) are specialized and pylint cannot walk hierarchy. # pylint: disable=too-many-instance-attributes,arguments-differ,arguments-renamed - supported_schemes: ClassVar[List[str]] = ( + supported_schemes: ClassVar[list[str]] = ( "unix", "http+unix", "ssh", @@ -156,7 +163,7 @@ def __init__( self.mount("http://", HTTPAdapter(**http_adapter_kwargs)) self.mount("https://", HTTPAdapter(**http_adapter_kwargs)) else: - assert False, "APIClient.supported_schemes changed without adding a branch here." + raise PodmanError("APIClient.supported_schemes changed without adding a branch here.") self.version = version or VERSION self.path_prefix = f"/v{self.version}/libpod/" @@ -235,7 +242,7 @@ def get( self, path: Union[str, bytes], *, - params: Union[None, bytes, Mapping[str, List[str]]] = None, + params: Union[None, bytes, Mapping[str, list[str]]] = None, headers: Optional[Mapping[str, str]] = None, timeout: _Timeout = None, stream: Optional[bool] = False, diff --git a/podman/api/http_utils.py b/podman/api/http_utils.py index 0f327c91..e0bb062e 100644 --- a/podman/api/http_utils.py +++ b/podman/api/http_utils.py @@ -3,16 +3,17 @@ import base64 import collections.abc import json -from typing import Dict, List, Mapping, Optional, Union, Any +from typing import Optional, Union, Any +from collections.abc import Mapping -def prepare_filters(filters: Union[str, List[str], Mapping[str, str]]) -> Optional[str]: - """Return filters as an URL quoted JSON Dict[str, List[Any]].""" +def prepare_filters(filters: Union[str, list[str], Mapping[str, str]]) -> Optional[str]: + """Return filters as an URL quoted JSON dict[str, list[Any]].""" if filters is None or len(filters) == 0: return None - criteria: Dict[str, List[str]] = {} + criteria: dict[str, list[str]] = {} if isinstance(filters, str): _format_string(filters, criteria) elif isinstance(filters, collections.abc.Mapping): @@ -42,12 +43,12 @@ def _format_dict(filters, criteria): for key, value in filters.items(): if value is None: continue - value = str(value) + str_value = str(value) if key in criteria: - criteria[key].append(value) + criteria[key].append(str_value) else: - criteria[key] = [value] + criteria[key] = [str_value] def _format_string(filters, criteria): @@ -67,7 +68,7 @@ def prepare_body(body: Mapping[str, Any]) -> str: return json.dumps(body, sort_keys=True) -def _filter_values(mapping: Mapping[str, Any], recursion=False) -> Dict[str, Any]: +def _filter_values(mapping: Mapping[str, Any], recursion=False) -> dict[str, Any]: """Returns a canonical dictionary with values == None or empty Iterables removed. Dictionary is walked using recursion. 
@@ -91,7 +92,7 @@ def _filter_values(mapping: Mapping[str, Any], recursion=False) -> Dict[str, Any else: proposal = value - if not recursion and proposal not in (None, str(), [], {}): + if not recursion and proposal not in (None, "", [], {}): canonical[key] = proposal elif recursion and proposal not in (None, [], {}): canonical[key] = proposal @@ -99,5 +100,5 @@ def _filter_values(mapping: Mapping[str, Any], recursion=False) -> Dict[str, Any return canonical -def encode_auth_header(auth_config: Dict[str, str]) -> str: +def encode_auth_header(auth_config: dict[str, str]) -> str: return base64.urlsafe_b64encode(json.dumps(auth_config).encode('utf-8')) diff --git a/podman/api/parse_utils.py b/podman/api/parse_utils.py index c07762ea..70ba09c2 100644 --- a/podman/api/parse_utils.py +++ b/podman/api/parse_utils.py @@ -5,13 +5,14 @@ import json import struct from datetime import datetime -from typing import Any, Dict, Iterator, Optional, Tuple, Union +from typing import Any, Optional, Union +from collections.abc import Iterator from requests import Response from .output_utils import demux_output -def parse_repository(name: str) -> Tuple[str, Optional[str]]: +def parse_repository(name: str) -> tuple[str, Optional[str]]: """Parse repository image name from tag or digest Returns: @@ -31,7 +32,7 @@ def parse_repository(name: str) -> Tuple[str, Optional[str]]: return name, None -def decode_header(value: Optional[str]) -> Dict[str, Any]: +def decode_header(value: Optional[str]) -> dict[str, Any]: """Decode a base64 JSON header value.""" if value is None: return {} @@ -82,7 +83,7 @@ def frames(response: Response) -> Iterator[bytes]: def stream_frames( response: Response, demux: bool = False -) -> Iterator[Union[bytes, Tuple[bytes, bytes]]]: +) -> Iterator[Union[bytes, tuple[bytes, bytes]]]: """Returns each frame from multiplexed streamed payload. If ``demux`` then output will be tuples where the first position is ``STDOUT`` and the second @@ -109,7 +110,7 @@ def stream_frames( def stream_helper( response: Response, decode_to_json: bool = False -) -> Union[Iterator[bytes], Iterator[Dict[str, Any]]]: +) -> Union[Iterator[bytes], Iterator[dict[str, Any]]]: """Helper to stream results and optionally decode to json""" for value in response.iter_lines(): if decode_to_json: diff --git a/podman/api/tar_utils.py b/podman/api/tar_utils.py index 950cc44f..7470e19a 100644 --- a/podman/api/tar_utils.py +++ b/podman/api/tar_utils.py @@ -6,12 +6,12 @@ import tarfile import tempfile from fnmatch import fnmatch -from typing import BinaryIO, List, Optional +from typing import BinaryIO, Optional import sys -def prepare_containerignore(anchor: str) -> List[str]: +def prepare_containerignore(anchor: str) -> list[str]: """Return the list of patterns for filenames to exclude. .containerignore takes precedence over .dockerignore. @@ -24,7 +24,7 @@ def prepare_containerignore(anchor: str) -> List[str]: with ignore.open(encoding='utf-8') as file: return list( filter( - lambda l: l and not l.startswith("#"), + lambda L: L and not L.startswith("#"), (line.strip() for line in file.readlines()), ) ) @@ -53,7 +53,7 @@ def prepare_containerfile(anchor: str, dockerfile: str) -> str: def create_tar( - anchor: str, name: str = None, exclude: List[str] = None, gzip: bool = False + anchor: str, name: str = None, exclude: list[str] = None, gzip: bool = False ) -> BinaryIO: """Create a tarfile from context_dir to send to Podman service. 
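Note (illustration, not part of the diff): the tar_utils.py hunks are likewise annotation-only (list[str] replaces List[str]) plus a rename of the lint-flagged single-letter lambda variable; behavior is unchanged. A hedged usage sketch of the two helpers whose signatures appear in this hunk, with a made-up build-context path for illustration:

    from podman.api.tar_utils import create_tar, prepare_containerignore

    context_dir = "./build-context"  # hypothetical directory holding a Containerfile
    patterns = prepare_containerignore(context_dir)  # list[str] from .containerignore/.dockerignore
    archive = create_tar(context_dir, exclude=patterns, gzip=False)  # BinaryIO to send to the service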
@@ -119,7 +119,7 @@ def add_filter(info: tarfile.TarInfo) -> Optional[tarfile.TarInfo]: return open(name.name, "rb") # pylint: disable=consider-using-with -def _exclude_matcher(path: str, exclude: List[str]) -> bool: +def _exclude_matcher(path: str, exclude: list[str]) -> bool: """Returns True if path matches an entry in exclude. Note: diff --git a/podman/api/typing_extensions.py b/podman/api/typing_extensions.py deleted file mode 100644 index ebddfa27..00000000 --- a/podman/api/typing_extensions.py +++ /dev/null @@ -1,3040 +0,0 @@ -"""Provide typing.Literal when not supported by OS release. - -FIXME: Remove file when supported Python >= 3.8 -""" - -# Code is backup for missing typing_extensions... -# pylint: disable-all - -import abc -import collections -import collections.abc as collections_abc -import contextlib -import operator -import typing - -# These are used by Protocol implementation -# We use internal typing helpers here, but this significantly reduces -# code duplication. (Also this is only until Protocol is in typing.) -from typing import Generic, Callable, TypeVar, Tuple - -import sys - -# After PEP 560, internal typing API was substantially reworked. -# This is especially important for Protocol class which uses internal APIs -# quite extensively. -PEP_560 = sys.version_info[:3] >= (3, 7, 0) - -if PEP_560: - GenericMeta = TypingMeta = type -else: - from typing import GenericMeta, TypingMeta -OLD_GENERICS = False -try: - from typing import _type_vars, _next_in_mro, _type_check -except ImportError: - OLD_GENERICS = True -try: - from typing import _subs_tree # noqa - - SUBS_TREE = True -except ImportError: - SUBS_TREE = False -try: - from typing import _tp_cache -except ImportError: - - def _tp_cache(x): - return x - - -try: - from typing import _TypingEllipsis, _TypingEmpty -except ImportError: - - class _TypingEllipsis: - pass - - class _TypingEmpty: - pass - - -# The two functions below are copies of typing internal helpers. -# They are needed by _ProtocolMeta - - -def _no_slots_copy(dct): - dict_copy = dict(dct) - if '__slots__' in dict_copy: - for slot in dict_copy['__slots__']: - dict_copy.pop(slot, None) - return dict_copy - - -def _check_generic(cls, parameters): - if not cls.__parameters__: - raise TypeError("%s is not a generic class" % repr(cls)) - alen = len(parameters) - elen = len(cls.__parameters__) - if alen != elen: - raise TypeError( - "Too %s parameters for %s; actual %s, expected %s" - % ("many" if alen > elen else "few", repr(cls), alen, elen) - ) - - -if hasattr(typing, '_generic_new'): - _generic_new = typing._generic_new -else: - # Note: The '_generic_new(...)' function is used as a part of the - # process of creating a generic type and was added to the typing module - # as of Python 3.5.3. - # - # We've defined '_generic_new(...)' below to exactly match the behavior - # implemented in older versions of 'typing' bundled with Python 3.5.0 to - # 3.5.2. This helps eliminate redundancy when defining collection types - # like 'Deque' later. - # - # See https://github.com/python/typing/pull/308 for more details -- in - # particular, compare and contrast the definition of types like - # 'typing.List' before and after the merge. 
- - def _generic_new(base_cls, cls, *args, **kwargs): - return base_cls.__new__(cls, *args, **kwargs) - - -# See https://github.com/python/typing/pull/439 -if hasattr(typing, '_geqv'): - from typing import _geqv - - _geqv_defined = True -else: - _geqv = None - _geqv_defined = False - -if sys.version_info[:2] >= (3, 6): - import _collections_abc - - _check_methods_in_mro = _collections_abc._check_methods -else: - - def _check_methods_in_mro(C, *methods): - mro = C.__mro__ - for method in methods: - for B in mro: - if method in B.__dict__: - if B.__dict__[method] is None: - return NotImplemented - break - else: - return NotImplemented - return True - - -# Please keep __all__ alphabetized within each category. -__all__ = [ - # Super-special typing primitives. - 'ClassVar', - 'Concatenate', - 'Final', - 'ParamSpec', - 'Type', - # ABCs (from collections.abc). - # The following are added depending on presence - # of their non-generic counterparts in stdlib: - # 'Awaitable', - # 'AsyncIterator', - # 'AsyncIterable', - # 'Coroutine', - # 'AsyncGenerator', - # 'AsyncContextManager', - # 'ChainMap', - # Concrete collection types. - 'ContextManager', - 'Counter', - 'Deque', - 'DefaultDict', - 'OrderedDict', - 'TypedDict', - # Structural checks, a.k.a. protocols. - 'SupportsIndex', - # One-off things. - 'final', - 'IntVar', - 'Literal', - 'NewType', - 'overload', - 'Text', - 'TypeAlias', - 'TypeGuard', - 'TYPE_CHECKING', -] - -# Annotated relies on substitution trees of pep 560. It will not work for -# versions of typing older than 3.5.3 -HAVE_ANNOTATED = PEP_560 or SUBS_TREE - -if PEP_560: - __all__.extend(["get_args", "get_origin", "get_type_hints"]) - -if HAVE_ANNOTATED: - __all__.append("Annotated") - -# Protocols are hard to backport to the original version of typing 3.5.0 -HAVE_PROTOCOLS = sys.version_info[:3] != (3, 5, 0) - -if HAVE_PROTOCOLS: - __all__.extend(['Protocol', 'runtime', 'runtime_checkable']) - -# TODO -if hasattr(typing, 'NoReturn'): - NoReturn = typing.NoReturn -elif hasattr(typing, '_FinalTypingBase'): - - class _NoReturn(typing._FinalTypingBase, _root=True): - """Special type indicating functions that never return. - Example:: - - from typing import NoReturn - - def stop() -> NoReturn: - raise Exception('no way') - - This type is invalid in other positions, e.g., ``List[NoReturn]`` - will fail in static type checkers. - """ - - __slots__ = () - - def __instancecheck__(self, obj): - raise TypeError("NoReturn cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - raise TypeError("NoReturn cannot be used with issubclass().") - - NoReturn = _NoReturn(_root=True) -else: - - class _NoReturnMeta(typing.TypingMeta): - """Metaclass for NoReturn""" - - def __new__(cls, name, bases, namespace, _root=False): - return super().__new__(cls, name, bases, namespace, _root=_root) - - def __instancecheck__(self, obj): - raise TypeError("NoReturn cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - raise TypeError("NoReturn cannot be used with issubclass().") - - class NoReturn(typing.Final, metaclass=_NoReturnMeta, _root=True): - """Special type indicating functions that never return. - Example:: - - from typing import NoReturn - - def stop() -> NoReturn: - raise Exception('no way') - - This type is invalid in other positions, e.g., ``List[NoReturn]`` - will fail in static type checkers. - """ - - __slots__ = () - - -# Some unconstrained type variables. These are used by the container types. -# (These are not for export.) 
-T = typing.TypeVar('T') # Any type. -KT = typing.TypeVar('KT') # Key type. -VT = typing.TypeVar('VT') # Value type. -T_co = typing.TypeVar('T_co', covariant=True) # Any type covariant containers. -V_co = typing.TypeVar('V_co', covariant=True) # Any type covariant containers. -VT_co = typing.TypeVar('VT_co', covariant=True) # Value type covariant containers. -T_contra = typing.TypeVar('T_contra', contravariant=True) # Ditto contravariant. - -if hasattr(typing, 'ClassVar'): - ClassVar = typing.ClassVar -elif hasattr(typing, '_FinalTypingBase'): - - class _ClassVar(typing._FinalTypingBase, _root=True): - """Special type construct to mark class variables. - - An annotation wrapped in ClassVar indicates that a given - attribute is intended to be used as a class variable and - should not be set on instances of that class. Usage:: - - class Starship: - stats: ClassVar[Dict[str, int]] = {} # class variable - damage: int = 10 # instance variable - - ClassVar accepts only types and cannot be further subscribed. - - Note that ClassVar is not a class itself, and should not - be used with isinstance() or issubclass(). - """ - - __slots__ = ('__type__',) - - def __init__(self, tp=None, **kwds): - self.__type__ = tp - - def __getitem__(self, item): - cls = type(self) - if self.__type__ is None: - return cls( - typing._type_check( - item, '{} accepts only single type.'.format(cls.__name__[1:]) - ), - _root=True, - ) - raise TypeError('{} cannot be further subscripted'.format(cls.__name__[1:])) - - def _eval_type(self, globalns, localns): - new_tp = typing._eval_type(self.__type__, globalns, localns) - if new_tp == self.__type__: - return self - return type(self)(new_tp, _root=True) - - def __repr__(self): - r = super().__repr__() - if self.__type__ is not None: - r += '[{}]'.format(typing._type_repr(self.__type__)) - return r - - def __hash__(self): - return hash((type(self).__name__, self.__type__)) - - def __eq__(self, other): - if not isinstance(other, _ClassVar): - return NotImplemented - if self.__type__ is not None: - return self.__type__ == other.__type__ - return self is other - - ClassVar = _ClassVar(_root=True) -else: - - class _ClassVarMeta(typing.TypingMeta): - """Metaclass for ClassVar""" - - def __new__(cls, name, bases, namespace, tp=None, _root=False): - self = super().__new__(cls, name, bases, namespace, _root=_root) - if tp is not None: - self.__type__ = tp - return self - - def __instancecheck__(self, obj): - raise TypeError("ClassVar cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - raise TypeError("ClassVar cannot be used with issubclass().") - - def __getitem__(self, item): - cls = type(self) - if self.__type__ is not None: - raise TypeError('{} cannot be further subscripted'.format(cls.__name__[1:])) - - param = typing._type_check( - item, '{} accepts only single type.'.format(cls.__name__[1:]) - ) - return cls(self.__name__, self.__bases__, dict(self.__dict__), tp=param, _root=True) - - def _eval_type(self, globalns, localns): - new_tp = typing._eval_type(self.__type__, globalns, localns) - if new_tp == self.__type__: - return self - return type(self)( - self.__name__, self.__bases__, dict(self.__dict__), tp=self.__type__, _root=True - ) - - def __repr__(self): - r = super().__repr__() - if self.__type__ is not None: - r += '[{}]'.format(typing._type_repr(self.__type__)) - return r - - def __hash__(self): - return hash((type(self).__name__, self.__type__)) - - def __eq__(self, other): - if not isinstance(other, ClassVar): - return NotImplemented - if 
self.__type__ is not None: - return self.__type__ == other.__type__ - return self is other - - class ClassVar(typing.Final, metaclass=_ClassVarMeta, _root=True): - """Special type construct to mark class variables. - - An annotation wrapped in ClassVar indicates that a given - attribute is intended to be used as a class variable and - should not be set on instances of that class. Usage:: - - class Starship: - stats: ClassVar[Dict[str, int]] = {} # class variable - damage: int = 10 # instance variable - - ClassVar accepts only types and cannot be further subscribed. - - Note that ClassVar is not a class itself, and should not - be used with isinstance() or issubclass(). - """ - - __type__ = None - - -# On older versions of typing there is an internal class named "Final". -if hasattr(typing, 'Final') and sys.version_info[:2] >= (3, 7): - Final = typing.Final -elif sys.version_info[:2] >= (3, 7): - - class _FinalForm(typing._SpecialForm, _root=True): - def __repr__(self): - return 'typing_extensions.' + self._name - - def __getitem__(self, parameters): - item = typing._type_check(parameters, '{} accepts only single type'.format(self._name)) - return _GenericAlias(self, (item,)) - - Final = _FinalForm( - 'Final', - doc="""A special typing construct to indicate that a name - cannot be re-assigned or overridden in a subclass. - For example: - - MAX_SIZE: Final = 9000 - MAX_SIZE += 1 # Error reported by type checker - - class Connection: - TIMEOUT: Final[int] = 10 - class FastConnector(Connection): - TIMEOUT = 1 # Error reported by type checker - - There is no runtime checking of these properties.""", - ) -elif hasattr(typing, '_FinalTypingBase'): - - class _Final(typing._FinalTypingBase, _root=True): - """A special typing construct to indicate that a name - cannot be re-assigned or overridden in a subclass. - For example: - - MAX_SIZE: Final = 9000 - MAX_SIZE += 1 # Error reported by type checker - - class Connection: - TIMEOUT: Final[int] = 10 - class FastConnector(Connection): - TIMEOUT = 1 # Error reported by type checker - - There is no runtime checking of these properties. 
- """ - - __slots__ = ('__type__',) - - def __init__(self, tp=None, **kwds): - self.__type__ = tp - - def __getitem__(self, item): - cls = type(self) - if self.__type__ is None: - return cls( - typing._type_check( - item, '{} accepts only single type.'.format(cls.__name__[1:]) - ), - _root=True, - ) - raise TypeError('{} cannot be further subscripted'.format(cls.__name__[1:])) - - def _eval_type(self, globalns, localns): - new_tp = typing._eval_type(self.__type__, globalns, localns) - if new_tp == self.__type__: - return self - return type(self)(new_tp, _root=True) - - def __repr__(self): - r = super().__repr__() - if self.__type__ is not None: - r += '[{}]'.format(typing._type_repr(self.__type__)) - return r - - def __hash__(self): - return hash((type(self).__name__, self.__type__)) - - def __eq__(self, other): - if not isinstance(other, _Final): - return NotImplemented - if self.__type__ is not None: - return self.__type__ == other.__type__ - return self is other - - Final = _Final(_root=True) -else: - - class _FinalMeta(typing.TypingMeta): - """Metaclass for Final""" - - def __new__(cls, name, bases, namespace, tp=None, _root=False): - self = super().__new__(cls, name, bases, namespace, _root=_root) - if tp is not None: - self.__type__ = tp - return self - - def __instancecheck__(self, obj): - raise TypeError("Final cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - raise TypeError("Final cannot be used with issubclass().") - - def __getitem__(self, item): - cls = type(self) - if self.__type__ is not None: - raise TypeError('{} cannot be further subscripted'.format(cls.__name__[1:])) - - param = typing._type_check( - item, '{} accepts only single type.'.format(cls.__name__[1:]) - ) - return cls(self.__name__, self.__bases__, dict(self.__dict__), tp=param, _root=True) - - def _eval_type(self, globalns, localns): - new_tp = typing._eval_type(self.__type__, globalns, localns) - if new_tp == self.__type__: - return self - return type(self)( - self.__name__, self.__bases__, dict(self.__dict__), tp=self.__type__, _root=True - ) - - def __repr__(self): - r = super().__repr__() - if self.__type__ is not None: - r += '[{}]'.format(typing._type_repr(self.__type__)) - return r - - def __hash__(self): - return hash((type(self).__name__, self.__type__)) - - def __eq__(self, other): - if not isinstance(other, Final): - return NotImplemented - if self.__type__ is not None: - return self.__type__ == other.__type__ - return self is other - - class Final(typing.Final, metaclass=_FinalMeta, _root=True): - """A special typing construct to indicate that a name - cannot be re-assigned or overridden in a subclass. - For example: - - MAX_SIZE: Final = 9000 - MAX_SIZE += 1 # Error reported by type checker - - class Connection: - TIMEOUT: Final[int] = 10 - class FastConnector(Connection): - TIMEOUT = 1 # Error reported by type checker - - There is no runtime checking of these properties. - """ - - __type__ = None - - -if hasattr(typing, 'final'): - final = typing.final -else: - - def final(f): - """This decorator can be used to indicate to type checkers that - the decorated method cannot be overridden, and decorated class - cannot be subclassed. For example: - - class Base: - @final - def done(self) -> None: - ... - class Sub(Base): - def done(self) -> None: # Error reported by type checker - ... - @final - class Leaf: - ... - class Other(Leaf): # Error reported by type checker - ... - - There is no runtime checking of these properties. 
- """ - return f - - -def IntVar(name): - return TypeVar(name) - - -if hasattr(typing, 'Literal'): - Literal = typing.Literal -elif sys.version_info[:2] >= (3, 7): - - class _LiteralForm(typing._SpecialForm, _root=True): - def __repr__(self): - return 'typing_extensions.' + self._name - - def __getitem__(self, parameters): - return _GenericAlias(self, parameters) - - Literal = _LiteralForm( - 'Literal', - doc="""A type that can be used to indicate to type checkers - that the corresponding value has a value literally equivalent - to the provided parameter. For example: - - var: Literal[4] = 4 - - The type checker understands that 'var' is literally equal to - the value 4 and no other value. - - Literal[...] cannot be subclassed. There is no runtime - checking verifying that the parameter is actually a value - instead of a type.""", - ) -elif hasattr(typing, '_FinalTypingBase'): - - class _Literal(typing._FinalTypingBase, _root=True): - """A type that can be used to indicate to type checkers that the - corresponding value has a value literally equivalent to the - provided parameter. For example: - - var: Literal[4] = 4 - - The type checker understands that 'var' is literally equal to the - value 4 and no other value. - - Literal[...] cannot be subclassed. There is no runtime checking - verifying that the parameter is actually a value instead of a type. - """ - - __slots__ = ('__values__',) - - def __init__(self, values=None, **kwds): - self.__values__ = values - - def __getitem__(self, values): - cls = type(self) - if self.__values__ is None: - if not isinstance(values, tuple): - values = (values,) - return cls(values, _root=True) - raise TypeError('{} cannot be further subscripted'.format(cls.__name__[1:])) - - def _eval_type(self, globalns, localns): - return self - - def __repr__(self): - r = super().__repr__() - if self.__values__ is not None: - r += '[{}]'.format(', '.join(map(typing._type_repr, self.__values__))) - return r - - def __hash__(self): - return hash((type(self).__name__, self.__values__)) - - def __eq__(self, other): - if not isinstance(other, _Literal): - return NotImplemented - if self.__values__ is not None: - return self.__values__ == other.__values__ - return self is other - - Literal = _Literal(_root=True) -else: - - class _LiteralMeta(typing.TypingMeta): - """Metaclass for Literal""" - - def __new__(cls, name, bases, namespace, values=None, _root=False): - self = super().__new__(cls, name, bases, namespace, _root=_root) - if values is not None: - self.__values__ = values - return self - - def __instancecheck__(self, obj): - raise TypeError("Literal cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - raise TypeError("Literal cannot be used with issubclass().") - - def __getitem__(self, item): - cls = type(self) - if self.__values__ is not None: - raise TypeError('{} cannot be further subscripted'.format(cls.__name__[1:])) - - if not isinstance(item, tuple): - item = (item,) - return cls(self.__name__, self.__bases__, dict(self.__dict__), values=item, _root=True) - - def _eval_type(self, globalns, localns): - return self - - def __repr__(self): - r = super().__repr__() - if self.__values__ is not None: - r += '[{}]'.format(', '.join(map(typing._type_repr, self.__values__))) - return r - - def __hash__(self): - return hash((type(self).__name__, self.__values__)) - - def __eq__(self, other): - if not isinstance(other, Literal): - return NotImplemented - if self.__values__ is not None: - return self.__values__ == other.__values__ - return self is 
other - - class Literal(typing.Final, metaclass=_LiteralMeta, _root=True): - """A type that can be used to indicate to type checkers that the - corresponding value has a value literally equivalent to the - provided parameter. For example: - - var: Literal[4] = 4 - - The type checker understands that 'var' is literally equal to the - value 4 and no other value. - - Literal[...] cannot be subclassed. There is no runtime checking - verifying that the parameter is actually a value instead of a type. - """ - - __values__ = None - - -def _overload_dummy(*args, **kwds): - """Helper for @overload to raise when called.""" - raise NotImplementedError( - "You should not call an overloaded function. " - "A series of @overload-decorated functions " - "outside a stub module should always be followed " - "by an implementation that is not @overload-ed." - ) - - -def overload(func): - """Decorator for overloaded functions/methods. - - In a stub file, place two or more stub definitions for the same - function in a row, each decorated with @overload. For example: - - @overload - def utf8(value: None) -> None: ... - @overload - def utf8(value: bytes) -> bytes: ... - @overload - def utf8(value: str) -> bytes: ... - - In a non-stub file (i.e. a regular .py file), do the same but - follow it with an implementation. The implementation should *not* - be decorated with @overload. For example: - - @overload - def utf8(value: None) -> None: ... - @overload - def utf8(value: bytes) -> bytes: ... - @overload - def utf8(value: str) -> bytes: ... - def utf8(value): - # implementation goes here - """ - return _overload_dummy - - -# This is not a real generic class. Don't use outside annotations. -if hasattr(typing, 'Type'): - Type = typing.Type -else: - # Internal type variable used for Type[]. - CT_co = typing.TypeVar('CT_co', covariant=True, bound=type) - - class Type(typing.Generic[CT_co], extra=type): - """A special construct usable to annotate class objects. - - For example, suppose we have the following classes:: - - class User: ... # Abstract base for User classes - class BasicUser(User): ... - class ProUser(User): ... - class TeamUser(User): ... - - And a function that takes a class argument that's a subclass of - User and returns an instance of the corresponding class:: - - U = TypeVar('U', bound=User) - def new_user(user_class: Type[U]) -> U: - user = user_class() - # (Here we could write the user object to a database) - return user - joe = new_user(BasicUser) - - At this point the type checker knows that joe has type BasicUser. - """ - - __slots__ = () - - -# Various ABCs mimicking those in collections.abc. -# A few are simply re-exported for completeness. - - -def _define_guard(type_name): - """ - Returns True if the given type isn't defined in typing but - is defined in collections_abc. - - Adds the type to __all__ if the collection is found in either - typing or collection_abc. - """ - if hasattr(typing, type_name): - __all__.append(type_name) - globals()[type_name] = getattr(typing, type_name) - return False - elif hasattr(collections_abc, type_name): - __all__.append(type_name) - return True - else: - return False - - -class _ExtensionsGenericMeta(GenericMeta): - def __subclasscheck__(self, subclass): - """This mimics a more modern GenericMeta.__subclasscheck__() logic - (that does not have problems with recursion) to work around interactions - between collections, typing, and typing_extensions on older - versions of Python, see https://github.com/python/typing/issues/501. 
- """ - if sys.version_info[:3] >= (3, 5, 3) or sys.version_info[:3] < (3, 5, 0): - if self.__origin__ is not None: - if sys._getframe(1).f_globals['__name__'] not in ['abc', 'functools']: - raise TypeError( - "Parameterized generics cannot be used with class or instance checks" - ) - return False - if not self.__extra__: - return super().__subclasscheck__(subclass) - res = self.__extra__.__subclasshook__(subclass) - if res is not NotImplemented: - return res - if self.__extra__ in subclass.__mro__: - return True - for scls in self.__extra__.__subclasses__(): - if isinstance(scls, GenericMeta): - continue - if issubclass(subclass, scls): - return True - return False - - -if _define_guard('Awaitable'): - - class Awaitable( - typing.Generic[T_co], metaclass=_ExtensionsGenericMeta, extra=collections_abc.Awaitable - ): - __slots__ = () - - -if _define_guard('Coroutine'): - - class Coroutine( - Awaitable[V_co], - typing.Generic[T_co, T_contra, V_co], - metaclass=_ExtensionsGenericMeta, - extra=collections_abc.Coroutine, - ): - __slots__ = () - - -if _define_guard('AsyncIterable'): - - class AsyncIterable( - typing.Generic[T_co], metaclass=_ExtensionsGenericMeta, extra=collections_abc.AsyncIterable - ): - __slots__ = () - - -if _define_guard('AsyncIterator'): - - class AsyncIterator( - AsyncIterable[T_co], metaclass=_ExtensionsGenericMeta, extra=collections_abc.AsyncIterator - ): - __slots__ = () - - -if hasattr(typing, 'Deque'): - Deque = typing.Deque -elif _geqv_defined: - - class Deque( - collections.deque, - typing.MutableSequence[T], - metaclass=_ExtensionsGenericMeta, - extra=collections.deque, - ): - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, Deque): - return collections.deque(*args, **kwds) - return _generic_new(collections.deque, cls, *args, **kwds) - -else: - - class Deque( - collections.deque, - typing.MutableSequence[T], - metaclass=_ExtensionsGenericMeta, - extra=collections.deque, - ): - __slots__ = () - - def __new__(cls, *args, **kwds): - if cls._gorg is Deque: - return collections.deque(*args, **kwds) - return _generic_new(collections.deque, cls, *args, **kwds) - - -if hasattr(typing, 'ContextManager'): - ContextManager = typing.ContextManager -elif hasattr(contextlib, 'AbstractContextManager'): - - class ContextManager( - typing.Generic[T_co], - metaclass=_ExtensionsGenericMeta, - extra=contextlib.AbstractContextManager, - ): - __slots__ = () - -else: - - class ContextManager(typing.Generic[T_co]): - __slots__ = () - - def __enter__(self): - return self - - @abc.abstractmethod - def __exit__(self, exc_type, exc_value, traceback): - return None - - @classmethod - def __subclasshook__(cls, C): - if cls is ContextManager: - # In Python 3.6+, it is possible to set a method to None to - # explicitly indicate that the class does not implement an ABC - # (https://bugs.python.org/issue25958), but we do not support - # that pattern here because this fallback class is only used - # in Python 3.5 and earlier. 
- if any("__enter__" in B.__dict__ for B in C.__mro__) and any( - "__exit__" in B.__dict__ for B in C.__mro__ - ): - return True - return NotImplemented - - -if hasattr(typing, 'AsyncContextManager'): - AsyncContextManager = typing.AsyncContextManager - __all__.append('AsyncContextManager') -elif hasattr(contextlib, 'AbstractAsyncContextManager'): - - class AsyncContextManager( - typing.Generic[T_co], - metaclass=_ExtensionsGenericMeta, - extra=contextlib.AbstractAsyncContextManager, - ): - __slots__ = () - - __all__.append('AsyncContextManager') -elif sys.version_info[:2] >= (3, 5): - exec( - """ -class AsyncContextManager(typing.Generic[T_co]): - __slots__ = () - - async def __aenter__(self): - return self - - @abc.abstractmethod - async def __aexit__(self, exc_type, exc_value, traceback): - return None - - @classmethod - def __subclasshook__(cls, C): - if cls is AsyncContextManager: - return _check_methods_in_mro(C, "__aenter__", "__aexit__") - return NotImplemented - -__all__.append('AsyncContextManager') -""" - ) - -if hasattr(typing, 'DefaultDict'): - DefaultDict = typing.DefaultDict -elif _geqv_defined: - - class DefaultDict( - collections.defaultdict, - typing.MutableMapping[KT, VT], - metaclass=_ExtensionsGenericMeta, - extra=collections.defaultdict, - ): - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, DefaultDict): - return collections.defaultdict(*args, **kwds) - return _generic_new(collections.defaultdict, cls, *args, **kwds) - -else: - - class DefaultDict( - collections.defaultdict, - typing.MutableMapping[KT, VT], - metaclass=_ExtensionsGenericMeta, - extra=collections.defaultdict, - ): - __slots__ = () - - def __new__(cls, *args, **kwds): - if cls._gorg is DefaultDict: - return collections.defaultdict(*args, **kwds) - return _generic_new(collections.defaultdict, cls, *args, **kwds) - - -if hasattr(typing, 'OrderedDict'): - OrderedDict = typing.OrderedDict -elif (3, 7, 0) <= sys.version_info[:3] < (3, 7, 2): - OrderedDict = typing._alias(collections.OrderedDict, (KT, VT)) -elif _geqv_defined: - - class OrderedDict( - collections.OrderedDict, - typing.MutableMapping[KT, VT], - metaclass=_ExtensionsGenericMeta, - extra=collections.OrderedDict, - ): - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, OrderedDict): - return collections.OrderedDict(*args, **kwds) - return _generic_new(collections.OrderedDict, cls, *args, **kwds) - -else: - - class OrderedDict( - collections.OrderedDict, - typing.MutableMapping[KT, VT], - metaclass=_ExtensionsGenericMeta, - extra=collections.OrderedDict, - ): - __slots__ = () - - def __new__(cls, *args, **kwds): - if cls._gorg is OrderedDict: - return collections.OrderedDict(*args, **kwds) - return _generic_new(collections.OrderedDict, cls, *args, **kwds) - - -if hasattr(typing, 'Counter'): - Counter = typing.Counter -elif (3, 5, 0) <= sys.version_info[:3] <= (3, 5, 1): - assert _geqv_defined - _TInt = typing.TypeVar('_TInt') - - class _CounterMeta(typing.GenericMeta): - """Metaclass for Counter""" - - def __getitem__(self, item): - return super().__getitem__((item, int)) - - class Counter( - collections.Counter, typing.Dict[T, int], metaclass=_CounterMeta, extra=collections.Counter - ): - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, Counter): - return collections.Counter(*args, **kwds) - return _generic_new(collections.Counter, cls, *args, **kwds) - -elif _geqv_defined: - - class Counter( - collections.Counter, - typing.Dict[T, int], - metaclass=_ExtensionsGenericMeta, - 
extra=collections.Counter, - ): - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, Counter): - return collections.Counter(*args, **kwds) - return _generic_new(collections.Counter, cls, *args, **kwds) - -else: - - class Counter( - collections.Counter, - typing.Dict[T, int], - metaclass=_ExtensionsGenericMeta, - extra=collections.Counter, - ): - __slots__ = () - - def __new__(cls, *args, **kwds): - if cls._gorg is Counter: - return collections.Counter(*args, **kwds) - return _generic_new(collections.Counter, cls, *args, **kwds) - - -if hasattr(typing, 'ChainMap'): - ChainMap = typing.ChainMap - __all__.append('ChainMap') -elif hasattr(collections, 'ChainMap'): - # ChainMap only exists in 3.3+ - if _geqv_defined: - - class ChainMap( - collections.ChainMap, - typing.MutableMapping[KT, VT], - metaclass=_ExtensionsGenericMeta, - extra=collections.ChainMap, - ): - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, ChainMap): - return collections.ChainMap(*args, **kwds) - return _generic_new(collections.ChainMap, cls, *args, **kwds) - - else: - - class ChainMap( - collections.ChainMap, - typing.MutableMapping[KT, VT], - metaclass=_ExtensionsGenericMeta, - extra=collections.ChainMap, - ): - __slots__ = () - - def __new__(cls, *args, **kwds): - if cls._gorg is ChainMap: - return collections.ChainMap(*args, **kwds) - return _generic_new(collections.ChainMap, cls, *args, **kwds) - - __all__.append('ChainMap') - -if _define_guard('AsyncGenerator'): - - class AsyncGenerator( - AsyncIterator[T_co], - typing.Generic[T_co, T_contra], - metaclass=_ExtensionsGenericMeta, - extra=collections_abc.AsyncGenerator, - ): - __slots__ = () - - -if hasattr(typing, 'NewType'): - NewType = typing.NewType -else: - - def NewType(name, tp): - """NewType creates simple unique types with almost zero - runtime overhead. NewType(name, tp) is considered a subtype of tp - by static type checkers. At runtime, NewType(name, tp) returns - a dummy function that simply returns its argument. Usage:: - - UserId = NewType('UserId', int) - - def name_by_id(user_id: UserId) -> str: - ... - - UserId('user') # Fails type check - - name_by_id(42) # Fails type check - name_by_id(UserId(42)) # OK - - num = UserId(5) + 1 # type: int - """ - - def new_type(x): - return x - - new_type.__name__ = name - new_type.__supertype__ = tp - return new_type - - -if hasattr(typing, 'Text'): - Text = typing.Text -else: - Text = str - -if hasattr(typing, 'TYPE_CHECKING'): - TYPE_CHECKING = typing.TYPE_CHECKING -else: - # Constant that's True when type checking, but False here. 
- TYPE_CHECKING = False - - -def _gorg(cls): - """This function exists for compatibility with old typing versions.""" - assert isinstance(cls, GenericMeta) - if hasattr(cls, '_gorg'): - return cls._gorg - while cls.__origin__ is not None: - cls = cls.__origin__ - return cls - - -if OLD_GENERICS: - - def _next_in_mro(cls): # noqa - """This function exists for compatibility with old typing versions.""" - next_in_mro = object - for i, c in enumerate(cls.__mro__[:-1]): - if isinstance(c, GenericMeta) and _gorg(c) is Generic: - next_in_mro = cls.__mro__[i + 1] - return next_in_mro - - -_PROTO_WHITELIST = [ - 'Callable', - 'Awaitable', - 'Iterable', - 'Iterator', - 'AsyncIterable', - 'AsyncIterator', - 'Hashable', - 'Sized', - 'Container', - 'Collection', - 'Reversible', - 'ContextManager', - 'AsyncContextManager', -] - - -def _get_protocol_attrs(cls): - attrs = set() - for base in cls.__mro__[:-1]: # without object - if base.__name__ in ('Protocol', 'Generic'): - continue - annotations = getattr(base, '__annotations__', {}) - for attr in list(base.__dict__.keys()) + list(annotations.keys()): - if not attr.startswith('_abc_') and attr not in ( - '__abstractmethods__', - '__annotations__', - '__weakref__', - '_is_protocol', - '_is_runtime_protocol', - '__dict__', - '__args__', - '__slots__', - '__next_in_mro__', - '__parameters__', - '__origin__', - '__orig_bases__', - '__extra__', - '__tree_hash__', - '__doc__', - '__subclasshook__', - '__init__', - '__new__', - '__module__', - '_MutableMapping__marker', - '_gorg', - ): - attrs.add(attr) - return attrs - - -def _is_callable_members_only(cls): - return all(callable(getattr(cls, attr, None)) for attr in _get_protocol_attrs(cls)) - - -if hasattr(typing, 'Protocol'): - Protocol = typing.Protocol -elif HAVE_PROTOCOLS and not PEP_560: - - def _no_init(self, *args, **kwargs): - if type(self)._is_protocol: - raise TypeError('Protocols cannot be instantiated') - - class _ProtocolMeta(GenericMeta): - """Internal metaclass for Protocol. - - This exists so Protocol classes can be generic without deriving - from Generic. - """ - - if not OLD_GENERICS: - - def __new__( - cls, - name, - bases, - namespace, - tvars=None, - args=None, - origin=None, - extra=None, - orig_bases=None, - ): - # This is just a version copied from GenericMeta.__new__ that - # includes "Protocol" special treatment. (Comments removed for brevity.) - assert extra is None # Protocols should not have extra - if tvars is not None: - assert origin is not None - assert all(isinstance(t, TypeVar) for t in tvars), tvars - else: - tvars = _type_vars(bases) - gvars = None - for base in bases: - if base is Generic: - raise TypeError("Cannot inherit from plain Generic") - if isinstance(base, GenericMeta) and base.__origin__ in ( - Generic, - Protocol, - ): - if gvars is not None: - raise TypeError( - "Cannot inherit from Generic[...] or" - " Protocol[...] multiple times." 
- ) - gvars = base.__parameters__ - if gvars is None: - gvars = tvars - else: - tvarset = set(tvars) - gvarset = set(gvars) - if not tvarset <= gvarset: - raise TypeError( - "Some type variables (%s) are not listed in %s[%s]" - % ( - ", ".join(str(t) for t in tvars if t not in gvarset), - ( - "Generic" - if any(b.__origin__ is Generic for b in bases) - else "Protocol" - ), - ", ".join(str(g) for g in gvars), - ) - ) - tvars = gvars - - initial_bases = bases - if extra is not None and type(extra) is abc.ABCMeta and extra not in bases: - bases = (extra,) + bases - bases = tuple(_gorg(b) if isinstance(b, GenericMeta) else b for b in bases) - if any(isinstance(b, GenericMeta) and b is not Generic for b in bases): - bases = tuple(b for b in bases if b is not Generic) - namespace.update({'__origin__': origin, '__extra__': extra}) - self = super(GenericMeta, cls).__new__(cls, name, bases, namespace, _root=True) - super(GenericMeta, self).__setattr__('_gorg', self if not origin else _gorg(origin)) - self.__parameters__ = tvars - self.__args__ = ( - tuple( - ... if a is _TypingEllipsis else () if a is _TypingEmpty else a - for a in args - ) - if args - else None - ) - self.__next_in_mro__ = _next_in_mro(self) - if orig_bases is None: - self.__orig_bases__ = initial_bases - elif origin is not None: - self._abc_registry = origin._abc_registry - self._abc_cache = origin._abc_cache - if hasattr(self, '_subs_tree'): - self.__tree_hash__ = ( - hash(self._subs_tree()) if origin else super(GenericMeta, self).__hash__() - ) - return self - - def __init__(cls, *args, **kwargs): - super().__init__(*args, **kwargs) - if not cls.__dict__.get('_is_protocol', None): - cls._is_protocol = any( - b is Protocol or isinstance(b, _ProtocolMeta) and b.__origin__ is Protocol - for b in cls.__bases__ - ) - if cls._is_protocol: - for base in cls.__mro__[1:]: - if not ( - base in (object, Generic) - or base.__module__ == 'collections.abc' - and base.__name__ in _PROTO_WHITELIST - or isinstance(base, TypingMeta) - and base._is_protocol - or isinstance(base, GenericMeta) - and base.__origin__ is Generic - ): - raise TypeError( - 'Protocols can only inherit from other protocols, got %r' % base - ) - - cls.__init__ = _no_init - - def _proto_hook(other): - if not cls.__dict__.get('_is_protocol', None): - return NotImplemented - if not isinstance(other, type): - # Same error as for issubclass(1, int) - raise TypeError('issubclass() arg 1 must be a class') - for attr in _get_protocol_attrs(cls): - for base in other.__mro__: - if attr in base.__dict__: - if base.__dict__[attr] is None: - return NotImplemented - break - annotations = getattr(base, '__annotations__', {}) - if ( - isinstance(annotations, typing.Mapping) - and attr in annotations - and isinstance(other, _ProtocolMeta) - and other._is_protocol - ): - break - else: - return NotImplemented - return True - - if '__subclasshook__' not in cls.__dict__: - cls.__subclasshook__ = _proto_hook - - def __instancecheck__(self, instance): - # We need this method for situations where attributes are - # assigned in __init__. 
- if ( - not getattr(self, '_is_protocol', False) or _is_callable_members_only(self) - ) and issubclass(instance.__class__, self): - return True - if self._is_protocol: - if all( - hasattr(instance, attr) - and ( - not callable(getattr(self, attr, None)) - or getattr(instance, attr) is not None - ) - for attr in _get_protocol_attrs(self) - ): - return True - return super(GenericMeta, self).__instancecheck__(instance) - - def __subclasscheck__(self, cls): - if self.__origin__ is not None: - if sys._getframe(1).f_globals['__name__'] not in ['abc', 'functools']: - raise TypeError( - "Parameterized generics cannot be used with class or instance checks" - ) - return False - if self.__dict__.get('_is_protocol', None) and not self.__dict__.get( - '_is_runtime_protocol', None - ): - if sys._getframe(1).f_globals['__name__'] in ['abc', 'functools', 'typing']: - return False - raise TypeError( - "Instance and class checks can only be used with @runtime protocols" - ) - if self.__dict__.get('_is_runtime_protocol', None) and not _is_callable_members_only( - self - ): - if sys._getframe(1).f_globals['__name__'] in ['abc', 'functools', 'typing']: - return super(GenericMeta, self).__subclasscheck__(cls) - raise TypeError("Protocols with non-method members don't support issubclass()") - return super(GenericMeta, self).__subclasscheck__(cls) - - if not OLD_GENERICS: - - @_tp_cache - def __getitem__(self, params): - # We also need to copy this from GenericMeta.__getitem__ to get - # special treatment of "Protocol". (Comments removed for brevity.) - if not isinstance(params, tuple): - params = (params,) - if not params and _gorg(self) is not Tuple: - raise TypeError("Parameter list to %s[...] cannot be empty" % self.__qualname__) - msg = "Parameters to generic types must be types." - params = tuple(_type_check(p, msg) for p in params) - if self in (Generic, Protocol): - if not all(isinstance(p, TypeVar) for p in params): - raise TypeError("Parameters to %r[...] must all be type variables" % self) - if len(set(params)) != len(params): - raise TypeError("Parameters to %r[...] must all be unique" % self) - tvars = params - args = params - elif self in (Tuple, Callable): - tvars = _type_vars(params) - args = params - elif self.__origin__ in (Generic, Protocol): - raise TypeError("Cannot subscript already-subscripted %s" % repr(self)) - else: - _check_generic(self, params) - tvars = _type_vars(params) - args = params - - prepend = (self,) if self.__origin__ is None else () - return self.__class__( - self.__name__, - prepend + self.__bases__, - _no_slots_copy(self.__dict__), - tvars=tvars, - args=args, - origin=self, - extra=self.__extra__, - orig_bases=self.__orig_bases__, - ) - - class Protocol(metaclass=_ProtocolMeta): - """Base class for protocol classes. Protocol classes are defined as:: - - class Proto(Protocol): - def meth(self) -> int: - ... - - Such classes are primarily used with static type checkers that recognize - structural subtyping (static duck-typing), for example:: - - class C: - def meth(self) -> int: - return 0 - - def func(x: Proto) -> int: - return x.meth() - - func(C()) # Passes static type check - - See PEP 544 for details. Protocol classes decorated with - @typing_extensions.runtime act as simple-minded runtime protocol that checks - only the presence of given attributes, ignoring their type signatures. - - Protocol classes can be generic, they are defined as:: - - class GenProto({bases}): - def meth(self) -> T: - ... 
- """ - - __slots__ = () - _is_protocol = True - - def __new__(cls, *args, **kwds): - if _gorg(cls) is Protocol: - raise TypeError( - "Type Protocol cannot be instantiated; it can be used only as a base class" - ) - if OLD_GENERICS: - return _generic_new(_next_in_mro(cls), cls, *args, **kwds) - return _generic_new(cls.__next_in_mro__, cls, *args, **kwds) - - if Protocol.__doc__ is not None: - Protocol.__doc__ = Protocol.__doc__.format( - bases="Protocol, Generic[T]" if OLD_GENERICS else "Protocol[T]" - ) - -elif PEP_560: - from typing import _type_check, _GenericAlias, _collect_type_vars # noqa - - def _no_init(self, *args, **kwargs): - if type(self)._is_protocol: - raise TypeError('Protocols cannot be instantiated') - - class _ProtocolMeta(abc.ABCMeta): - # This metaclass is a bit unfortunate and exists only because of the lack - # of __instancehook__. - def __instancecheck__(cls, instance): - # We need this method for situations where attributes are - # assigned in __init__. - if ( - not getattr(cls, '_is_protocol', False) or _is_callable_members_only(cls) - ) and issubclass(instance.__class__, cls): - return True - if cls._is_protocol: - if all( - hasattr(instance, attr) - and ( - not callable(getattr(cls, attr, None)) - or getattr(instance, attr) is not None - ) - for attr in _get_protocol_attrs(cls) - ): - return True - return super().__instancecheck__(instance) - - class Protocol(metaclass=_ProtocolMeta): - # There is quite a lot of overlapping code with typing.Generic. - # Unfortunately it is hard to avoid this while these live in two different - # modules. The duplicated code will be removed when Protocol is moved to typing. - """Base class for protocol classes. Protocol classes are defined as:: - - class Proto(Protocol): - def meth(self) -> int: - ... - - Such classes are primarily used with static type checkers that recognize - structural subtyping (static duck-typing), for example:: - - class C: - def meth(self) -> int: - return 0 - - def func(x: Proto) -> int: - return x.meth() - - func(C()) # Passes static type check - - See PEP 544 for details. Protocol classes decorated with - @typing_extensions.runtime act as simple-minded runtime protocol that checks - only the presence of given attributes, ignoring their type signatures. - - Protocol classes can be generic, they are defined as:: - - class GenProto(Protocol[T]): - def meth(self) -> T: - ... - """ - - __slots__ = () - _is_protocol = True - - def __new__(cls, *args, **kwds): - if cls is Protocol: - raise TypeError( - "Type Protocol cannot be instantiated; it can only be used as a base class" - ) - return super().__new__(cls) - - @_tp_cache - def __class_getitem__(cls, params): - if not isinstance(params, tuple): - params = (params,) - if not params and cls is not Tuple: - raise TypeError( - "Parameter list to {}[...] cannot be empty".format(cls.__qualname__) - ) - msg = "Parameters to generic types must be types." - params = tuple(_type_check(p, msg) for p in params) - if cls is Protocol: - # Generic can only be subscripted with unique type variables. - if not all(isinstance(p, TypeVar) for p in params): - i = 0 - while isinstance(params[i], TypeVar): - i += 1 - raise TypeError( - "Parameters to Protocol[...] must all be type variables." - " Parameter {} is {}".format(i + 1, params[i]) - ) - if len(set(params)) != len(params): - raise TypeError("Parameters to Protocol[...] must all be unique") - else: - # Subscripting a regular Generic subclass. 
- _check_generic(cls, params) - return _GenericAlias(cls, params) - - def __init_subclass__(cls, *args, **kwargs): - tvars = [] - if '__orig_bases__' in cls.__dict__: - error = Generic in cls.__orig_bases__ - else: - error = Generic in cls.__bases__ - if error: - raise TypeError("Cannot inherit from plain Generic") - if '__orig_bases__' in cls.__dict__: - tvars = _collect_type_vars(cls.__orig_bases__) - # Look for Generic[T1, ..., Tn] or Protocol[T1, ..., Tn]. - # If found, tvars must be a subset of it. - # If not found, tvars is it. - # Also check for and reject plain Generic, - # and reject multiple Generic[...] and/or Protocol[...]. - gvars = None - for base in cls.__orig_bases__: - if isinstance(base, _GenericAlias) and base.__origin__ in (Generic, Protocol): - # for error messages - the_base = 'Generic' if base.__origin__ is Generic else 'Protocol' - if gvars is not None: - raise TypeError( - "Cannot inherit from Generic[...]" - " and/or Protocol[...] multiple types." - ) - gvars = base.__parameters__ - if gvars is None: - gvars = tvars - else: - tvarset = set(tvars) - gvarset = set(gvars) - if not tvarset <= gvarset: - s_vars = ', '.join(str(t) for t in tvars if t not in gvarset) - s_args = ', '.join(str(g) for g in gvars) - raise TypeError( - "Some type variables ({}) are not listed in {}[{}]".format( - s_vars, the_base, s_args - ) - ) - tvars = gvars - cls.__parameters__ = tuple(tvars) - - # Determine if this is a protocol or a concrete subclass. - if not cls.__dict__.get('_is_protocol', None): - cls._is_protocol = any(b is Protocol for b in cls.__bases__) - - # Set (or override) the protocol subclass hook. - def _proto_hook(other): - if not cls.__dict__.get('_is_protocol', None): - return NotImplemented - if not getattr(cls, '_is_runtime_protocol', False): - if sys._getframe(2).f_globals['__name__'] in ['abc', 'functools']: - return NotImplemented - raise TypeError( - "Instance and class checks can only be used with @runtime protocols" - ) - if not _is_callable_members_only(cls): - if sys._getframe(2).f_globals['__name__'] in ['abc', 'functools']: - return NotImplemented - raise TypeError("Protocols with non-method members don't support issubclass()") - if not isinstance(other, type): - # Same error as for issubclass(1, int) - raise TypeError('issubclass() arg 1 must be a class') - for attr in _get_protocol_attrs(cls): - for base in other.__mro__: - if attr in base.__dict__: - if base.__dict__[attr] is None: - return NotImplemented - break - annotations = getattr(base, '__annotations__', {}) - if ( - isinstance(annotations, typing.Mapping) - and attr in annotations - and isinstance(other, _ProtocolMeta) - and other._is_protocol - ): - break - else: - return NotImplemented - return True - - if '__subclasshook__' not in cls.__dict__: - cls.__subclasshook__ = _proto_hook - - # We have nothing more to do for non-protocols. - if not cls._is_protocol: - return - - # Check consistency of bases. - for base in cls.__bases__: - if not ( - base in (object, Generic) - or base.__module__ == 'collections.abc' - and base.__name__ in _PROTO_WHITELIST - or isinstance(base, _ProtocolMeta) - and base._is_protocol - ): - raise TypeError( - 'Protocols can only inherit from other protocols, got %r' % base - ) - cls.__init__ = _no_init - - -if hasattr(typing, 'runtime_checkable'): - runtime_checkable = typing.runtime_checkable -elif HAVE_PROTOCOLS: - - def runtime_checkable(cls): - """Mark a protocol class as a runtime protocol, so that it - can be used with isinstance() and issubclass(). 
Raise TypeError - if applied to a non-protocol class. - - This allows a simple-minded structural check very similar to the - one-offs in collections.abc such as Hashable. - """ - if not isinstance(cls, _ProtocolMeta) or not cls._is_protocol: - raise TypeError( - '@runtime_checkable can be only applied to protocol classes, got %r' % cls - ) - cls._is_runtime_protocol = True - return cls - - -if HAVE_PROTOCOLS: - # Exists for backwards compatibility. - runtime = runtime_checkable - -if hasattr(typing, 'SupportsIndex'): - SupportsIndex = typing.SupportsIndex -elif HAVE_PROTOCOLS: - - @runtime_checkable - class SupportsIndex(Protocol): - __slots__ = () - - @abc.abstractmethod - def __index__(self) -> int: - pass - - -if sys.version_info >= (3, 9, 2): - # The standard library TypedDict in Python 3.8 does not store runtime information - # about which (if any) keys are optional. See https://bugs.python.org/issue38834 - # The standard library TypedDict in Python 3.9.0/1 does not honour the "total" - # keyword with old-style TypedDict(). See https://bugs.python.org/issue42059 - TypedDict = typing.TypedDict -else: - - def _check_fails(cls, other): - try: - if sys._getframe(1).f_globals['__name__'] not in ['abc', 'functools', 'typing']: - # Typed dicts are only for static structural subtyping. - raise TypeError('TypedDict does not support instance and class checks') - except (AttributeError, ValueError): - pass - return False - - def _dict_new(*args, **kwargs): - if not args: - raise TypeError('TypedDict.__new__(): not enough arguments') - _, args = args[0], args[1:] # allow the "cls" keyword be passed - return dict(*args, **kwargs) - - _dict_new.__text_signature__ = '($cls, _typename, _fields=None, /, **kwargs)' - - def _typeddict_new(*args, total=True, **kwargs): - if not args: - raise TypeError('TypedDict.__new__(): not enough arguments') - _, args = args[0], args[1:] # allow the "cls" keyword be passed - if args: - typename, args = args[0], args[1:] # allow the "_typename" keyword be passed - elif '_typename' in kwargs: - typename = kwargs.pop('_typename') - import warnings - - warnings.warn( - "Passing '_typename' as keyword argument is deprecated", - DeprecationWarning, - stacklevel=2, - ) - else: - raise TypeError( - "TypedDict.__new__() missing 1 required positional argument: '_typename'" - ) - if args: - try: - (fields,) = args # allow the "_fields" keyword be passed - except ValueError: - raise TypeError( - 'TypedDict.__new__() takes from 2 to 3 ' - 'positional arguments but {} ' - 'were given'.format(len(args) + 2) - ) - elif '_fields' in kwargs and len(kwargs) == 1: - fields = kwargs.pop('_fields') - import warnings - - warnings.warn( - "Passing '_fields' as keyword argument is deprecated", - DeprecationWarning, - stacklevel=2, - ) - else: - fields = None - - if fields is None: - fields = kwargs - elif kwargs: - raise TypeError("TypedDict takes either a dict or keyword arguments, but not both") - - ns = {'__annotations__': dict(fields)} - try: - # Setting correct module is necessary to make typed dict classes pickleable. - ns['__module__'] = sys._getframe(1).f_globals.get('__name__', '__main__') - except (AttributeError, ValueError): - pass - - return _TypedDictMeta(typename, (), ns, total=total) - - _typeddict_new.__text_signature__ = ( - '($cls, _typename, _fields=None, /, *, total=True, **kwargs)' - ) - - class _TypedDictMeta(type): - def __init__(cls, name, bases, ns, total=True): - # In Python 3.4 and 3.5 the __init__ method also needs to support the keyword arguments. 
- # See https://www.python.org/dev/peps/pep-0487/#implementation-details - super(_TypedDictMeta, cls).__init__(name, bases, ns) - - def __new__(cls, name, bases, ns, total=True): - # Create new typed dict class object. - # This method is called directly when TypedDict is subclassed, - # or via _typeddict_new when TypedDict is instantiated. This way - # TypedDict supports all three syntaxes described in its docstring. - # Subclasses and instances of TypedDict return actual dictionaries - # via _dict_new. - ns['__new__'] = _typeddict_new if name == 'TypedDict' else _dict_new - tp_dict = super(_TypedDictMeta, cls).__new__(cls, name, (dict,), ns) - - annotations = {} - own_annotations = ns.get('__annotations__', {}) - own_annotation_keys = set(own_annotations.keys()) - msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type" - own_annotations = {n: typing._type_check(tp, msg) for n, tp in own_annotations.items()} - required_keys = set() - optional_keys = set() - - for base in bases: - annotations.update(base.__dict__.get('__annotations__', {})) - required_keys.update(base.__dict__.get('__required_keys__', ())) - optional_keys.update(base.__dict__.get('__optional_keys__', ())) - - annotations.update(own_annotations) - if total: - required_keys.update(own_annotation_keys) - else: - optional_keys.update(own_annotation_keys) - - tp_dict.__annotations__ = annotations - tp_dict.__required_keys__ = frozenset(required_keys) - tp_dict.__optional_keys__ = frozenset(optional_keys) - if not hasattr(tp_dict, '__total__'): - tp_dict.__total__ = total - return tp_dict - - __instancecheck__ = __subclasscheck__ = _check_fails - - TypedDict = _TypedDictMeta('TypedDict', (dict,), {}) - TypedDict.__module__ = __name__ - TypedDict.__doc__ = """A simple typed name space. At runtime it is equivalent to a plain dict. - - TypedDict creates a dictionary type that expects all of its - instances to have a certain set of keys, with each key - associated with a value of a consistent type. This expectation - is not checked at runtime but is only enforced by type checkers. - Usage:: - - class Point2D(TypedDict): - x: int - y: int - label: str - - a: Point2D = {'x': 1, 'y': 2, 'label': 'good'} # OK - b: Point2D = {'z': 3, 'label': 'bad'} # Fails type check - - assert Point2D(x=1, y=2, label='first') == dict(x=1, y=2, label='first') - - The type info can be accessed via the Point2D.__annotations__ dict, and - the Point2D.__required_keys__ and Point2D.__optional_keys__ frozensets. - TypedDict supports two additional equivalent forms:: - - Point2D = TypedDict('Point2D', x=int, y=int, label=str) - Point2D = TypedDict('Point2D', {'x': int, 'y': int, 'label': str}) - - The class syntax is only supported in Python 3.6+, while two other - syntax forms work for Python 2.7 and 3.2+ - """ - -# Python 3.9+ has PEP 593 (Annotated and modified get_type_hints) -if hasattr(typing, 'Annotated'): - Annotated = typing.Annotated - get_type_hints = typing.get_type_hints - # Not exported and not a public API, but needed for get_origin() and get_args() - # to work. - _AnnotatedAlias = typing._AnnotatedAlias -elif PEP_560: - - class _AnnotatedAlias(typing._GenericAlias, _root=True): - """Runtime representation of an annotated type. - - At its core 'Annotated[t, dec1, dec2, ...]' is an alias for the type 't' - with extra annotations. The alias behaves like a normal typing alias, - instantiating is the same as instantiating the underlying type, binding - it to types is also the same. 
- """ - - def __init__(self, origin, metadata): - if isinstance(origin, _AnnotatedAlias): - metadata = origin.__metadata__ + metadata - origin = origin.__origin__ - super().__init__(origin, origin) - self.__metadata__ = metadata - - def copy_with(self, params): - assert len(params) == 1 - new_type = params[0] - return _AnnotatedAlias(new_type, self.__metadata__) - - def __repr__(self): - return "typing_extensions.Annotated[{}, {}]".format( - typing._type_repr(self.__origin__), ", ".join(repr(a) for a in self.__metadata__) - ) - - def __reduce__(self): - return operator.getitem, (Annotated, (self.__origin__,) + self.__metadata__) - - def __eq__(self, other): - if not isinstance(other, _AnnotatedAlias): - return NotImplemented - if self.__origin__ != other.__origin__: - return False - return self.__metadata__ == other.__metadata__ - - def __hash__(self): - return hash((self.__origin__, self.__metadata__)) - - class Annotated: - """Add context specific metadata to a type. - - Example: Annotated[int, runtime_check.Unsigned] indicates to the - hypothetical runtime_check module that this type is an unsigned int. - Every other consumer of this type can ignore this metadata and treat - this type as int. - - The first argument to Annotated must be a valid type (and will be in - the __origin__ field), the remaining arguments are kept as a tuple in - the __extra__ field. - - Details: - - - It's an error to call `Annotated` with less than two arguments. - - Nested Annotated are flattened:: - - Annotated[Annotated[T, Ann1, Ann2], Ann3] == Annotated[T, Ann1, Ann2, Ann3] - - - Instantiating an annotated type is equivalent to instantiating the - underlying type:: - - Annotated[C, Ann1](5) == C(5) - - - Annotated can be used as a generic type alias:: - - Optimized = Annotated[T, runtime.Optimize()] - Optimized[int] == Annotated[int, runtime.Optimize()] - - OptimizedList = Annotated[List[T], runtime.Optimize()] - OptimizedList[int] == Annotated[List[int], runtime.Optimize()] - """ - - __slots__ = () - - def __new__(cls, *args, **kwargs): - raise TypeError("Type Annotated cannot be instantiated.") - - @_tp_cache - def __class_getitem__(cls, params): - if not isinstance(params, tuple) or len(params) < 2: - raise TypeError( - "Annotated[...] should be used " - "with at least two arguments (a type and an " - "annotation)." - ) - msg = "Annotated[t, ...]: t must be a type." - origin = typing._type_check(params[0], msg) - metadata = tuple(params[1:]) - return _AnnotatedAlias(origin, metadata) - - def __init_subclass__(cls, *args, **kwargs): - raise TypeError("Cannot subclass {}.Annotated".format(cls.__module__)) - - def _strip_annotations(t): - """Strips the annotations from a given type.""" - if isinstance(t, _AnnotatedAlias): - return _strip_annotations(t.__origin__) - if isinstance(t, typing._GenericAlias): - stripped_args = tuple(_strip_annotations(a) for a in t.__args__) - if stripped_args == t.__args__: - return t - res = t.copy_with(stripped_args) - res._special = t._special - return res - return t - - def get_type_hints(obj, globalns=None, localns=None, include_extras=False): - """Return type hints for an object. - - This is often the same as obj.__annotations__, but it handles - forward references encoded as string literals, adds Optional[t] if a - default value equal to None is set and recursively replaces all - 'Annotated[T, ...]' with 'T' (unless 'include_extras=True'). - - The argument may be a module, class, method, or function. The annotations - are returned as a dictionary. 
For classes, annotations include also - inherited members. - - TypeError is raised if the argument is not of a type that can contain - annotations, and an empty dictionary is returned if no annotations are - present. - - BEWARE -- the behavior of globalns and localns is counterintuitive - (unless you are familiar with how eval() and exec() work). The - search order is locals first, then globals. - - - If no dict arguments are passed, an attempt is made to use the - globals from obj (or the respective module's globals for classes), - and these are also used as the locals. If the object does not appear - to have globals, an empty dictionary is used. - - - If one dict argument is passed, it is used for both globals and - locals. - - - If two dict arguments are passed, they specify globals and - locals, respectively. - """ - hint = typing.get_type_hints(obj, globalns=globalns, localns=localns) - if include_extras: - return hint - return {k: _strip_annotations(t) for k, t in hint.items()} - -elif HAVE_ANNOTATED: - - def _is_dunder(name): - """Returns True if name is a __dunder_variable_name__.""" - return len(name) > 4 and name.startswith('__') and name.endswith('__') - - # Prior to Python 3.7 types did not have `copy_with`. A lot of the equality - # checks, argument expansion etc. are done on the _subs_tree. As a result we - # can't provide a get_type_hints function that strips out annotations. - - class AnnotatedMeta(typing.GenericMeta): - """Metaclass for Annotated""" - - def __new__(cls, name, bases, namespace, **kwargs): - if any(b is not object for b in bases): - raise TypeError("Cannot subclass " + str(Annotated)) - return super().__new__(cls, name, bases, namespace, **kwargs) - - @property - def __metadata__(self): - return self._subs_tree()[2] - - def _tree_repr(self, tree): - cls, origin, metadata = tree - if not isinstance(origin, tuple): - tp_repr = typing._type_repr(origin) - else: - tp_repr = origin[0]._tree_repr(origin) - metadata_reprs = ", ".join(repr(arg) for arg in metadata) - return '%s[%s, %s]' % (cls, tp_repr, metadata_reprs) - - def _subs_tree(self, tvars=None, args=None): # noqa - if self is Annotated: - return Annotated - res = super()._subs_tree(tvars=tvars, args=args) - # Flatten nested Annotated - if isinstance(res[1], tuple) and res[1][0] is Annotated: - sub_tp = res[1][1] - sub_annot = res[1][2] - return (Annotated, sub_tp, sub_annot + res[2]) - return res - - def _get_cons(self): - """Return the class used to create instance of this type.""" - if self.__origin__ is None: - raise TypeError( - "Cannot get the underlying type of a non-specialized Annotated type." - ) - tree = self._subs_tree() - while isinstance(tree, tuple) and tree[0] is Annotated: - tree = tree[1] - if isinstance(tree, tuple): - return tree[0] - else: - return tree - - @_tp_cache - def __getitem__(self, params): - if not isinstance(params, tuple): - params = (params,) - if self.__origin__ is not None: # specializing an instantiated type - return super().__getitem__(params) - elif not isinstance(params, tuple) or len(params) < 2: - raise TypeError( - "Annotated[...] should be instantiated " - "with at least two arguments (a type and an " - "annotation)." - ) - else: - msg = "Annotated[t, ...]: t must be a type." - tp = typing._type_check(params[0], msg) - metadata = tuple(params[1:]) - return self.__class__( - self.__name__, - self.__bases__, - _no_slots_copy(self.__dict__), - tvars=_type_vars((tp,)), - # Metadata is a tuple so it won't be touched by _replace_args et al. 
- args=(tp, metadata), - origin=self, - ) - - def __call__(self, *args, **kwargs): - cons = self._get_cons() - result = cons(*args, **kwargs) - try: - result.__orig_class__ = self - except AttributeError: - pass - return result - - def __getattr__(self, attr): - # For simplicity we just don't relay all dunder names - if self.__origin__ is not None and not _is_dunder(attr): - return getattr(self._get_cons(), attr) - raise AttributeError(attr) - - def __setattr__(self, attr, value): - if _is_dunder(attr) or attr.startswith('_abc_'): - super().__setattr__(attr, value) - elif self.__origin__ is None: - raise AttributeError(attr) - else: - setattr(self._get_cons(), attr, value) - - def __instancecheck__(self, obj): - raise TypeError("Annotated cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - raise TypeError("Annotated cannot be used with issubclass().") - - class Annotated(metaclass=AnnotatedMeta): - """Add context specific metadata to a type. - - Example: Annotated[int, runtime_check.Unsigned] indicates to the - hypothetical runtime_check module that this type is an unsigned int. - Every other consumer of this type can ignore this metadata and treat - this type as int. - - The first argument to Annotated must be a valid type, the remaining - arguments are kept as a tuple in the __metadata__ field. - - Details: - - - It's an error to call `Annotated` with less than two arguments. - - Nested Annotated are flattened:: - - Annotated[Annotated[T, Ann1, Ann2], Ann3] == Annotated[T, Ann1, Ann2, Ann3] - - - Instantiating an annotated type is equivalent to instantiating the - underlying type:: - - Annotated[C, Ann1](5) == C(5) - - - Annotated can be used as a generic type alias:: - - Optimized = Annotated[T, runtime.Optimize()] - Optimized[int] == Annotated[int, runtime.Optimize()] - - OptimizedList = Annotated[List[T], runtime.Optimize()] - OptimizedList[int] == Annotated[List[int], runtime.Optimize()] - """ - - -# Python 3.8 has get_origin() and get_args() but those implementations aren't -# Annotated-aware, so we can't use those, only Python 3.9 versions will do. -# Similarly, Python 3.9's implementation doesn't support ParamSpecArgs and -# ParamSpecKwargs. -if sys.version_info[:2] >= (3, 10): - get_origin = typing.get_origin - get_args = typing.get_args -elif PEP_560: - from typing import _GenericAlias - - try: - # 3.9+ - from typing import _BaseGenericAlias - except ImportError: - _BaseGenericAlias = _GenericAlias - try: - # 3.9+ - from typing import GenericAlias - except ImportError: - GenericAlias = _GenericAlias - - def get_origin(tp): - """Get the unsubscripted version of a type. - - This supports generic types, Callable, Tuple, Union, Literal, Final, ClassVar - and Annotated. Return None for unsupported types. Examples:: - - get_origin(Literal[42]) is Literal - get_origin(int) is None - get_origin(ClassVar[int]) is ClassVar - get_origin(Generic) is Generic - get_origin(Generic[T]) is Generic - get_origin(Union[T, int]) is Union - get_origin(List[Tuple[T, T]][int]) == list - get_origin(P.args) is P - """ - if isinstance(tp, _AnnotatedAlias): - return Annotated - if isinstance( - tp, (_GenericAlias, GenericAlias, _BaseGenericAlias, ParamSpecArgs, ParamSpecKwargs) - ): - return tp.__origin__ - if tp is Generic: - return Generic - return None - - def get_args(tp): - """Get type arguments with all substitutions performed. - - For unions, basic simplifications used by Union constructor are performed. 
- Examples:: - get_args(Dict[str, int]) == (str, int) - get_args(int) == () - get_args(Union[int, Union[T, int], str][int]) == (int, str) - get_args(Union[int, Tuple[T, int]][str]) == (int, Tuple[str, int]) - get_args(Callable[[], T][int]) == ([], int) - """ - if isinstance(tp, _AnnotatedAlias): - return (tp.__origin__,) + tp.__metadata__ - if isinstance(tp, (_GenericAlias, GenericAlias)): - if getattr(tp, "_special", False): - return () - res = tp.__args__ - if get_origin(tp) is collections.abc.Callable and res[0] is not Ellipsis: - res = (list(res[:-1]), res[-1]) - return res - return () - - -if hasattr(typing, 'TypeAlias'): - TypeAlias = typing.TypeAlias -elif sys.version_info[:2] >= (3, 9): - - class _TypeAliasForm(typing._SpecialForm, _root=True): - def __repr__(self): - return 'typing_extensions.' + self._name - - @_TypeAliasForm - def TypeAlias(self, parameters): - """Special marker indicating that an assignment should - be recognized as a proper type alias definition by type - checkers. - - For example:: - - Predicate: TypeAlias = Callable[..., bool] - - It's invalid when used anywhere except as in the example above. - """ - raise TypeError("{} is not subscriptable".format(self)) - -elif sys.version_info[:2] >= (3, 7): - - class _TypeAliasForm(typing._SpecialForm, _root=True): - def __repr__(self): - return 'typing_extensions.' + self._name - - TypeAlias = _TypeAliasForm( - 'TypeAlias', - doc="""Special marker indicating that an assignment should - be recognized as a proper type alias definition by type - checkers. - - For example:: - - Predicate: TypeAlias = Callable[..., bool] - - It's invalid when used anywhere except as in the example - above.""", - ) - -elif hasattr(typing, '_FinalTypingBase'): - - class _TypeAliasMeta(typing.TypingMeta): - """Metaclass for TypeAlias""" - - def __repr__(self): - return 'typing_extensions.TypeAlias' - - class _TypeAliasBase(typing._FinalTypingBase, metaclass=_TypeAliasMeta, _root=True): - """Special marker indicating that an assignment should - be recognized as a proper type alias definition by type - checkers. - - For example:: - - Predicate: TypeAlias = Callable[..., bool] - - It's invalid when used anywhere except as in the example above. - """ - - __slots__ = () - - def __instancecheck__(self, obj): - raise TypeError("TypeAlias cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - raise TypeError("TypeAlias cannot be used with issubclass().") - - def __repr__(self): - return 'typing_extensions.TypeAlias' - - TypeAlias = _TypeAliasBase(_root=True) -else: - - class _TypeAliasMeta(typing.TypingMeta): - """Metaclass for TypeAlias""" - - def __instancecheck__(self, obj): - raise TypeError("TypeAlias cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - raise TypeError("TypeAlias cannot be used with issubclass().") - - def __call__(self, *args, **kwargs): - raise TypeError("Cannot instantiate TypeAlias") - - class TypeAlias(metaclass=_TypeAliasMeta, _root=True): - """Special marker indicating that an assignment should - be recognized as a proper type alias definition by type - checkers. - - For example:: - - Predicate: TypeAlias = Callable[..., bool] - - It's invalid when used anywhere except as in the example above. 
- """ - - __slots__ = () - - -# Python 3.10+ has PEP 612 -if hasattr(typing, 'ParamSpecArgs'): - ParamSpecArgs = typing.ParamSpecArgs - ParamSpecKwargs = typing.ParamSpecKwargs -else: - - class _Immutable: - """Mixin to indicate that object should not be copied.""" - - __slots__ = () - - def __copy__(self): - return self - - def __deepcopy__(self, memo): - return self - - class ParamSpecArgs(_Immutable): - """The args for a ParamSpec object. - - Given a ParamSpec object P, P.args is an instance of ParamSpecArgs. - - ParamSpecArgs objects have a reference back to their ParamSpec: - - P.args.__origin__ is P - - This type is meant for runtime introspection and has no special meaning to - static type checkers. - """ - - def __init__(self, origin): - self.__origin__ = origin - - def __repr__(self): - return "{}.args".format(self.__origin__.__name__) - - class ParamSpecKwargs(_Immutable): - """The kwargs for a ParamSpec object. - - Given a ParamSpec object P, P.kwargs is an instance of ParamSpecKwargs. - - ParamSpecKwargs objects have a reference back to their ParamSpec: - - P.kwargs.__origin__ is P - - This type is meant for runtime introspection and has no special meaning to - static type checkers. - """ - - def __init__(self, origin): - self.__origin__ = origin - - def __repr__(self): - return "{}.kwargs".format(self.__origin__.__name__) - - -if hasattr(typing, 'ParamSpec'): - ParamSpec = typing.ParamSpec -else: - # Inherits from list as a workaround for Callable checks in Python < 3.9.2. - class ParamSpec(list): - """Parameter specification variable. - - Usage:: - - P = ParamSpec('P') - - Parameter specification variables exist primarily for the benefit of static - type checkers. They are used to forward the parameter types of one - callable to another callable, a pattern commonly found in higher order - functions and decorators. They are only valid when used in ``Concatenate``, - or s the first argument to ``Callable``. In Python 3.10 and higher, - they are also supported in user-defined Generics at runtime. - See class Generic for more information on generic types. An - example for annotating a decorator:: - - T = TypeVar('T') - P = ParamSpec('P') - - def add_logging(f: Callable[P, T]) -> Callable[P, T]: - '''A type-safe decorator to add logging to a function.''' - def inner(*args: P.args, **kwargs: P.kwargs) -> T: - logging.info(f'{f.__name__} was called') - return f(*args, **kwargs) - return inner - - @add_logging - def add_two(x: float, y: float) -> float: - '''Add two numbers together.''' - return x + y - - Parameter specification variables defined with covariant=True or - contravariant=True can be used to declare covariant or contravariant - generic types. These keyword arguments are valid, but their actual semantics - are yet to be decided. See PEP 612 for details. - - Parameter specification variables can be introspected. e.g.: - - P.__name__ == 'T' - P.__bound__ == None - P.__covariant__ == False - P.__contravariant__ == False - - Note that only parameter specification variables defined in global scope can - be pickled. - """ - - # Trick Generic __parameters__. 
- __class__ = TypeVar - - @property - def args(self): - return ParamSpecArgs(self) - - @property - def kwargs(self): - return ParamSpecKwargs(self) - - def __init__(self, name, *, bound=None, covariant=False, contravariant=False): - super().__init__([self]) - self.__name__ = name - self.__covariant__ = bool(covariant) - self.__contravariant__ = bool(contravariant) - if bound: - self.__bound__ = typing._type_check(bound, 'Bound must be a type.') - else: - self.__bound__ = None - - # for pickling: - try: - def_mod = sys._getframe(1).f_globals.get('__name__', '__main__') - except (AttributeError, ValueError): - def_mod = None - if def_mod != 'typing_extensions': - self.__module__ = def_mod - - def __repr__(self): - if self.__covariant__: - prefix = '+' - elif self.__contravariant__: - prefix = '-' - else: - prefix = '~' - return prefix + self.__name__ - - def __hash__(self): - return object.__hash__(self) - - def __eq__(self, other): - return self is other - - def __reduce__(self): - return self.__name__ - - # Hack to get typing._type_check to pass. - def __call__(self, *args, **kwargs): - pass - - if not PEP_560: - # Only needed in 3.6 and lower. - def _get_type_vars(self, tvars): - if self not in tvars: - tvars.append(self) - - -# Inherits from list as a workaround for Callable checks in Python < 3.9.2. -class _ConcatenateGenericAlias(list): - # Trick Generic into looking into this for __parameters__. - if PEP_560: - __class__ = _GenericAlias - elif sys.version_info[:3] == (3, 5, 2): - __class__ = typing.TypingMeta - else: - __class__ = typing._TypingBase - - # Flag in 3.8. - _special = False - # Attribute in 3.6 and earlier. - if sys.version_info[:3] == (3, 5, 2): - _gorg = typing.GenericMeta - else: - _gorg = typing.Generic - - def __init__(self, origin, args): - super().__init__(args) - self.__origin__ = origin - self.__args__ = args - - def __repr__(self): - _type_repr = typing._type_repr - return '{origin}[{args}]'.format( - origin=_type_repr(self.__origin__), - args=', '.join(_type_repr(arg) for arg in self.__args__), - ) - - def __hash__(self): - return hash((self.__origin__, self.__args__)) - - # Hack to get typing._type_check to pass in Generic. - def __call__(self, *args, **kwargs): - pass - - @property - def __parameters__(self): - return tuple(tp for tp in self.__args__ if isinstance(tp, (TypeVar, ParamSpec))) - - if not PEP_560: - # Only required in 3.6 and lower. - def _get_type_vars(self, tvars): - if self.__origin__ and self.__parameters__: - typing._get_type_vars(self.__parameters__, tvars) - - -@_tp_cache -def _concatenate_getitem(self, parameters): - if parameters == (): - raise TypeError("Cannot take a Concatenate of no types.") - if not isinstance(parameters, tuple): - parameters = (parameters,) - if not isinstance(parameters[-1], ParamSpec): - raise TypeError("The last parameter to Concatenate should be a ParamSpec variable.") - msg = "Concatenate[arg, ...]: each arg must be a type." - parameters = tuple(typing._type_check(p, msg) for p in parameters) - return _ConcatenateGenericAlias(self, parameters) - - -if hasattr(typing, 'Concatenate'): - Concatenate = typing.Concatenate - _ConcatenateGenericAlias = typing._ConcatenateGenericAlias # noqa -elif sys.version_info[:2] >= (3, 9): - - @_TypeAliasForm - def Concatenate(self, parameters): - """Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a - higher order function which adds, removes or transforms parameters of a - callable. 
- - For example:: - - Callable[Concatenate[int, P], int] - - See PEP 612 for detailed information. - """ - return _concatenate_getitem(self, parameters) - -elif sys.version_info[:2] >= (3, 7): - - class _ConcatenateForm(typing._SpecialForm, _root=True): - def __repr__(self): - return 'typing_extensions.' + self._name - - def __getitem__(self, parameters): - return _concatenate_getitem(self, parameters) - - Concatenate = _ConcatenateForm( - 'Concatenate', - doc="""Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a - higher order function which adds, removes or transforms parameters of a - callable. - - For example:: - - Callable[Concatenate[int, P], int] - - See PEP 612 for detailed information. - """, - ) - -elif hasattr(typing, '_FinalTypingBase'): - - class _ConcatenateAliasMeta(typing.TypingMeta): - """Metaclass for Concatenate.""" - - def __repr__(self): - return 'typing_extensions.Concatenate' - - class _ConcatenateAliasBase( - typing._FinalTypingBase, metaclass=_ConcatenateAliasMeta, _root=True - ): - """Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a - higher order function which adds, removes or transforms parameters of a - callable. - - For example:: - - Callable[Concatenate[int, P], int] - - See PEP 612 for detailed information. - """ - - __slots__ = () - - def __instancecheck__(self, obj): - raise TypeError("Concatenate cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - raise TypeError("Concatenate cannot be used with issubclass().") - - def __repr__(self): - return 'typing_extensions.Concatenate' - - def __getitem__(self, parameters): - return _concatenate_getitem(self, parameters) - - Concatenate = _ConcatenateAliasBase(_root=True) -# For 3.5.0 - 3.5.2 -else: - - class _ConcatenateAliasMeta(typing.TypingMeta): - """Metaclass for Concatenate.""" - - def __instancecheck__(self, obj): - raise TypeError("TypeAlias cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - raise TypeError("TypeAlias cannot be used with issubclass().") - - def __call__(self, *args, **kwargs): - raise TypeError("Cannot instantiate TypeAlias") - - def __getitem__(self, parameters): - return _concatenate_getitem(self, parameters) - - class Concatenate(metaclass=_ConcatenateAliasMeta, _root=True): - """Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a - higher order function which adds, removes or transforms parameters of a - callable. - - For example:: - - Callable[Concatenate[int, P], int] - - See PEP 612 for detailed information. - """ - - __slots__ = () - - -if hasattr(typing, 'TypeGuard'): - TypeGuard = typing.TypeGuard -elif sys.version_info[:2] >= (3, 9): - - class _TypeGuardForm(typing._SpecialForm, _root=True): - def __repr__(self): - return 'typing_extensions.' + self._name - - @_TypeGuardForm - def TypeGuard(self, parameters): - """Special typing form used to annotate the return type of a user-defined - type guard function. ``TypeGuard`` only accepts a single type argument. - At runtime, functions marked this way should return a boolean. - - ``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static - type checkers to determine a more precise type of an expression within a - program's code flow. Usually type narrowing is done by analyzing - conditional code flow and applying the narrowing to a block of code. The - conditional expression here is sometimes referred to as a "type guard". 
- - Sometimes it would be convenient to use a user-defined boolean function - as a type guard. Such a function should use ``TypeGuard[...]`` as its - return type to alert static type checkers to this intention. - - Using ``-> TypeGuard`` tells the static type checker that for a given - function: - - 1. The return value is a boolean. - 2. If the return value is ``True``, the type of its argument - is the type inside ``TypeGuard``. - - For example:: - - def is_str(val: Union[str, float]): - # "isinstance" type guard - if isinstance(val, str): - # Type of ``val`` is narrowed to ``str`` - ... - else: - # Else, type of ``val`` is narrowed to ``float``. - ... - - Strict type narrowing is not enforced -- ``TypeB`` need not be a narrower - form of ``TypeA`` (it can even be a wider form) and this may lead to - type-unsafe results. The main reason is to allow for things like - narrowing ``List[object]`` to ``List[str]`` even though the latter is not - a subtype of the former, since ``List`` is invariant. The responsibility of - writing type-safe type guards is left to the user. - - ``TypeGuard`` also works with type variables. For more information, see - PEP 647 (User-Defined Type Guards). - """ - item = typing._type_check(parameters, '{} accepts only single type.'.format(self)) - return _GenericAlias(self, (item,)) - -elif sys.version_info[:2] >= (3, 7): - - class _TypeGuardForm(typing._SpecialForm, _root=True): - def __repr__(self): - return 'typing_extensions.' + self._name - - def __getitem__(self, parameters): - item = typing._type_check( - parameters, '{} accepts only a single type'.format(self._name) - ) - return _GenericAlias(self, (item,)) - - TypeGuard = _TypeGuardForm( - 'TypeGuard', - doc="""Special typing form used to annotate the return type of a user-defined - type guard function. ``TypeGuard`` only accepts a single type argument. - At runtime, functions marked this way should return a boolean. - - ``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static - type checkers to determine a more precise type of an expression within a - program's code flow. Usually type narrowing is done by analyzing - conditional code flow and applying the narrowing to a block of code. The - conditional expression here is sometimes referred to as a "type guard". - - Sometimes it would be convenient to use a user-defined boolean function - as a type guard. Such a function should use ``TypeGuard[...]`` as its - return type to alert static type checkers to this intention. - - Using ``-> TypeGuard`` tells the static type checker that for a given - function: - - 1. The return value is a boolean. - 2. If the return value is ``True``, the type of its argument - is the type inside ``TypeGuard``. - - For example:: - - def is_str(val: Union[str, float]): - # "isinstance" type guard - if isinstance(val, str): - # Type of ``val`` is narrowed to ``str`` - ... - else: - # Else, type of ``val`` is narrowed to ``float``. - ... - - Strict type narrowing is not enforced -- ``TypeB`` need not be a narrower - form of ``TypeA`` (it can even be a wider form) and this may lead to - type-unsafe results. The main reason is to allow for things like - narrowing ``List[object]`` to ``List[str]`` even though the latter is not - a subtype of the former, since ``List`` is invariant. The responsibility of - writing type-safe type guards is left to the user. - - ``TypeGuard`` also works with type variables. For more information, see - PEP 647 (User-Defined Type Guards). 
- """, - ) -elif hasattr(typing, '_FinalTypingBase'): - - class _TypeGuard(typing._FinalTypingBase, _root=True): - """Special typing form used to annotate the return type of a user-defined - type guard function. ``TypeGuard`` only accepts a single type argument. - At runtime, functions marked this way should return a boolean. - - ``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static - type checkers to determine a more precise type of an expression within a - program's code flow. Usually type narrowing is done by analyzing - conditional code flow and applying the narrowing to a block of code. The - conditional expression here is sometimes referred to as a "type guard". - - Sometimes it would be convenient to use a user-defined boolean function - as a type guard. Such a function should use ``TypeGuard[...]`` as its - return type to alert static type checkers to this intention. - - Using ``-> TypeGuard`` tells the static type checker that for a given - function: - - 1. The return value is a boolean. - 2. If the return value is ``True``, the type of its argument - is the type inside ``TypeGuard``. - - For example:: - - def is_str(val: Union[str, float]): - # "isinstance" type guard - if isinstance(val, str): - # Type of ``val`` is narrowed to ``str`` - ... - else: - # Else, type of ``val`` is narrowed to ``float``. - ... - - Strict type narrowing is not enforced -- ``TypeB`` need not be a narrower - form of ``TypeA`` (it can even be a wider form) and this may lead to - type-unsafe results. The main reason is to allow for things like - narrowing ``List[object]`` to ``List[str]`` even though the latter is not - a subtype of the former, since ``List`` is invariant. The responsibility of - writing type-safe type guards is left to the user. - - ``TypeGuard`` also works with type variables. For more information, see - PEP 647 (User-Defined Type Guards). 
- """ - - __slots__ = ('__type__',) - - def __init__(self, tp=None, **kwds): - self.__type__ = tp - - def __getitem__(self, item): - cls = type(self) - if self.__type__ is None: - return cls( - typing._type_check( - item, '{} accepts only a single type.'.format(cls.__name__[1:]) - ), - _root=True, - ) - raise TypeError('{} cannot be further subscripted'.format(cls.__name__[1:])) - - def _eval_type(self, globalns, localns): - new_tp = typing._eval_type(self.__type__, globalns, localns) - if new_tp == self.__type__: - return self - return type(self)(new_tp, _root=True) - - def __repr__(self): - r = super().__repr__() - if self.__type__ is not None: - r += '[{}]'.format(typing._type_repr(self.__type__)) - return r - - def __hash__(self): - return hash((type(self).__name__, self.__type__)) - - def __eq__(self, other): - if not isinstance(other, _TypeGuard): - return NotImplemented - if self.__type__ is not None: - return self.__type__ == other.__type__ - return self is other - - TypeGuard = _TypeGuard(_root=True) -else: - - class _TypeGuardMeta(typing.TypingMeta): - """Metaclass for TypeGuard""" - - def __new__(cls, name, bases, namespace, tp=None, _root=False): - self = super().__new__(cls, name, bases, namespace, _root=_root) - if tp is not None: - self.__type__ = tp - return self - - def __instancecheck__(self, obj): - raise TypeError("TypeGuard cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - raise TypeError("TypeGuard cannot be used with issubclass().") - - def __getitem__(self, item): - cls = type(self) - if self.__type__ is not None: - raise TypeError('{} cannot be further subscripted'.format(cls.__name__[1:])) - - param = typing._type_check( - item, '{} accepts only single type.'.format(cls.__name__[1:]) - ) - return cls(self.__name__, self.__bases__, dict(self.__dict__), tp=param, _root=True) - - def _eval_type(self, globalns, localns): - new_tp = typing._eval_type(self.__type__, globalns, localns) - if new_tp == self.__type__: - return self - return type(self)( - self.__name__, self.__bases__, dict(self.__dict__), tp=self.__type__, _root=True - ) - - def __repr__(self): - r = super().__repr__() - if self.__type__ is not None: - r += '[{}]'.format(typing._type_repr(self.__type__)) - return r - - def __hash__(self): - return hash((type(self).__name__, self.__type__)) - - def __eq__(self, other): - if not hasattr(other, "__type__"): - return NotImplemented - if self.__type__ is not None: - return self.__type__ == other.__type__ - return self is other - - class TypeGuard(typing.Final, metaclass=_TypeGuardMeta, _root=True): - """Special typing form used to annotate the return type of a user-defined - type guard function. ``TypeGuard`` only accepts a single type argument. - At runtime, functions marked this way should return a boolean. - - ``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static - type checkers to determine a more precise type of an expression within a - program's code flow. Usually type narrowing is done by analyzing - conditional code flow and applying the narrowing to a block of code. The - conditional expression here is sometimes referred to as a "type guard". - - Sometimes it would be convenient to use a user-defined boolean function - as a type guard. Such a function should use ``TypeGuard[...]`` as its - return type to alert static type checkers to this intention. - - Using ``-> TypeGuard`` tells the static type checker that for a given - function: - - 1. The return value is a boolean. - 2. 
If the return value is ``True``, the type of its argument - is the type inside ``TypeGuard``. - - For example:: - - def is_str(val: Union[str, float]): - # "isinstance" type guard - if isinstance(val, str): - # Type of ``val`` is narrowed to ``str`` - ... - else: - # Else, type of ``val`` is narrowed to ``float``. - ... - - Strict type narrowing is not enforced -- ``TypeB`` need not be a narrower - form of ``TypeA`` (it can even be a wider form) and this may lead to - type-unsafe results. The main reason is to allow for things like - narrowing ``List[object]`` to ``List[str]`` even though the latter is not - a subtype of the former, since ``List`` is invariant. The responsibility of - writing type-safe type guards is left to the user. - - ``TypeGuard`` also works with type variables. For more information, see - PEP 647 (User-Defined Type Guards). - """ - - __type__ = None diff --git a/podman/client.py b/podman/client.py index 5785f6b9..f9a023e7 100644 --- a/podman/client.py +++ b/podman/client.py @@ -4,7 +4,7 @@ import os from contextlib import AbstractContextManager from pathlib import Path -from typing import Any, Dict, Optional +from typing import Any, Optional from podman.api import cached_property from podman.api.client import APIClient @@ -88,8 +88,8 @@ def from_env( max_pool_size: Optional[int] = None, ssl_version: Optional[int] = None, # pylint: disable=unused-argument assert_hostname: bool = False, # pylint: disable=unused-argument - environment: Optional[Dict[str, str]] = None, - credstore_env: Optional[Dict[str, str]] = None, + environment: Optional[dict[str, str]] = None, + credstore_env: Optional[dict[str, str]] = None, use_ssh_client: bool = True, # pylint: disable=unused-argument ) -> "PodmanClient": """Returns connection to service using environment variables and parameters. @@ -175,7 +175,7 @@ def secrets(self): def system(self): return SystemManager(client=self.api) - def df(self) -> Dict[str, Any]: # pylint: disable=missing-function-docstring,invalid-name + def df(self) -> dict[str, Any]: # pylint: disable=missing-function-docstring,invalid-name return self.system.df() df.__doc__ = SystemManager.df.__doc__ diff --git a/podman/domain/config.py b/podman/domain/config.py index f683d446..1d4e8bf7 100644 --- a/podman/domain/config.py +++ b/podman/domain/config.py @@ -3,7 +3,7 @@ import sys import urllib from pathlib import Path -from typing import Dict, Optional +from typing import Optional import json from podman.api import cached_property @@ -24,7 +24,7 @@ class ServiceConnection: """ServiceConnection defines a connection to the Podman service.""" - def __init__(self, name: str, attrs: Dict[str, str]): + def __init__(self, name: str, attrs: dict[str, str]): """Create a Podman ServiceConnection.""" self.name = name self.attrs = attrs @@ -87,7 +87,7 @@ def __init__(self, path: Optional[str] = None): try: with open(self.path, encoding='utf-8') as file: self.attrs = json.load(file) - except: # pylint: disable=bare-except + except Exception: # if the user specifies a path, it can either be a JSON file # or a TOML file - so try TOML next try: @@ -122,14 +122,14 @@ def id(self): # pylint: disable=invalid-name @cached_property def services(self): - """Dict[str, ServiceConnection]: Returns list of service connections. + """dict[str, ServiceConnection]: Returns list of service connections. 
Examples: podman_config = PodmanConfig() address = podman_config.services["testing"] print(f"Testing service address {address}") """ - services: Dict[str, ServiceConnection] = {} + services: dict[str, ServiceConnection] = {} # read the keys of the toml file first engine = self.attrs.get("engine") diff --git a/podman/domain/containers.py b/podman/domain/containers.py index f603a9c6..2890698f 100644 --- a/podman/domain/containers.py +++ b/podman/domain/containers.py @@ -4,7 +4,8 @@ import logging import shlex from contextlib import suppress -from typing import Any, Dict, Iterable, Iterator, List, Mapping, Optional, Tuple, Union +from typing import Any, Optional, Union +from collections.abc import Iterable, Iterator, Mapping import requests @@ -54,7 +55,8 @@ def labels(self): @property def status(self): - """Literal["created", "initialized", "running", "stopped", "exited", "unknown"]: Returns status of container.""" + """Literal["created", "initialized", "running", "stopped", "exited", "unknown"]: + Returns status of container.""" with suppress(KeyError): return self.attrs["State"]["Status"] return "unknown" @@ -97,7 +99,7 @@ def commit(self, repository: str = None, tag: str = None, **kwargs) -> Image: Keyword Args: author (str): Name of commit author - changes (List[str]): Instructions to apply during commit + changes (list[str]): Instructions to apply during commit comment (str): Commit message to include with Image, overrides keyword message conf (dict[str, Any]): Ignored. format (str): Format of the image manifest and metadata @@ -120,7 +122,7 @@ def commit(self, repository: str = None, tag: str = None, **kwargs) -> Image: body = response.json() return ImagesManager(client=self.client).get(body["Id"]) - def diff(self) -> List[Dict[str, int]]: + def diff(self) -> list[dict[str, int]]: """Report changes of a container's filesystem. Raises: @@ -133,7 +135,7 @@ def diff(self) -> List[Dict[str, int]]: # pylint: disable=too-many-arguments def exec_run( self, - cmd: Union[str, List[str]], + cmd: Union[str, list[str]], *, stdout: bool = True, stderr: bool = True, @@ -144,11 +146,12 @@ def exec_run( detach: bool = False, stream: bool = False, socket: bool = False, # pylint: disable=unused-argument - environment: Union[Mapping[str, str], List[str]] = None, + environment: Union[Mapping[str, str], list[str]] = None, workdir: str = None, demux: bool = False, - ) -> Tuple[ - Optional[int], Union[Iterator[Union[bytes, Tuple[bytes, bytes]]], Any, Tuple[bytes, bytes]] + ) -> tuple[ + Optional[int], + Union[Iterator[Union[bytes, tuple[bytes, bytes]]], Any, tuple[bytes, bytes]], ]: """Run given command inside container and return results. @@ -165,7 +168,7 @@ def exec_run( stream: Stream response data. Ignored if ``detach`` is ``True``. Default: False socket: Return the connection socket to allow custom read/write operations. Default: False - environment: A dictionary or a List[str] in + environment: A dictionary or a list[str] in the following format ["PASSWORD=xxx"] or {"PASSWORD": "xxx"}. workdir: Path to working directory for this exec session @@ -244,7 +247,7 @@ def export(self, chunk_size: int = api.DEFAULT_CHUNK_SIZE) -> Iterator[bytes]: def get_archive( self, path: str, chunk_size: int = api.DEFAULT_CHUNK_SIZE - ) -> Tuple[Iterable, Dict[str, Any]]: + ) -> tuple[Iterable, dict[str, Any]]: """Download a file or folder from the container's filesystem. 
Args: @@ -267,7 +270,7 @@ def init(self) -> None: response = self.client.post(f"/containers/{self.id}/init") response.raise_for_status() - def inspect(self) -> Dict: + def inspect(self) -> dict: """Inspect a container. Raises: @@ -418,7 +421,7 @@ def start(self, **kwargs) -> None: def stats( self, **kwargs - ) -> Union[bytes, Dict[str, Any], Iterator[bytes], Iterator[Dict[str, Any]]]: + ) -> Union[bytes, dict[str, Any], Iterator[bytes], Iterator[dict[str, Any]]]: """Return statistics for container. Keyword Args: @@ -473,7 +476,7 @@ def stop(self, **kwargs) -> None: body = response.json() raise APIError(body["cause"], response=response, explanation=body["message"]) - def top(self, **kwargs) -> Union[Iterator[Dict[str, Any]], Dict[str, Any]]: + def top(self, **kwargs) -> Union[Iterator[dict[str, Any]], dict[str, Any]]: """Report on running processes in the container. Keyword Args: @@ -515,7 +518,7 @@ def wait(self, **kwargs) -> int: """Block until the container enters given state. Keyword Args: - condition (Union[str, List[str]]): Container state on which to release. + condition (Union[str, list[str]]): Container state on which to release. One or more of: "configured", "created", "running", "stopped", "paused", "exited", "removing", "stopping". interval (int): Time interval to wait before polling for completion. diff --git a/podman/domain/containers_create.py b/podman/domain/containers_create.py index e85eafed..523a9023 100644 --- a/podman/domain/containers_create.py +++ b/podman/domain/containers_create.py @@ -5,7 +5,8 @@ import logging import re from contextlib import suppress -from typing import Any, Dict, List, MutableMapping, Union +from typing import Any, Union +from collections.abc import MutableMapping from podman import api from podman.domain.containers import Container @@ -23,7 +24,10 @@ class CreateMixin: # pylint: disable=too-few-public-methods """Class providing create method for ContainersManager.""" def create( - self, image: Union[Image, str], command: Union[str, List[str], None] = None, **kwargs + self, + image: Union[Image, str], + command: Union[str, list[str], None] = None, + **kwargs, ) -> Container: """Create a container. @@ -34,12 +38,12 @@ def create( Keyword Args: auto_remove (bool): Enable auto-removal of the container on daemon side when the container's process exits. - blkio_weight_device (Dict[str, Any]): Block IO weight (relative device weight) + blkio_weight_device (dict[str, Any]): Block IO weight (relative device weight) in the form of: [{"Path": "device_path", "Weight": weight}]. blkio_weight (int): Block IO weight (relative weight), accepts a weight value between 10 and 1000. - cap_add (List[str]): Add kernel capabilities. For example: ["SYS_ADMIN", "MKNOD"] - cap_drop (List[str]): Drop kernel capabilities. + cap_add (list[str]): Add kernel capabilities. For example: ["SYS_ADMIN", "MKNOD"] + cap_drop (list[str]): Drop kernel capabilities. cgroup_parent (str): Override the default parent cgroup. cpu_count (int): Number of usable CPUs (Windows only). cpu_percent (int): Usable percentage of the available CPUs (Windows only). @@ -52,32 +56,32 @@ def create( cpuset_mems (str): Memory nodes (MEMs) in which to allow execution (0-3, 0,1). Only effective on NUMA systems. detach (bool): Run container in the background and return a Container object. - device_cgroup_rules (List[str]): A list of cgroup rules to apply to the container. + device_cgroup_rules (list[str]): A list of cgroup rules to apply to the container. 
             device_read_bps: Limit read rate (bytes per second) from a device in the form of:
                 `[{"Path": "device_path", "Rate": rate}]`
             device_read_iops: Limit read rate (IO per second) from a device.
             device_write_bps: Limit write rate (bytes per second) from a device.
             device_write_iops: Limit write rate (IO per second) from a device.
-            devices (List[str]): Expose host devices to the container, as a List[str] in the form
+            devices (list[str]): Expose host devices to the container, as a list[str] in the form
                 ::.

                 For example:
                     /dev/sda:/dev/xvda:rwm allows the container to have read-write
                     access to the host's /dev/sda via a node named /dev/xvda inside the
                     container.

-            dns (List[str]): Set custom DNS servers.
-            dns_opt (List[str]): Additional options to be added to the container's resolv.conf file.
-            dns_search (List[str]): DNS search domains.
-            domainname (Union[str, List[str]]): Set custom DNS search domains.
-            entrypoint (Union[str, List[str]]): The entrypoint for the container.
-            environment (Union[Dict[str, str], List[str]): Environment variables to set inside
-                the container, as a dictionary or a List[str] in the format
+            dns (list[str]): Set custom DNS servers.
+            dns_opt (list[str]): Additional options to be added to the container's resolv.conf file.
+            dns_search (list[str]): DNS search domains.
+            domainname (Union[str, list[str]]): Set custom DNS search domains.
+            entrypoint (Union[str, list[str]]): The entrypoint for the container.
+            environment (Union[dict[str, str], list[str]]): Environment variables to set inside
+                the container, as a dictionary or a list[str] in the format
                 ["SOMEVARIABLE=xxx", "SOMEOTHERVARIABLE=xyz"].
-            extra_hosts (Dict[str, str]): Additional hostnames to resolve inside the container,
+            extra_hosts (dict[str, str]): Additional hostnames to resolve inside the container,
                 as a mapping of hostname to IP address.
-            group_add (List[str]): List of additional group names and/or IDs that the container
+            group_add (list[str]): List of additional group names and/or IDs that the container
                 process will run as.
-            healthcheck (Dict[str,Any]): Specify a test to perform to check that the
+            healthcheck (dict[str,Any]): Specify a test to perform to check that the
                 container is healthy.
             health_check_on_failure_action (int): Specify an action if a healthcheck fails.
             hostname (str): Optional hostname for the container.
@@ -86,14 +90,14 @@ def create(
             ipc_mode (str): Set the IPC mode for the container.
             isolation (str): Isolation technology to use. Default: `None`.
             kernel_memory (int or str): Kernel memory limit
-            labels (Union[Dict[str, str], List[str]): A dictionary of name-value labels (e.g.
+            labels (Union[dict[str, str], list[str]]): A dictionary of name-value labels (e.g.
                 {"label1": "value1", "label2": "value2"}) or a list of names of labels to set
                 with empty values (e.g. ["label1", "label2"])
-            links (Optional[Dict[str, str]]): Mapping of links using the {'container': 'alias'}
+            links (Optional[dict[str, str]]): Mapping of links using the {'container': 'alias'}
                 format. The alias is optional. Containers declared in this dict will be linked to
                 the new container using the provided alias. Default: None.
             log_config (LogConfig): Logging configuration.
-            lxc_config (Dict[str, str]): LXC config.
+            lxc_config (dict[str, str]): LXC config.
             mac_address (str): MAC address to assign to the container.
             mem_limit (Union[int, str]): Memory limit. Accepts float values (which represent the
                 memory limit of the created container in bytes) or a string with a units
@@ -104,7 +108,7 @@ def create(
                 between 0 and 100.
memswap_limit (Union[int, str]): Maximum amount of memory + swap a container is allowed to consume. - mounts (List[Mount]): Specification for mounts to be added to the container. More + mounts (list[Mount]): Specification for mounts to be added to the container. More powerful alternative to volumes. Each item in the list is expected to be a Mount object. For example: @@ -150,7 +154,7 @@ def create( ] name (str): The name for this container. nano_cpus (int): CPU quota in units of 1e-9 CPUs. - networks (Dict[str, Dict[str, Union[str, List[str]]): + networks (dict[str, dict[str, Union[str, list[str]]): Networks which will be connected to container during container creation Values of the network configuration can be : @@ -176,7 +180,23 @@ def create( pids_limit (int): Tune a container's pids limit. Set -1 for unlimited. platform (str): Platform in the format os[/arch[/variant]]. Only used if the method needs to pull the requested image. - ports (Dict[Union[int, str], Union[int, Tuple[str, int], List[int], Dict[str, Union[int, Tuple[str, int], List[int]]]]]): Ports to bind inside the container. + ports ( + dict[ + Union[int, str], + Union[ + int, + Tuple[str, int], + list[int], + dict[ + str, + Union[ + int, + Tuple[str, int], + list[int] + ] + ] + ] + ]): Ports to bind inside the container. The keys of the dictionary are the ports to bind inside the container, either as an integer or a string in the form port/protocol, where the protocol is either @@ -225,7 +245,8 @@ def create( read_only (bool): Mount the container's root filesystem as read only. read_write_tmpfs (bool): Mount temporary file systems as read write, in case of read_only options set to True. Default: False - restart_policy (Dict[str, Union[str, int]]): Restart the container when it exits. + restart_policy (dict[str, Union[str, int]]): Restart the container when it exits. + remove (bool): Remove the container when it has finished running. Default: False. Configured as a dictionary with keys: - Name: One of on-failure, or always. @@ -233,7 +254,7 @@ def create( For example: {"Name": "on-failure", "MaximumRetryCount": 5} runtime (str): Runtime to use with this container. - secrets (List[Union[str, Secret, Dict[str, Union[str, int]]]]): Secrets to + secrets (list[Union[str, Secret, dict[str, Union[str, int]]]]): Secrets to mount to this container. For example: @@ -267,42 +288,44 @@ def create( }, ] - secret_env (Dict[str, str]): Secrets to add as environment variables available in the + secret_env (dict[str, str]): Secrets to add as environment variables available in the container. For example: {"VARIABLE1": "NameOfSecret", "VARIABLE2": "NameOfAnotherSecret"} - security_opt (List[str]): A List[str]ing values to customize labels for MLS systems, + security_opt (list[str]): A list[str]ing values to customize labels for MLS systems, such as SELinux. shm_size (Union[str, int]): Size of /dev/shm (e.g. 1G). stdin_open (bool): Keep STDIN open even if not attached. stdout (bool): Return logs from STDOUT when detach=False. Default: True. stderr (bool): Return logs from STDERR when detach=False. Default: False. stop_signal (str): The stop signal to use to stop the container (e.g. SIGINT). - storage_opt (Dict[str, str]): Storage driver options per container as a + storage_opt (dict[str, str]): Storage driver options per container as a key-value mapping. stream (bool): If true and detach is false, return a log generator instead of a string. Ignored if detach is true. Default: False. 
- sysctls (Dict[str, str]): Kernel parameters to set in the container. - tmpfs (Dict[str, str]): Temporary filesystems to mount, as a dictionary mapping a + sysctls (dict[str, str]): Kernel parameters to set in the container. + tmpfs (dict[str, str]): Temporary filesystems to mount, as a dictionary mapping a path inside the container to options for that path. For example: {'/mnt/vol2': '', '/mnt/vol1': 'size=3G,uid=1000'} tty (bool): Allocate a pseudo-TTY. - ulimits (List[Ulimit]): Ulimits to set inside the container. + ulimits (list[Ulimit]): Ulimits to set inside the container. use_config_proxy (bool): If True, and if the docker client configuration file (~/.config/containers/config.json by default) contains a proxy configuration, the corresponding environment variables will be set in the container being built. user (Union[str, int]): Username or UID to run commands as inside the container. userns_mode (str): Sets the user namespace mode for the container when user namespace - remapping option is enabled. Supported values documented `here `_ + remapping option is enabled. Supported values documented + `here `_ uts_mode (str): Sets the UTS namespace mode for the container. - `These `_ are the supported values. + `These `_ + are the supported values. version (str): The version of the API to use. Set to auto to automatically detect the server's version. Default: 3.0.0 volume_driver (str): The name of a volume driver/plugin. - volumes (Dict[str, Dict[str, Union[str, list]]]): A dictionary to configure + volumes (dict[str, dict[str, Union[str, list]]]): A dictionary to configure volumes mounted inside the container. The key is either the host path or a volume name, and the value is a dictionary with the keys: @@ -330,7 +353,7 @@ def create( } - volumes_from (List[str]): List of container names or IDs to get volumes from. + volumes_from (list[str]): List of container names or IDs to get volumes from. working_dir (str): Path to the working directory. workdir (str): Alias of working_dir - Path to the working directory. 
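The keyword arguments documented above map directly onto `ContainersManager.create()`. Below is a minimal sketch of how a few of them fit together; the socket URL and image name are illustrative assumptions, not values taken from this diff:

```python
from podman import PodmanClient

# Assumed socket URL; rootless setups typically use
# unix://$XDG_RUNTIME_DIR/podman/podman.sock instead.
with PodmanClient(base_url="unix:///run/podman/podman.sock") as client:
    container = client.containers.create(
        "quay.io/libpod/alpine:latest",  # assumed image name
        command=["sleep", "300"],
        name="typing-demo",
        # ports: container port -> host binding, per the mapping described above
        ports={"2222/tcp": 2222, "8080/tcp": ("127.0.0.1", 8080)},
        # restart_policy: dictionary with Name / MaximumRetryCount keys
        restart_policy={"Name": "on-failure", "MaximumRetryCount": 5},
        environment={"SOMEVARIABLE": "xxx"},
        labels={"purpose": "demo"},
        remove=True,  # newly documented kwarg: remove the container when it exits
    )
    container.start()
```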
@@ -362,7 +385,7 @@ def create( # pylint: disable=too-many-locals,too-many-statements,too-many-branches @staticmethod - def _render_payload(kwargs: MutableMapping[str, Any]) -> Dict[str, Any]: + def _render_payload(kwargs: MutableMapping[str, Any]) -> dict[str, Any]: """Map create/run kwargs into body parameters.""" args = copy.copy(kwargs) @@ -572,13 +595,13 @@ def to_bytes(size: Union[int, str, None]) -> Union[int, None]: regular_options = ["consistency", "mode", "size"] for k, v in item.items(): - k = k.lower() - option_name = names_dict.get(k, k) - if k in bool_options and v is True: + _k = k.lower() + option_name = names_dict.get(_k, _k) + if _k in bool_options and v is True: options.append(option_name) - elif k in regular_options: + elif _k in regular_options: options.append(f'{option_name}={v}') - elif k in simple_options: + elif _k in simple_options: options.append(v) mount_point["options"] = options @@ -627,13 +650,15 @@ def parse_host_port(_container_port, _protocol, _host): return result for container, host in args.pop("ports", {}).items(): - if isinstance(container, int): - container = str(container) + # avoid redefinition of the loop variable, then ensure it's a string + str_container = container + if isinstance(str_container, int): + str_container = str(str_container) - if "/" in container: - container_port, protocol = container.split("/") + if "/" in str_container: + container_port, protocol = str_container.split("/") else: - container_port, protocol = container, "tcp" + container_port, protocol = str_container, "tcp" port_map_list = parse_host_port(container_port, protocol, host) params["portmappings"].extend(port_map_list) diff --git a/podman/domain/containers_manager.py b/podman/domain/containers_manager.py index b204e877..b6318d76 100644 --- a/podman/domain/containers_manager.py +++ b/podman/domain/containers_manager.py @@ -2,7 +2,8 @@ import logging import urllib -from typing import Any, Dict, List, Mapping, Union +from typing import Any, Union +from collections.abc import Mapping from podman import api from podman.domain.containers import Container @@ -44,7 +45,7 @@ def get(self, key: str) -> Container: response.raise_for_status() return self.prepare_model(attrs=response.json()) - def list(self, **kwargs) -> List[Container]: + def list(self, **kwargs) -> list[Container]: """Report on containers. Keyword Args: @@ -57,7 +58,7 @@ def list(self, **kwargs) -> List[Container]: - exited (int): Only containers with specified exit code - status (str): One of restarting, running, paused, exited - - label (Union[str, List[str]]): Format either "key", "key=value" or a list of such. + - label (Union[str, list[str]]): Format either "key", "key=value" or a list of such. - id (str): The id of the container. - name (str): The name of the container. - ancestor (str): Filter by container ancestor. Format of @@ -90,17 +91,17 @@ def list(self, **kwargs) -> List[Container]: return [self.prepare_model(attrs=i) for i in response.json()] - def prune(self, filters: Mapping[str, str] = None) -> Dict[str, Any]: + def prune(self, filters: Mapping[str, str] = None) -> dict[str, Any]: """Delete stopped containers. Args: filters: Criteria for determining containers to remove. Available keys are: - until (str): Delete containers before this time - - label (List[str]): Labels associated with containers + - label (list[str]): Labels associated with containers Returns: Keys: - - ContainersDeleted (List[str]): Identifiers of deleted containers. 
+ - ContainersDeleted (list[str]): Identifiers of deleted containers. - SpaceReclaimed (int): Amount of disk space reclaimed in bytes. Raises: diff --git a/podman/domain/containers_run.py b/podman/domain/containers_run.py index c20d807a..c393268e 100644 --- a/podman/domain/containers_run.py +++ b/podman/domain/containers_run.py @@ -3,7 +3,8 @@ import logging import threading from contextlib import suppress -from typing import Generator, Iterator, List, Union +from typing import Union +from collections.abc import Generator, Iterator from podman.domain.containers import Container from podman.domain.images import Image @@ -18,7 +19,7 @@ class RunMixin: # pylint: disable=too-few-public-methods def run( self, image: Union[str, Image], - command: Union[str, List[str], None] = None, + command: Union[str, list[str], None] = None, *, stdout=True, stderr=False, diff --git a/podman/domain/events.py b/podman/domain/events.py index 34972ec2..2fe68061 100644 --- a/podman/domain/events.py +++ b/podman/domain/events.py @@ -3,7 +3,8 @@ import json import logging from datetime import datetime -from typing import Any, Dict, Optional, Union, Iterator +from typing import Any, Optional, Union +from collections.abc import Iterator from podman import api from podman.api.client import APIClient @@ -26,9 +27,9 @@ def list( self, since: Union[datetime, int, None] = None, until: Union[datetime, int, None] = None, - filters: Optional[Dict[str, Any]] = None, + filters: Optional[dict[str, Any]] = None, decode: bool = False, - ) -> Iterator[Union[str, Dict[str, Any]]]: + ) -> Iterator[Union[str, dict[str, Any]]]: """Report on networks. Args: @@ -38,7 +39,7 @@ def list( until: Get events older than this time. Yields: - When decode is True, Iterator[Dict[str, Any]] + When decode is True, Iterator[dict[str, Any]] When decode is False, Iterator[str] """ diff --git a/podman/domain/images.py b/podman/domain/images.py index 63d7c78b..6e62acf9 100644 --- a/podman/domain/images.py +++ b/podman/domain/images.py @@ -1,11 +1,12 @@ """Model and Manager for Image resources.""" import logging -from typing import Any, Dict, Iterator, List, Optional, Union +from typing import Any, Optional, Literal, Union +from collections.abc import Iterator import urllib.parse -from podman import api +from podman.api import DEFAULT_CHUNK_SIZE from podman.domain.manager import PodmanResource from podman.errors import ImageNotFound, InvalidArgument @@ -36,7 +37,7 @@ def tags(self): return [tag for tag in repo_tags if tag != ":"] - def history(self) -> List[Dict[str, Any]]: + def history(self) -> list[dict[str, Any]]: """Returns history of the Image. Raises: @@ -49,7 +50,7 @@ def history(self) -> List[Dict[str, Any]]: def remove( self, **kwargs - ) -> List[Dict[api.Literal["Deleted", "Untagged", "Errors", "ExitCode"], Union[str, int]]]: + ) -> list[dict[Literal["Deleted", "Untagged", "Errors", "ExitCode"], Union[str, int]]]: """Delete image from Podman service. Podman only @@ -69,7 +70,7 @@ def remove( def save( self, - chunk_size: Optional[int] = api.DEFAULT_CHUNK_SIZE, + chunk_size: Optional[int] = DEFAULT_CHUNK_SIZE, named: Union[str, bool] = False, ) -> Iterator[bytes]: """Returns Image as tarball. 
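The renames in `containers_create.py` above (`_k`, `str_container`) and in `json_stream.py` and `manifests.py` below (`_data`, `img_item`) all follow the same pattern: rather than rebinding the loop variable, the normalized value gets a fresh name, which is what the now-enforced PLW2901 ("redefined loop name") rule expects. A small, self-contained illustration with hypothetical data:

```python
options = {"Mode": "Z", "Size": "3G", "ReadOnly": True}

# Flagged form: the loop variable is rebound inside the body.
#   for key, value in options.items():
#       key = key.lower()   # PLW2901: loop variable `key` overwritten
#       ...

normalized = {}
for key, value in options.items():
    _key = key.lower()  # bind the normalized form to a new name instead
    normalized[_key] = value

print(normalized)  # {'mode': 'Z', 'size': '3G', 'readonly': True}
```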
diff --git a/podman/domain/images_build.py b/podman/domain/images_build.py index 84de5d5b..f1fc9c38 100644 --- a/podman/domain/images_build.py +++ b/podman/domain/images_build.py @@ -7,7 +7,8 @@ import re import shutil import tempfile -from typing import Any, Dict, Iterator, List, Tuple +from typing import Any +from collections.abc import Iterator import itertools @@ -22,7 +23,7 @@ class BuildMixin: """Class providing build method for ImagesManager.""" # pylint: disable=too-many-locals,too-many-branches,too-few-public-methods,too-many-statements - def build(self, **kwargs) -> Tuple[Image, Iterator[bytes]]: + def build(self, **kwargs) -> tuple[Image, Iterator[bytes]]: """Returns built image. Keyword Args: @@ -39,7 +40,7 @@ def build(self, **kwargs) -> Tuple[Image, Iterator[bytes]]: forcerm (bool) – Always remove intermediate containers, even after unsuccessful builds dockerfile (str) – full path to the Dockerfile / Containerfile buildargs (Mapping[str,str) – A dictionary of build arguments - container_limits (Dict[str, Union[int,str]]) – + container_limits (dict[str, Union[int,str]]) – A dictionary of limits applied to each container created by the build process. Valid keys: @@ -52,11 +53,11 @@ def build(self, **kwargs) -> Tuple[Image, Iterator[bytes]]: shmsize (int) – Size of /dev/shm in bytes. The size must be greater than 0. If omitted the system uses 64MB labels (Mapping[str,str]) – A dictionary of labels to set on the image - cache_from (List[str]) – A list of image's identifier used for build cache resolution + cache_from (list[str]) – A list of image's identifier used for build cache resolution target (str) – Name of the build-stage to build in a multi-stage Dockerfile network_mode (str) – networking mode for the run commands during build squash (bool) – Squash the resulting images layers into a single layer. - extra_hosts (Dict[str,str]) – Extra hosts to add to /etc/hosts in building + extra_hosts (dict[str,str]) – Extra hosts to add to /etc/hosts in building containers, as a mapping of hostname to IP address. platform (str) – Platform in the format os[/arch[/variant]]. isolation (str) – Isolation technology used during build. (ignored) @@ -140,7 +141,7 @@ def build(self, **kwargs) -> Tuple[Image, Iterator[bytes]]: raise BuildError(unknown or "Unknown", report_stream) @staticmethod - def _render_params(kwargs) -> Dict[str, List[Any]]: + def _render_params(kwargs) -> dict[str, list[Any]]: """Map kwargs to query parameters. All unsupported kwargs are silently ignored. diff --git a/podman/domain/images_manager.py b/podman/domain/images_manager.py index ce2c641f..fdf54c6c 100644 --- a/podman/domain/images_manager.py +++ b/podman/domain/images_manager.py @@ -1,16 +1,17 @@ """PodmanResource manager subclassed for Images.""" +import builtins import io import json import logging import os import urllib.parse -from typing import Any, Dict, Iterator, List, Mapping, Optional, Union, Generator +from typing import Any, Literal, Optional, Union +from collections.abc import Iterator, Mapping, Generator from pathlib import Path import requests from podman import api -from podman.api import Literal from podman.api.http_utils import encode_auth_header from podman.domain.images import Image from podman.domain.images_build import BuildMixin @@ -47,17 +48,17 @@ def exists(self, key: str) -> bool: response = self.client.get(f"/images/{key}/exists") return response.ok - def list(self, **kwargs) -> List[Image]: + def list(self, **kwargs) -> builtins.list[Image]: """Report on images. 
Keyword Args: name (str) – Only show images belonging to the repository name all (bool) – Show intermediate image layers. By default, these are filtered out. - filters (Mapping[str, Union[str, List[str]]) – Filters to be used on the image list. + filters (Mapping[str, Union[str, list[str]]) – Filters to be used on the image list. Available filters: - dangling (bool) - - label (Union[str, List[str]]): format either "key" or "key=value" + - label (Union[str, list[str]]): format either "key" or "key=value" Raises: APIError: when service returns an error @@ -170,7 +171,7 @@ def prune( all: Optional[bool] = False, # pylint: disable=redefined-builtin external: Optional[bool] = False, filters: Optional[Mapping[str, Any]] = None, - ) -> Dict[Literal["ImagesDeleted", "SpaceReclaimed"], Any]: + ) -> dict[Literal["ImagesDeleted", "SpaceReclaimed"], Any]: """Delete unused images. The Untagged keys will always be "". @@ -201,8 +202,8 @@ def prune( response = self.client.post("/images/prune", params=params) response.raise_for_status() - deleted: List[Dict[str, str]] = [] - error: List[str] = [] + deleted: builtins.list[dict[str, str]] = [] + error: builtins.list[str] = [] reclaimed: int = 0 # If the prune doesn't remove images, the API returns "null" # and it's interpreted as None (NoneType) @@ -228,7 +229,7 @@ def prune( "SpaceReclaimed": reclaimed, } - def prune_builds(self) -> Dict[Literal["CachesDeleted", "SpaceReclaimed"], Any]: + def prune_builds(self) -> dict[Literal["CachesDeleted", "SpaceReclaimed"], Any]: """Delete builder cache. Method included to complete API, the operation always returns empty @@ -238,7 +239,7 @@ def prune_builds(self) -> Dict[Literal["CachesDeleted", "SpaceReclaimed"], Any]: def push( self, repository: str, tag: Optional[str] = None, **kwargs - ) -> Union[str, Iterator[Union[str, Dict[str, Any]]]]: + ) -> Union[str, Iterator[Union[str, dict[str, Any]]]]: """Push Image or repository to the registry. Args: @@ -248,7 +249,7 @@ def push( Keyword Args: auth_config (Mapping[str, str]: Override configured credentials. Must include username and password keys. - decode (bool): return data from server as Dict[str, Any]. Ignored unless stream=True. + decode (bool): return data from server as dict[str, Any]. Ignored unless stream=True. destination (str): alternate destination for image. (Podman only) stream (bool): return output as blocking generator. Default: False. tlsVerify (bool): Require TLS verification. @@ -258,7 +259,7 @@ def push( Raises: APIError: when service returns an error """ - auth_config: Optional[Dict[str, str]] = kwargs.get("auth_config") + auth_config: Optional[dict[str, str]] = kwargs.get("auth_config") headers = { # A base64url-encoded auth configuration @@ -300,8 +301,8 @@ def push( @staticmethod def _push_helper( - decode: bool, body: List[Dict[str, Any]] - ) -> Iterator[Union[str, Dict[str, Any]]]: + decode: bool, body: builtins.list[dict[str, Any]] + ) -> Iterator[Union[str, dict[str, Any]]]: """Helper needed to allow push() to return either a generator or a str.""" for entry in body: if decode: @@ -311,8 +312,12 @@ def _push_helper( # pylint: disable=too-many-locals,too-many-branches def pull( - self, repository: str, tag: Optional[str] = None, all_tags: bool = False, **kwargs - ) -> Union[Image, List[Image], Iterator[str]]: + self, + repository: str, + tag: Optional[str] = None, + all_tags: bool = False, + **kwargs, + ) -> Union[Image, builtins.list[Image], Iterator[str]]: """Request Podman service to pull image(s) from repository. 
Args: @@ -350,7 +355,7 @@ def pull( else: tag = "latest" - auth_config: Optional[Dict[str, str]] = kwargs.get("auth_config") + auth_config: Optional[dict[str, str]] = kwargs.get("auth_config") headers = { # A base64url-encoded auth configuration @@ -415,7 +420,7 @@ def pull( for item in reversed(list(response.iter_lines())): obj = json.loads(item) if all_tags and "images" in obj: - images: List[Image] = [] + images: builtins.list[Image] = [] for name in obj["images"]: images.append(self.get(name)) return images @@ -460,7 +465,7 @@ def remove( image: Union[Image, str], force: Optional[bool] = None, noprune: bool = False, # pylint: disable=unused-argument - ) -> List[Dict[Literal["Deleted", "Untagged", "Errors", "ExitCode"], Union[str, int]]]: + ) -> builtins.list[dict[Literal["Deleted", "Untagged", "Errors", "ExitCode"], Union[str, int]]]: """Delete image from Podman service. Args: @@ -479,7 +484,7 @@ def remove( response.raise_for_status(not_found=ImageNotFound) body = response.json() - results: List[Dict[str, Union[int, str]]] = [] + results: builtins.list[dict[str, Union[int, str]]] = [] for key in ("Deleted", "Untagged", "Errors"): if key in body: for element in body[key]: @@ -487,14 +492,14 @@ def remove( results.append({"ExitCode": body["ExitCode"]}) return results - def search(self, term: str, **kwargs) -> List[Dict[str, Any]]: + def search(self, term: str, **kwargs) -> builtins.list[dict[str, Any]]: """Search Images on registries. Args: term: Used to target Image results. Keyword Args: - filters (Mapping[str, List[str]): Refine results of search. Available filters: + filters (Mapping[str, list[str]): Refine results of search. Available filters: - is-automated (bool): Image build is automated. - is-official (bool): Image build is owned by product provider. diff --git a/podman/domain/ipam.py b/podman/domain/ipam.py index 2cf445e4..f446841c 100644 --- a/podman/domain/ipam.py +++ b/podman/domain/ipam.py @@ -3,7 +3,8 @@ Provided for compatibility """ -from typing import Any, List, Mapping, Optional +from typing import Any, Optional +from collections.abc import Mapping class IPAMPool(dict): @@ -41,7 +42,7 @@ class IPAMConfig(dict): def __init__( self, driver: Optional[str] = "host-local", - pool_configs: Optional[List[IPAMPool]] = None, + pool_configs: Optional[list[IPAMPool]] = None, options: Optional[Mapping[str, Any]] = None, ): """Create IPAMConfig. diff --git a/podman/domain/json_stream.py b/podman/domain/json_stream.py index 6978fc2f..399e4295 100644 --- a/podman/domain/json_stream.py +++ b/podman/domain/json_stream.py @@ -14,9 +14,10 @@ def stream_as_text(stream): instead of byte streams. """ for data in stream: + _data = data if not isinstance(data, str): - data = data.decode('utf-8', 'replace') - yield data + _data = data.decode('utf-8', 'replace') + yield _data def json_splitter(buffer): diff --git a/podman/domain/manager.py b/podman/domain/manager.py index 9be479d9..ffbad3c4 100644 --- a/podman/domain/manager.py +++ b/podman/domain/manager.py @@ -2,7 +2,8 @@ from abc import ABC, abstractmethod from collections import abc -from typing import Any, List, Mapping, Optional, TypeVar, Union +from typing import Any, Optional, TypeVar, Union +from collections.abc import Mapping from podman.api.client import APIClient @@ -10,7 +11,7 @@ PodmanResourceType: TypeVar = TypeVar("PodmanResourceType", bound="PodmanResource") -class PodmanResource(ABC): +class PodmanResource(ABC): # noqa: B024 """Base class for representing resource of a Podman service. 
Attributes: @@ -108,7 +109,7 @@ def get(self, key: str) -> PodmanResourceType: """Returns representation of resource.""" @abstractmethod - def list(self, **kwargs) -> List[PodmanResourceType]: + def list(self, **kwargs) -> list[PodmanResourceType]: """Returns list of resources.""" def prepare_model(self, attrs: Union[PodmanResource, Mapping[str, Any]]) -> PodmanResourceType: diff --git a/podman/domain/manifests.py b/podman/domain/manifests.py index 98bd3a00..adc82a57 100644 --- a/podman/domain/manifests.py +++ b/podman/domain/manifests.py @@ -3,7 +3,7 @@ import logging import urllib.parse from contextlib import suppress -from typing import Any, Dict, List, Optional, Union +from typing import Any, Optional, Union from podman import api from podman.domain.images import Image @@ -38,7 +38,7 @@ def quoted_name(self): @property def names(self): - """List[str]: Returns the identifier of the manifest.""" + """list[str]: Returns the identifier of the manifest.""" return self.name @property @@ -51,7 +51,7 @@ def version(self): """int: Returns the schema version type for this manifest.""" return self.attrs.get("schemaVersion") - def add(self, images: List[Union[Image, str]], **kwargs) -> None: + def add(self, images: list[Union[Image, str]], **kwargs) -> None: """Add Image to manifest list. Args: @@ -59,9 +59,9 @@ def add(self, images: List[Union[Image, str]], **kwargs) -> None: Keyword Args: all (bool): - annotation (Dict[str, str]): + annotation (dict[str, str]): arch (str): - features (List[str]): + features (list[str]): os (str): os_version (str): variant (str): @@ -82,9 +82,11 @@ def add(self, images: List[Union[Image, str]], **kwargs) -> None: "operation": "update", } for item in images: - if isinstance(item, Image): - item = item.attrs["RepoTags"][0] - data["images"].append(item) + # avoid redefinition of the loop variable, then ensure it's an image + img_item = item + if isinstance(img_item, Image): + img_item = img_item.attrs["RepoTags"][0] + data["images"].append(img_item) data = api.prepare_body(data) response = self.client.put(f"/manifests/{self.quoted_name}", data=data) @@ -151,7 +153,7 @@ def resource(self): def create( self, name: str, - images: Optional[List[Union[Image, str]]] = None, + images: Optional[list[Union[Image, str]]] = None, all: Optional[bool] = None, # pylint: disable=redefined-builtin ) -> Manifest: """Create a Manifest. 
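The `import builtins` added to `images_manager.py` above (and to `pods_manager.py` below) is needed because those manager classes define a method named `list`: once that method is bound in the class body, a later eagerly evaluated annotation such as `list[Image]` would resolve to the method rather than the builtin type. A stripped-down illustration with a hypothetical class:

```python
import builtins


class WidgetManager:
    """Hypothetical manager whose ``list`` method shadows the builtin name."""

    def list(self) -> builtins.list[str]:
        return ["a", "b"]

    # Annotating this as plain ``list[str]`` would be evaluated against the
    # ``list`` method defined above and raise
    # ``TypeError: 'function' object is not subscriptable`` while the class body runs.
    def prune(self) -> builtins.list[str]:
        return []


print(WidgetManager().list())
```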
@@ -165,13 +167,15 @@ def create( ValueError: when no names are provided NotFoundImage: when a given image does not exist """ - params: Dict[str, Any] = {} + params: dict[str, Any] = {} if images is not None: params["images"] = [] for item in images: - if isinstance(item, Image): - item = item.attrs["RepoTags"][0] - params["images"].append(item) + # avoid redefinition of the loop variable, then ensure it's an image + img_item = item + if isinstance(img_item, Image): + img_item = img_item.attrs["RepoTags"][0] + params["images"].append(img_item) if all is not None: params["all"] = all @@ -215,12 +219,12 @@ def get(self, key: str) -> Manifest: body["names"] = key return self.prepare_model(attrs=body) - def list(self, **kwargs) -> List[Manifest]: + def list(self, **kwargs) -> list[Manifest]: """Not Implemented.""" raise NotImplementedError("Podman service currently does not support listing manifests.") - def remove(self, name: Union[Manifest, str]) -> Dict[str, Any]: + def remove(self, name: Union[Manifest, str]) -> dict[str, Any]: """Delete the manifest list from the Podman service.""" if isinstance(name, Manifest): name = name.name diff --git a/podman/domain/networks.py b/podman/domain/networks.py index b509132a..bbc89539 100644 --- a/podman/domain/networks.py +++ b/podman/domain/networks.py @@ -24,7 +24,7 @@ class Network(PodmanResource): """Details and configuration for a networks managed by the Podman service. Attributes: - attrs (Dict[str, Any]): Attributes of Network reported from Podman service + attrs (dict[str, Any]): Attributes of Network reported from Podman service """ @property @@ -41,7 +41,7 @@ def id(self): # pylint: disable=invalid-name @property def containers(self): - """List[Container]: Returns list of Containers connected to network.""" + """list[Container]: Returns list of Containers connected to network.""" with suppress(KeyError): container_manager = ContainersManager(client=self.client) return [container_manager.get(ident) for ident in self.attrs["Containers"].keys()] @@ -71,12 +71,12 @@ def connect(self, container: Union[str, Container], *_, **kwargs) -> None: container: To add to this Network Keyword Args: - aliases (List[str]): Aliases to add for this endpoint - driver_opt (Dict[str, Any]): Options to provide to network driver + aliases (list[str]): Aliases to add for this endpoint + driver_opt (dict[str, Any]): Options to provide to network driver ipv4_address (str): IPv4 address for given Container on this network ipv6_address (str): IPv6 address for given Container on this network - link_local_ips (List[str]): list of link-local addresses - links (List[Union[str, Containers]]): Ignored + link_local_ips (list[str]): list of link-local addresses + links (list[Union[str, Containers]]): Ignored Raises: APIError: when Podman service reports an error diff --git a/podman/domain/networks_manager.py b/podman/domain/networks_manager.py index 2deb175c..c7c92f79 100644 --- a/podman/domain/networks_manager.py +++ b/podman/domain/networks_manager.py @@ -12,10 +12,9 @@ import ipaddress import logging from contextlib import suppress -from typing import Any, Dict, List, Optional +from typing import Any, Optional, Literal -from podman import api -from podman.api import http_utils +from podman.api import http_utils, prepare_filters from podman.domain.manager import Manager from podman.domain.networks import Network from podman.errors import APIError @@ -46,8 +45,8 @@ def create(self, name: str, **kwargs) -> Network: ingress (bool): Ignored, always False. 
internal (bool): Restrict external access to the network. ipam (IPAMConfig): Optional custom IP scheme for the network. - labels (Dict[str, str]): Map of labels to set on the network. - options (Dict[str, Any]): Driver options. + labels (dict[str, str]): Map of labels to set on the network. + options (dict[str, Any]): Driver options. scope (str): Ignored, always "local". Raises: @@ -75,7 +74,7 @@ def create(self, name: str, **kwargs) -> Network: response.raise_for_status() return self.prepare_model(attrs=response.json()) - def _prepare_ipam(self, data: Dict[str, Any], ipam: Dict[str, Any]): + def _prepare_ipam(self, data: dict[str, Any], ipam: dict[str, Any]): if "Driver" in ipam: data["ipam_options"] = {"driver": ipam["Driver"]} @@ -117,23 +116,23 @@ def get(self, key: str) -> Network: return self.prepare_model(attrs=response.json()) - def list(self, **kwargs) -> List[Network]: + def list(self, **kwargs) -> list[Network]: """Report on networks. Keyword Args: - names (List[str]): List of names to filter by. - ids (List[str]): List of identifiers to filter by. + names (list[str]): List of names to filter by. + ids (list[str]): List of identifiers to filter by. filters (Mapping[str,str]): Criteria for listing networks. Available filters: - driver="bridge": Matches a network's driver. Only "bridge" is supported. - - label=(Union[str, List[str]]): format either "key", "key=value" + - label=(Union[str, list[str]]): format either "key", "key=value" or a list of such. - type=(str): Filters networks by type, legal values are: - "custom" - "builtin" - - plugin=(List[str]]): Matches CNI plugins included in a network, legal + - plugin=(list[str]]): Matches CNI plugins included in a network, legal values are (Podman only): - bridge @@ -152,7 +151,7 @@ def list(self, **kwargs) -> List[Network]: filters = kwargs.get("filters", {}) filters["name"] = kwargs.get("names") filters["id"] = kwargs.get("ids") - filters = api.prepare_filters(filters) + filters = prepare_filters(filters) params = {"filters": filters} response = self.client.get("/networks/json", params=params) @@ -161,8 +160,8 @@ def list(self, **kwargs) -> List[Network]: return [self.prepare_model(i) for i in response.json()] def prune( - self, filters: Optional[Dict[str, Any]] = None - ) -> Dict[api.Literal["NetworksDeleted", "SpaceReclaimed"], Any]: + self, filters: Optional[dict[str, Any]] = None + ) -> dict[Literal["NetworksDeleted", "SpaceReclaimed"], Any]: """Delete unused Networks. 
SpaceReclaimed always reported as 0 @@ -173,11 +172,11 @@ def prune( Raises: APIError: when service reports error """ - params = {"filters": api.prepare_filters(filters)} + params = {"filters": prepare_filters(filters)} response = self.client.post("/networks/prune", params=params) response.raise_for_status() - deleted: List[str] = [] + deleted: list[str] = [] for item in response.json(): if item["Error"] is not None: raise APIError( diff --git a/podman/domain/pods.py b/podman/domain/pods.py index 30a0d19c..56cd905c 100644 --- a/podman/domain/pods.py +++ b/podman/domain/pods.py @@ -1,11 +1,11 @@ """Model and Manager for Pod resources.""" import logging -from typing import Any, Dict, Optional, Tuple, Union +from typing import Any, Optional, Union from podman.domain.manager import PodmanResource -_Timeout = Union[None, float, Tuple[float, float], Tuple[float, None]] +_Timeout = Union[None, float, tuple[float, float], tuple[float, None]] logger = logging.getLogger("podman.pods") @@ -88,7 +88,7 @@ def stop(self, timeout: _Timeout = None) -> None: response = self.client.post(f"/pods/{self.id}/stop", params=params) response.raise_for_status() - def top(self, **kwargs) -> Dict[str, Any]: + def top(self, **kwargs) -> dict[str, Any]: """Report on running processes in pod. Keyword Args: diff --git a/podman/domain/pods_manager.py b/podman/domain/pods_manager.py index 5918f2bd..77828606 100644 --- a/podman/domain/pods_manager.py +++ b/podman/domain/pods_manager.py @@ -1,8 +1,10 @@ """PodmanResource manager subclassed for Networks.""" +import builtins import json import logging -from typing import Any, Dict, List, Optional, Union, Iterator +from typing import Any, Optional, Union +from collections.abc import Iterator from podman import api from podman.domain.manager import Manager @@ -57,24 +59,24 @@ def get(self, pod_id: str) -> Pod: # pylint: disable=arguments-differ,arguments response.raise_for_status() return self.prepare_model(attrs=response.json()) - def list(self, **kwargs) -> List[Pod]: + def list(self, **kwargs) -> builtins.list[Pod]: """Report on pods. Keyword Args: filters (Mapping[str, str]): Criteria for listing pods. Available filters: - - ctr-ids (List[str]): List of container ids to filter by. - - ctr-names (List[str]): List of container names to filter by. - - ctr-number (List[int]): list pods with given number of containers. - - ctr-status (List[str]): List pods with containers in given state. + - ctr-ids (list[str]): list of container ids to filter by. + - ctr-names (list[str]): list of container names to filter by. + - ctr-number (list[int]): list pods with given number of containers. + - ctr-status (list[str]): list pods with containers in given state. Legal values are: "created", "running", "paused", "stopped", "exited", or "unknown" - id (str) - List pod with this id. - name (str) - List pod with this name. - - status (List[str]): List pods in given state. Legal values are: + - status (list[str]): List pods in given state. Legal values are: "created", "running", "paused", "stopped", "exited", or "unknown" - - label (List[str]): List pods with given labels. - - network (List[str]): List pods associated with given Network Ids (not Names). + - label (list[str]): List pods with given labels. + - network (list[str]): List pods associated with given Network Ids (not Names). 
Raises: APIError: when an error returned by service @@ -84,12 +86,12 @@ def list(self, **kwargs) -> List[Pod]: response.raise_for_status() return [self.prepare_model(attrs=i) for i in response.json()] - def prune(self, filters: Optional[Dict[str, str]] = None) -> Dict[str, Any]: + def prune(self, filters: Optional[dict[str, str]] = None) -> dict[str, Any]: """Delete unused Pods. Returns: Dictionary Keys: - - PodsDeleted (List[str]): List of pod ids deleted. + - PodsDeleted (list[str]): List of pod ids deleted. - SpaceReclaimed (int): Always zero. Raises: @@ -98,7 +100,7 @@ def prune(self, filters: Optional[Dict[str, str]] = None) -> Dict[str, Any]: response = self.client.post("/pods/prune", params={"filters": api.prepare_filters(filters)}) response.raise_for_status() - deleted: List[str] = [] + deleted: builtins.list[str] = [] for item in response.json(): if item["Err"] is not None: raise APIError( @@ -129,12 +131,14 @@ def remove(self, pod_id: Union[Pod, str], force: Optional[bool] = None) -> None: response = self.client.delete(f"/pods/{pod_id}", params={"force": force}) response.raise_for_status() - def stats(self, **kwargs) -> Union[List[Dict[str, Any]], Iterator[List[Dict[str, Any]]]]: + def stats( + self, **kwargs + ) -> Union[builtins.list[dict[str, Any]], Iterator[builtins.list[dict[str, Any]]]]: """Resource usage statistics for the containers in pods. Keyword Args: all (bool): Provide statistics for all running pods. - name (Union[str, List[str]]): Pods to include in report. + name (Union[str, list[str]]): Pods to include in report. stream (bool): Stream statistics until cancelled. Default: False. decode (bool): If True, response will be decoded into dict. Default: False. diff --git a/podman/domain/registry_data.py b/podman/domain/registry_data.py index a792824d..5c0ad1ae 100644 --- a/podman/domain/registry_data.py +++ b/podman/domain/registry_data.py @@ -1,7 +1,8 @@ """Module for tracking registry metadata.""" import logging -from typing import Any, Mapping, Optional, Union +from typing import Any, Optional, Union +from collections.abc import Mapping from podman import api from podman.domain.images import Image diff --git a/podman/domain/secrets.py b/podman/domain/secrets.py index 77093ab0..20c81ac8 100644 --- a/podman/domain/secrets.py +++ b/podman/domain/secrets.py @@ -1,7 +1,8 @@ """Model and Manager for Secrets resources.""" from contextlib import suppress -from typing import Any, List, Mapping, Optional, Union +from typing import Any, Optional, Union +from collections.abc import Mapping from podman.api import APIClient from podman.domain.manager import Manager, PodmanResource @@ -75,11 +76,11 @@ def get(self, secret_id: str) -> Secret: # pylint: disable=arguments-differ,arg response.raise_for_status() return self.prepare_model(attrs=response.json()) - def list(self, **kwargs) -> List[Secret]: + def list(self, **kwargs) -> list[Secret]: """Report on Secrets. Keyword Args: - filters (Dict[str, Any]): Ignored. + filters (dict[str, Any]): Ignored. 
Raises: APIError: when error returned by service diff --git a/podman/domain/system.py b/podman/domain/system.py index 336421d7..88d66992 100644 --- a/podman/domain/system.py +++ b/podman/domain/system.py @@ -1,7 +1,7 @@ """SystemManager to provide system level information from Podman service.""" import logging -from typing import Any, Dict, Optional, Union +from typing import Any, Optional, Union from podman.api.client import APIClient from podman import api @@ -20,7 +20,7 @@ def __init__(self, client: APIClient) -> None: """ self.client = client - def df(self) -> Dict[str, Any]: # pylint: disable=invalid-name + def df(self) -> dict[str, Any]: # pylint: disable=invalid-name """Disk usage by Podman resources. Returns: @@ -30,7 +30,7 @@ def df(self) -> Dict[str, Any]: # pylint: disable=invalid-name response.raise_for_status() return response.json() - def info(self, *_, **__) -> Dict[str, Any]: + def info(self, *_, **__) -> dict[str, Any]: """Returns information on Podman service.""" response = self.client.get("/info") response.raise_for_status() @@ -48,7 +48,7 @@ def login( # pylint: disable=too-many-arguments,too-many-positional-arguments,u identitytoken: Optional[str] = None, registrytoken: Optional[str] = None, tls_verify: Optional[Union[bool, str]] = None, - ) -> Dict[str, Any]: + ) -> dict[str, Any]: """Log into Podman service. Args: @@ -91,7 +91,7 @@ def ping(self) -> bool: response = self.client.head("/_ping") return response.ok - def version(self, **kwargs) -> Dict[str, Any]: + def version(self, **kwargs) -> dict[str, Any]: """Get version information from service. Keyword Args: diff --git a/podman/domain/volumes.py b/podman/domain/volumes.py index 6867d5c8..717db37d 100644 --- a/podman/domain/volumes.py +++ b/podman/domain/volumes.py @@ -1,12 +1,11 @@ """Model and Manager for Volume resources.""" import logging -from typing import Any, Dict, List, Optional, Union +from typing import Any, Literal, Optional, Union import requests from podman import api -from podman.api import Literal from podman.domain.manager import Manager, PodmanResource from podman.errors import APIError @@ -53,8 +52,8 @@ def create(self, name: Optional[str] = None, **kwargs) -> Volume: Keyword Args: driver (str): Volume driver to use - driver_opts (Dict[str, str]): Options to use with driver - labels (Dict[str, str]): Labels to apply to volume + driver_opts (dict[str, str]): Options to use with driver + labels (dict[str, str]): Labels to apply to volume Raises: APIError: when service reports error @@ -92,14 +91,14 @@ def get(self, volume_id: str) -> Volume: # pylint: disable=arguments-differ,arg response.raise_for_status() return self.prepare_model(attrs=response.json()) - def list(self, *_, **kwargs) -> List[Volume]: + def list(self, *_, **kwargs) -> list[Volume]: """Report on volumes. 
Keyword Args: - filters (Dict[str, str]): criteria to filter Volume list + filters (dict[str, str]): criteria to filter Volume list - driver (str): filter volumes by their driver - - label (Dict[str, str]): filter by label and/or value + - label (dict[str, str]): filter by label and/or value - name (str): filter by volume's name """ filters = api.prepare_filters(kwargs.get("filters")) @@ -113,8 +112,8 @@ def list(self, *_, **kwargs) -> List[Volume]: def prune( self, - filters: Optional[Dict[str, str]] = None, # pylint: disable=unused-argument - ) -> Dict[Literal["VolumesDeleted", "SpaceReclaimed"], Any]: + filters: Optional[dict[str, str]] = None, # pylint: disable=unused-argument + ) -> dict[Literal["VolumesDeleted", "SpaceReclaimed"], Any]: """Delete unused volumes. Args: @@ -127,7 +126,7 @@ def prune( data = response.json() response.raise_for_status() - volumes: List[str] = [] + volumes: list[str] = [] space_reclaimed = 0 for item in data: if "Err" in item: diff --git a/podman/errors/__init__.py b/podman/errors/__init__.py index 9a339112..ae8d9fa0 100644 --- a/podman/errors/__init__.py +++ b/podman/errors/__init__.py @@ -48,7 +48,9 @@ class NotFoundError(HTTPException): def __init__(self, message, response=None): super().__init__(message) self.response = response - warnings.warn("APIConnection() and supporting classes.", PendingDeprecationWarning) + warnings.warn( + "APIConnection() and supporting classes.", PendingDeprecationWarning, stacklevel=2 + ) # If found, use new ImageNotFound otherwise old class @@ -100,7 +102,9 @@ class RequestError(HTTPException): def __init__(self, message, response=None): super().__init__(message) self.response = response - warnings.warn("APIConnection() and supporting classes.", PendingDeprecationWarning) + warnings.warn( + "APIConnection() and supporting classes.", PendingDeprecationWarning, stacklevel=2 + ) class InternalServerError(HTTPException): @@ -112,4 +116,6 @@ class InternalServerError(HTTPException): def __init__(self, message, response=None): super().__init__(message) self.response = response - warnings.warn("APIConnection() and supporting classes.", PendingDeprecationWarning) + warnings.warn( + "APIConnection() and supporting classes.", PendingDeprecationWarning, stacklevel=2 + ) diff --git a/podman/errors/exceptions.py b/podman/errors/exceptions.py index ef3af2a0..f92d886c 100644 --- a/podman/errors/exceptions.py +++ b/podman/errors/exceptions.py @@ -1,6 +1,7 @@ """Podman API Errors.""" -from typing import Iterable, List, Optional, Union, TYPE_CHECKING +from typing import Optional, Union, TYPE_CHECKING +from collections.abc import Iterable from requests import Response from requests.exceptions import HTTPError @@ -112,7 +113,7 @@ def __init__( self, container: "Container", exit_status: int, - command: Union[str, List[str]], + command: Union[str, list[str]], image: str, stderr: Optional[Iterable[str]] = None, ): # pylint: disable=too-many-positional-arguments diff --git a/podman/tests/integration/test_adapters.py b/podman/tests/integration/test_adapters.py index 598597cb..9b6a74cb 100644 --- a/podman/tests/integration/test_adapters.py +++ b/podman/tests/integration/test_adapters.py @@ -39,10 +39,10 @@ def test_tcp_ping(self): podman.start(check_socket=False) time.sleep(0.5) - with PodmanClient(base_url=f"tcp:localhost:8889") as client: + with PodmanClient(base_url="tcp:localhost:8889") as client: self.assertTrue(client.ping()) - with PodmanClient(base_url=f"http://localhost:8889") as client: + with 
PodmanClient(base_url="http://localhost:8889") as client: self.assertTrue(client.ping()) finally: podman.stop() diff --git a/podman/tests/integration/test_container_exec.py b/podman/tests/integration/test_container_exec.py index 43190797..f0fd72b7 100644 --- a/podman/tests/integration/test_container_exec.py +++ b/podman/tests/integration/test_container_exec.py @@ -1,5 +1,3 @@ -import unittest - import podman.tests.integration.base as base from podman import PodmanClient @@ -113,10 +111,11 @@ def test_container_exec_run_stream_detach(self): ] error_code, output = container.exec_run(command, stream=True, detach=True) - # Detach should make the ``exec_run`` ignore the ``stream`` flag so we will assert against the standard, - # non-streaming behavior. + # Detach should make the ``exec_run`` ignore the ``stream`` flag so we will + # assert against the standard, non-streaming behavior. self.assertEqual(error_code, 0) - # The endpoint should return immediately, before we are able to actually get any of the output. + # The endpoint should return immediately, before we are able to actually + # get any of the output. self.assertEqual( output, b'\n', diff --git a/podman/tests/integration/test_containers.py b/podman/tests/integration/test_containers.py index 11b7ea81..dbb63aeb 100644 --- a/podman/tests/integration/test_containers.py +++ b/podman/tests/integration/test_containers.py @@ -7,9 +7,9 @@ try: # Python >= 3.10 from collections.abc import Iterator -except: +except ImportError: # Python < 3.10 - from collections import Iterator + from collections.abc import Iterator import podman.tests.integration.base as base from podman import PodmanClient diff --git a/podman/tests/integration/test_images.py b/podman/tests/integration/test_images.py index 19470168..cbfcd454 100644 --- a/podman/tests/integration/test_images.py +++ b/podman/tests/integration/test_images.py @@ -15,13 +15,9 @@ """Images integration tests.""" import io -import queue import tarfile -import threading import types import unittest -from contextlib import suppress -from datetime import datetime, timedelta import podman.tests.integration.base as base from podman import PodmanClient @@ -137,11 +133,11 @@ def test_search(self): @unittest.skip("Needs Podman 3.1.0") def test_corrupt_load(self): with self.assertRaises(APIError) as e: - next(self.client.images.load("This is a corrupt tarball".encode("utf-8"))) + next(self.client.images.load(b"This is a corrupt tarball")) self.assertIn("payload does not match", e.exception.explanation) def test_build(self): - buffer = io.StringIO(f"""FROM quay.io/libpod/alpine_labels:latest""") + buffer = io.StringIO("""FROM quay.io/libpod/alpine_labels:latest""") image, stream = self.client.images.build(fileobj=buffer) self.assertIsNotNone(image) diff --git a/podman/tests/integration/test_networks.py b/podman/tests/integration/test_networks.py index c034ca8d..76e9b854 100644 --- a/podman/tests/integration/test_networks.py +++ b/podman/tests/integration/test_networks.py @@ -14,7 +14,6 @@ # """Network integration tests.""" -import os import random import unittest from contextlib import suppress diff --git a/podman/tests/integration/utils.py b/podman/tests/integration/utils.py index 262bf86e..05f7c6d7 100644 --- a/podman/tests/integration/utils.py +++ b/podman/tests/integration/utils.py @@ -20,7 +20,7 @@ import subprocess import threading from contextlib import suppress -from typing import List, Optional +from typing import Optional import time @@ -53,7 +53,7 @@ def __init__( self.proc = None self.reference_id = 
hash(time.monotonic()) - self.cmd: List[str] = [] + self.cmd: list[str] = [] if privileged: self.cmd.append('sudo') diff --git a/podman/tests/unit/test_api_utils.py b/podman/tests/unit/test_api_utils.py index dcafc294..0635bcd3 100644 --- a/podman/tests/unit/test_api_utils.py +++ b/podman/tests/unit/test_api_utils.py @@ -3,7 +3,7 @@ import unittest from typing import Any, Optional from unittest import mock -from unittest.mock import Mock, mock_open, patch +from unittest.mock import mock_open, patch from dataclasses import dataclass @@ -22,10 +22,10 @@ class TestCase: TestCase(name="empty str", input="", expected=None), TestCase(name="str", input="reference=fedora", expected='{"reference": ["fedora"]}'), TestCase( - name="List[str]", input=["reference=fedora"], expected='{"reference": ["fedora"]}' + name="list[str]", input=["reference=fedora"], expected='{"reference": ["fedora"]}' ), TestCase( - name="Dict[str,str]", + name="dict[str,str]", input={"reference": "fedora"}, expected='{"reference": ["fedora"]}', ), diff --git a/podman/tests/unit/test_build.py b/podman/tests/unit/test_build.py index a12187ea..6adf0fe8 100644 --- a/podman/tests/unit/test_build.py +++ b/podman/tests/unit/test_build.py @@ -5,9 +5,9 @@ try: # Python >= 3.10 from collections.abc import Iterable -except: +except ImportError: # Python < 3.10 - from collections import Iterable + from collections.abc import Iterable from unittest.mock import patch import requests_mock diff --git a/podman/tests/unit/test_container.py b/podman/tests/unit/test_container.py index f2c41601..b38ea483 100644 --- a/podman/tests/unit/test_container.py +++ b/podman/tests/unit/test_container.py @@ -6,9 +6,9 @@ try: # Python >= 3.10 from collections.abc import Iterable -except: +except ImportError: # Python < 3.10 - from collections import Iterable + from collections.abc import Iterable import requests_mock @@ -119,7 +119,9 @@ def test_stats(self, mock): "Error": None, "Stats": [ { - "ContainerId": "87e1325c82424e49a00abdd4de08009eb76c7de8d228426a9b8af9318ced5ecd", + "ContainerId": ( + "87e1325c82424e49a00abdd4de08009eb76c7de8d228426a9b8af9318ced5ecd" + ), "Name": "evil_ptolemy", "CPU": 1000.0, } @@ -421,7 +423,8 @@ def test_top_with_streaming(self, mock): 'Mar01', '?', '00:00:01', - '/usr/bin/ssh-agent /bin/sh -c exec -l /bin/bash -c "/usr/bin/gnome-session"', + '/usr/bin/ssh-agent /bin/sh -c exec -l /bin/bash' + + '-c "/usr/bin/gnome-session"', ], ['jhonce', '5544', '3522', '0', 'Mar01', 'pts/1', '00:00:02', '-bash'], ['jhonce', '6140', '3522', '0', 'Mar01', 'pts/2', '00:00:00', '-bash'], diff --git a/podman/tests/unit/test_containersmanager.py b/podman/tests/unit/test_containersmanager.py index cb0ec167..f72ac226 100644 --- a/podman/tests/unit/test_containersmanager.py +++ b/podman/tests/unit/test_containersmanager.py @@ -4,11 +4,11 @@ try: # Python >= 3.10 from collections.abc import Iterator -except: +except ImportError: # Python < 3.10 - from collections import Iterator + from collections.abc import Iterator -from unittest.mock import ANY, DEFAULT, patch, MagicMock +from unittest.mock import DEFAULT, patch, MagicMock import requests_mock @@ -258,11 +258,11 @@ def test_create_parse_host_port(self, mock): self.assertEqual(expected_ports, actual_ports) def test_create_unsupported_key(self): - with self.assertRaises(TypeError) as e: + with self.assertRaises(TypeError): self.client.containers.create("fedora", "/usr/bin/ls", blkio_weight=100.0) def test_create_unknown_key(self): - with self.assertRaises(TypeError) as e: + with 
self.assertRaises(TypeError): self.client.containers.create("fedora", "/usr/bin/ls", unknown_key=100.0) @requests_mock.Mocker() diff --git a/podman/tests/unit/test_events.py b/podman/tests/unit/test_events.py index 2ac3a9a7..41638337 100644 --- a/podman/tests/unit/test_events.py +++ b/podman/tests/unit/test_events.py @@ -44,7 +44,7 @@ def test_list(self, mock): buffer.write(json.JSONEncoder().encode(item)) buffer.write("\n") - adapter = mock.get(tests.LIBPOD_URL + "/events", text=buffer.getvalue()) + adapter = mock.get(tests.LIBPOD_URL + "/events", text=buffer.getvalue()) # noqa: F841 manager = EventsManager(client=self.client.api) actual = manager.list(decode=True) diff --git a/podman/tests/unit/test_imagesmanager.py b/podman/tests/unit/test_imagesmanager.py index a98c22c6..22214d15 100644 --- a/podman/tests/unit/test_imagesmanager.py +++ b/podman/tests/unit/test_imagesmanager.py @@ -1,13 +1,13 @@ import types import unittest -from unittest.mock import mock_open, patch +from unittest.mock import patch try: # Python >= 3.10 from collections.abc import Iterable -except: +except ImportError: # Python < 3.10 - from collections import Iterable + from collections.abc import Iterable import requests_mock @@ -213,7 +213,8 @@ def test_prune_filters_label(self, mock): """Unit test filters param label for Images prune().""" mock.post( tests.LIBPOD_URL - + "/images/prune?filters=%7B%22label%22%3A+%5B%22%7B%27license%27%3A+%27Apache-2.0%27%7D%22%5D%7D", + + "/images/prune?filters=%7B%22label%22%3A+%5B%22%7B%27license%27%3A+" + + "%27Apache-2.0%27%7D%22%5D%7D", json=[ { "Id": "326dd9d7add24646a325e8eaa82125294027db2332e49c5828d96312c5d773ab", @@ -242,7 +243,8 @@ def test_prune_filters_not_label(self, mock): """Unit test filters param NOT-label for Images prune().""" mock.post( tests.LIBPOD_URL - + "/images/prune?filters=%7B%22label%21%22%3A+%5B%22%7B%27license%27%3A+%27Apache-2.0%27%7D%22%5D%7D", + + "/images/prune?filters=%7B%22label%21%22%3A+%5B%22%7B%27license%27%3A+" + + "%27Apache-2.0%27%7D%22%5D%7D", json=[ { "Id": "c4b16966ecd94ffa910eab4e630e24f259bf34a87e924cd4b1434f267b0e354e", @@ -666,7 +668,10 @@ def test_list_with_name_and_existing_filters(self, mock): """Test that name parameter works alongside other filters""" mock.get( tests.LIBPOD_URL - + "/images/json?filters=%7B%22dangling%22%3A+%5B%22True%22%5D%2C+%22reference%22%3A+%5B%22fedora%22%5D%7D", + + ( + "/images/json?filters=%7B%22dangling%22%3A+%5B%22True%22%5D%2C+" + "%22reference%22%3A+%5B%22fedora%22%5D%7D" + ), json=[FIRST_IMAGE], ) diff --git a/podman/tests/unit/test_networksmanager.py b/podman/tests/unit/test_networksmanager.py index 1219bb54..483a8510 100644 --- a/podman/tests/unit/test_networksmanager.py +++ b/podman/tests/unit/test_networksmanager.py @@ -171,6 +171,8 @@ def test_create_defaults(self, mock): adapter = mock.post(tests.LIBPOD_URL + "/networks/create", json=FIRST_NETWORK_LIBPOD) network = self.client.networks.create("podman") + self.assertIsInstance(network, Network) + self.assertEqual(adapter.call_count, 1) self.assertDictEqual( adapter.last_request.json(), diff --git a/podman/tests/unit/test_parse_utils.py b/podman/tests/unit/test_parse_utils.py index a7768deb..8d18124c 100644 --- a/podman/tests/unit/test_parse_utils.py +++ b/podman/tests/unit/test_parse_utils.py @@ -3,7 +3,8 @@ import json import unittest from dataclasses import dataclass -from typing import Any, Iterable, Optional, Tuple +from typing import Any, Optional +from collections.abc import Iterable from unittest import mock from requests import 
Response @@ -17,7 +18,7 @@ def test_parse_repository(self): class TestCase: name: str input: Any - expected: Tuple[str, Optional[str]] + expected: tuple[str, Optional[str]] cases = [ TestCase(name="empty str", input="", expected=("", None)), diff --git a/podman/tests/unit/test_path_utils.py b/podman/tests/unit/test_path_utils.py index eda2dd62..83ee217f 100644 --- a/podman/tests/unit/test_path_utils.py +++ b/podman/tests/unit/test_path_utils.py @@ -1,4 +1,3 @@ -import datetime import os import unittest import tempfile diff --git a/podman/tests/unit/test_podmanclient.py b/podman/tests/unit/test_podmanclient.py index 7456bad3..fdf8d344 100644 --- a/podman/tests/unit/test_podmanclient.py +++ b/podman/tests/unit/test_podmanclient.py @@ -59,7 +59,7 @@ def test_contextmanager(self, mock): "os": "linux", } } - adapter = mock.get(tests.LIBPOD_URL + "/info", json=body) + adapter = mock.get(tests.LIBPOD_URL + "/info", json=body) # noqa: F841 with PodmanClient(base_url=tests.BASE_SOCK) as client: actual = client.info() diff --git a/podman/tests/unit/test_podsmanager.py b/podman/tests/unit/test_podsmanager.py index 4512f8e6..fd919cb8 100644 --- a/podman/tests/unit/test_podsmanager.py +++ b/podman/tests/unit/test_podsmanager.py @@ -1,7 +1,7 @@ import io import json import unittest -from typing import Iterable +from collections.abc import Iterable import requests_mock diff --git a/rpm/python-podman.spec b/rpm/python-podman.spec index 75ff19e1..193c3fcd 100644 --- a/rpm/python-podman.spec +++ b/rpm/python-podman.spec @@ -81,10 +81,7 @@ export PBR_VERSION="0.0.0" %pyproject_save_files %{pypi_name} %endif -%if !%{defined rhel8_py} %check -%pyproject_check_import -e podman.api.typing_extensions -%endif %if %{defined rhel8_py} %files -n python%{python3_pkgversion}-%{pypi_name} diff --git a/ruff.toml b/ruff.toml index 02b60ac5..84bceb20 100644 --- a/ruff.toml +++ b/ruff.toml @@ -18,10 +18,10 @@ select = [ "E", # Pycodestyle Error "W", # Pycodestyle Warning "N", # PEP8 Naming - # TODO "UP", # Pyupgrade + "UP", # Pyupgrade # TODO "ANN", - # TODO "S", # Bandit - # "B", # Bugbear + "S", # Bandit + "B", # Bugbear "A", # flake-8-builtins "YTT", # flake-8-2020 "PLC", # Pylint Convention @@ -32,17 +32,16 @@ select = [ # to avoid changing too many lines ignore = [ "F821", # TODO Undefined name - "F541", # TODO f-string is missing placeholders - "F401", # TODO Module imported but unused - "F841", # TODO Local variable is assigned to but never used - "E402", # TODO Module level import not at top of file - "E741", # TODO ambiguous variable name - "E722", # TODO do not use bare 'except' - "E501", # TODO line too long + # Some Exceptions such as NotFound and NotFoundError can be ambiguous + # This change need to be performed with carefulness "N818", # TODO Error Suffix in exception name + # This can lead to API breaking changes so it's disabled for now "N80", # TODO Invalid Name - "ANN10", # Missing type annotation - "PLW2901", # TODO Redefined Loop Name + # TODO this error fails on one file and it's necessary to address + # the issue properly on a specific PR + "S108", + # TODO This is probably a false positive + "S603", ] [lint.per-file-ignores] "podman/tests/*.py" = ["S"] diff --git a/tox.ini b/tox.ini index 66e6288a..25169f8c 100644 --- a/tox.ini +++ b/tox.ini @@ -18,7 +18,7 @@ setenv = commands = {posargs} [testenv:lint] -depends = ruff +deps = ruff==0.8.1 allowlist_externals = ruff commands = ruff check --diff @@ -30,7 +30,7 @@ commands = coverage report -m --skip-covered --fail-under=80 --omit=podman/tests/* 
--omit=.tox/* [testenv:format] -deps = ruff +deps = ruff==0.8.1 allowlist_externals = ruff commands = ruff format --diff
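Two consequences of the configuration changes above are worth spelling out. In ruff.toml the Pyupgrade (UP), Bandit (S) and Bugbear (B) families are now selected, which is what motivates edits elsewhere in the diff such as the `# noqa: B024` on `PodmanResource` (Bugbear flags subclasses of ABC that declare no abstract methods). In tox.ini the lint and format environments switch from `depends = ruff`, which only declares environment ordering and installs nothing, to `deps = ruff==0.8.1`, so a pinned ruff is installed into each env. A small, hypothetical reproduction of the B024 case:

```python
from abc import ABC, abstractmethod


# Without the noqa, Bugbear reports B024: an abstract base class that declares
# no abstract methods. This mirrors the suppression added to PodmanResource above.
class Resource(ABC):  # noqa: B024
    """Hypothetical base class, abstract by convention only."""

    def reload(self) -> None:
        """Concrete helper shared by subclasses."""


class Manager(ABC):
    """The fix B024 usually points at: declare at least one abstract method."""

    @abstractmethod
    def list(self):
        """Return managed resources."""
```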