diff --git a/_downloads/09b83b84e67dd3bf3bfa9889ef5464d2/linop-5.pdf b/_downloads/09b83b84e67dd3bf3bfa9889ef5464d2/linop-5.pdf
index 357fc0f0..e073ccc7 100644
Binary files a/_downloads/09b83b84e67dd3bf3bfa9889ef5464d2/linop-5.pdf and b/_downloads/09b83b84e67dd3bf3bfa9889ef5464d2/linop-5.pdf differ
diff --git a/_downloads/20331cddc327a7a39c725a1390312abd/linop-16_01.pdf b/_downloads/20331cddc327a7a39c725a1390312abd/linop-16_01.pdf
index 4e2021ad..7ab1acf6 100644
Binary files a/_downloads/20331cddc327a7a39c725a1390312abd/linop-16_01.pdf and b/_downloads/20331cddc327a7a39c725a1390312abd/linop-16_01.pdf differ
diff --git a/_downloads/22bd9057779cffbbbba3ea842436c066/linop-7_00.pdf b/_downloads/22bd9057779cffbbbba3ea842436c066/linop-7_00.pdf
index 5c304f58..5370faf5 100644
Binary files a/_downloads/22bd9057779cffbbbba3ea842436c066/linop-7_00.pdf and b/_downloads/22bd9057779cffbbbba3ea842436c066/linop-7_00.pdf differ
diff --git a/_downloads/274c9a03963857bb2702dadde78475d6/linop-18_02.pdf b/_downloads/274c9a03963857bb2702dadde78475d6/linop-18_02.pdf
index 072d4626..3d7799c8 100644
Binary files a/_downloads/274c9a03963857bb2702dadde78475d6/linop-18_02.pdf and b/_downloads/274c9a03963857bb2702dadde78475d6/linop-18_02.pdf differ
diff --git a/_downloads/2d83951329e9139d2f457f163ef557ff/linop-17_01.pdf b/_downloads/2d83951329e9139d2f457f163ef557ff/linop-17_01.pdf
index fa8dbe90..5750920b 100644
Binary files a/_downloads/2d83951329e9139d2f457f163ef557ff/linop-17_01.pdf and b/_downloads/2d83951329e9139d2f457f163ef557ff/linop-17_01.pdf differ
diff --git a/_downloads/38abfa68484834f661723510290a9594/sampler-1_01_00.pdf b/_downloads/38abfa68484834f661723510290a9594/sampler-1_01_00.pdf
index d1a4f8d9..9503edd1 100644
Binary files a/_downloads/38abfa68484834f661723510290a9594/sampler-1_01_00.pdf and b/_downloads/38abfa68484834f661723510290a9594/sampler-1_01_00.pdf differ
diff --git a/_downloads/3efa35127efc146021b2bb00f30126cc/linop-8_00.pdf b/_downloads/3efa35127efc146021b2bb00f30126cc/linop-8_00.pdf
index e415a2a0..ee054148 100644
Binary files a/_downloads/3efa35127efc146021b2bb00f30126cc/linop-8_00.pdf and b/_downloads/3efa35127efc146021b2bb00f30126cc/linop-8_00.pdf differ
diff --git a/_downloads/426d2a51889add0a821472e3363e6c86/linop-9_02.pdf b/_downloads/426d2a51889add0a821472e3363e6c86/linop-9_02.pdf
index cc483863..412624b2 100644
Binary files a/_downloads/426d2a51889add0a821472e3363e6c86/linop-9_02.pdf and b/_downloads/426d2a51889add0a821472e3363e6c86/linop-9_02.pdf differ
diff --git a/_downloads/46ba4351fee97ccd2e3d0aba89e7413d/linop-14.pdf b/_downloads/46ba4351fee97ccd2e3d0aba89e7413d/linop-14.pdf
index 658526e8..d8b8ade2 100644
Binary files a/_downloads/46ba4351fee97ccd2e3d0aba89e7413d/linop-14.pdf and b/_downloads/46ba4351fee97ccd2e3d0aba89e7413d/linop-14.pdf differ
diff --git a/_downloads/502950988dae13ff01ed4781361cfe4d/linop-18_03.pdf b/_downloads/502950988dae13ff01ed4781361cfe4d/linop-18_03.pdf
index 6be0b0dc..88ccce7d 100644
Binary files a/_downloads/502950988dae13ff01ed4781361cfe4d/linop-18_03.pdf and b/_downloads/502950988dae13ff01ed4781361cfe4d/linop-18_03.pdf differ
diff --git a/_downloads/5420c90dcdf9b097dbed8ebf47bc7a03/util-2.pdf b/_downloads/5420c90dcdf9b097dbed8ebf47bc7a03/util-2.pdf
index 07b62360..9fc748ac 100644
Binary files a/_downloads/5420c90dcdf9b097dbed8ebf47bc7a03/util-2.pdf and b/_downloads/5420c90dcdf9b097dbed8ebf47bc7a03/util-2.pdf differ
diff --git a/_downloads/5b19c3c62ae35eb4489c976153e230f2/linop-9_00.pdf b/_downloads/5b19c3c62ae35eb4489c976153e230f2/linop-9_00.pdf
index d287764e..d1172219 100644
Binary files a/_downloads/5b19c3c62ae35eb4489c976153e230f2/linop-9_00.pdf and b/_downloads/5b19c3c62ae35eb4489c976153e230f2/linop-9_00.pdf differ
diff --git a/_downloads/5be70851f96b004b61b84e14faba6d02/util-1.pdf b/_downloads/5be70851f96b004b61b84e14faba6d02/util-1.pdf
index 64b52f80..99d0c3eb 100644
Binary files a/_downloads/5be70851f96b004b61b84e14faba6d02/util-1.pdf and b/_downloads/5be70851f96b004b61b84e14faba6d02/util-1.pdf differ
diff --git a/_downloads/5d831190e971237b56d4c6f5001a1497/linop-1.pdf b/_downloads/5d831190e971237b56d4c6f5001a1497/linop-1.pdf
index 5030e491..b83e35c1 100644
Binary files a/_downloads/5d831190e971237b56d4c6f5001a1497/linop-1.pdf and b/_downloads/5d831190e971237b56d4c6f5001a1497/linop-1.pdf differ
diff --git a/_downloads/5ebda40b456c165df2ad890040597b6f/linop-15.pdf b/_downloads/5ebda40b456c165df2ad890040597b6f/linop-15.pdf
index 79f50504..c71ee44f 100644
Binary files a/_downloads/5ebda40b456c165df2ad890040597b6f/linop-15.pdf and b/_downloads/5ebda40b456c165df2ad890040597b6f/linop-15.pdf differ
diff --git a/_downloads/5f2c7960744f306a7f67e6f866544db8/abc-1.pdf b/_downloads/5f2c7960744f306a7f67e6f866544db8/abc-1.pdf
index 5010b4bd..b1260782 100644
Binary files a/_downloads/5f2c7960744f306a7f67e6f866544db8/abc-1.pdf and b/_downloads/5f2c7960744f306a7f67e6f866544db8/abc-1.pdf differ
diff --git a/_downloads/612022bbeb60e12bbedb2442f5e9eb14/linop-8_01.pdf b/_downloads/612022bbeb60e12bbedb2442f5e9eb14/linop-8_01.pdf
index 480b9e74..df6a5fd1 100644
Binary files a/_downloads/612022bbeb60e12bbedb2442f5e9eb14/linop-8_01.pdf and b/_downloads/612022bbeb60e12bbedb2442f5e9eb14/linop-8_01.pdf differ
diff --git a/_downloads/6f030212e1ed44374360d96af1b8bb08/sampler-1_01_00.png b/_downloads/6f030212e1ed44374360d96af1b8bb08/sampler-1_01_00.png
index 366ddf11..2ddb1949 100644
Binary files a/_downloads/6f030212e1ed44374360d96af1b8bb08/sampler-1_01_00.png and b/_downloads/6f030212e1ed44374360d96af1b8bb08/sampler-1_01_00.png differ
diff --git a/_downloads/720322c27d60e8bdfe2e26d9c52e0397/linop-11.pdf b/_downloads/720322c27d60e8bdfe2e26d9c52e0397/linop-11.pdf
index 0984968d..a9fced7d 100644
Binary files a/_downloads/720322c27d60e8bdfe2e26d9c52e0397/linop-11.pdf and b/_downloads/720322c27d60e8bdfe2e26d9c52e0397/linop-11.pdf differ
diff --git a/_downloads/76756524613d2983ea3587d04687c2c6/linop-7_01.pdf b/_downloads/76756524613d2983ea3587d04687c2c6/linop-7_01.pdf
index b659fae4..bdcdf55a 100644
Binary files a/_downloads/76756524613d2983ea3587d04687c2c6/linop-7_01.pdf and b/_downloads/76756524613d2983ea3587d04687c2c6/linop-7_01.pdf differ
diff --git a/_downloads/7bf4f564a15100201d0a1e7baafafa52/opt-solver-2.pdf b/_downloads/7bf4f564a15100201d0a1e7baafafa52/opt-solver-2.pdf
index 5e64acc2..6e7df199 100644
Binary files a/_downloads/7bf4f564a15100201d0a1e7baafafa52/opt-solver-2.pdf and b/_downloads/7bf4f564a15100201d0a1e7baafafa52/opt-solver-2.pdf differ
diff --git a/_downloads/7cceec4a3abf2947afcf0fbf1a95fbc2/linop-9_01.pdf b/_downloads/7cceec4a3abf2947afcf0fbf1a95fbc2/linop-9_01.pdf
index b3a2e1f7..694e63f0 100644
Binary files a/_downloads/7cceec4a3abf2947afcf0fbf1a95fbc2/linop-9_01.pdf and b/_downloads/7cceec4a3abf2947afcf0fbf1a95fbc2/linop-9_01.pdf differ
diff --git a/_downloads/7f5f34fa22392f9857f5e4557ab603eb/linop-18_01.pdf b/_downloads/7f5f34fa22392f9857f5e4557ab603eb/linop-18_01.pdf
index 5d11bdbf..47ec071c 100644
Binary files a/_downloads/7f5f34fa22392f9857f5e4557ab603eb/linop-18_01.pdf and b/_downloads/7f5f34fa22392f9857f5e4557ab603eb/linop-18_01.pdf differ
diff --git a/_downloads/8149e23614b7c5de54fdf55f5a7a75c5/linop-17_00.pdf b/_downloads/8149e23614b7c5de54fdf55f5a7a75c5/linop-17_00.pdf
index 5b4c0b5f..9ff625f5 100644
Binary files a/_downloads/8149e23614b7c5de54fdf55f5a7a75c5/linop-17_00.pdf and b/_downloads/8149e23614b7c5de54fdf55f5a7a75c5/linop-17_00.pdf differ
diff --git a/_downloads/8186afc1cad5ba418c82646f4dc4c85a/sampler-1_00_00.pdf b/_downloads/8186afc1cad5ba418c82646f4dc4c85a/sampler-1_00_00.pdf
index 0f8dc330..f0f7df1d 100644
Binary files a/_downloads/8186afc1cad5ba418c82646f4dc4c85a/sampler-1_00_00.pdf and b/_downloads/8186afc1cad5ba418c82646f4dc4c85a/sampler-1_00_00.pdf differ
diff --git a/_downloads/82d6d7024b395a55bfdcd60ed84361df/linop-16_02.pdf b/_downloads/82d6d7024b395a55bfdcd60ed84361df/linop-16_02.pdf
index 2e028b5e..739e4eb6 100644
Binary files a/_downloads/82d6d7024b395a55bfdcd60ed84361df/linop-16_02.pdf and b/_downloads/82d6d7024b395a55bfdcd60ed84361df/linop-16_02.pdf differ
diff --git a/_downloads/852396df1c2a68b4ab13ea440094a13b/linop-2.pdf b/_downloads/852396df1c2a68b4ab13ea440094a13b/linop-2.pdf
index 048f2648..c9c1bde6 100644
Binary files a/_downloads/852396df1c2a68b4ab13ea440094a13b/linop-2.pdf and b/_downloads/852396df1c2a68b4ab13ea440094a13b/linop-2.pdf differ
diff --git a/_downloads/88a3f16ef7fce8fdb35fd4edf3b6ace4/sampler-1_00_00.png b/_downloads/88a3f16ef7fce8fdb35fd4edf3b6ace4/sampler-1_00_00.png
index 1aadf09c..d2e4f28a 100644
Binary files a/_downloads/88a3f16ef7fce8fdb35fd4edf3b6ace4/sampler-1_00_00.png and b/_downloads/88a3f16ef7fce8fdb35fd4edf3b6ace4/sampler-1_00_00.png differ
diff --git a/_downloads/9c4ebc29020e065ee3bc3d0953afc0cb/sampler-1_01_00.hires.png b/_downloads/9c4ebc29020e065ee3bc3d0953afc0cb/sampler-1_01_00.hires.png
index af0b4807..8a168173 100644
Binary files a/_downloads/9c4ebc29020e065ee3bc3d0953afc0cb/sampler-1_01_00.hires.png and b/_downloads/9c4ebc29020e065ee3bc3d0953afc0cb/sampler-1_01_00.hires.png differ
diff --git a/_downloads/9eaea73fdea624e0206631a1d44ed0c6/linop-7_03.pdf b/_downloads/9eaea73fdea624e0206631a1d44ed0c6/linop-7_03.pdf
index 4f88643f..c0097481 100644
Binary files a/_downloads/9eaea73fdea624e0206631a1d44ed0c6/linop-7_03.pdf and b/_downloads/9eaea73fdea624e0206631a1d44ed0c6/linop-7_03.pdf differ
diff --git a/_downloads/b2643be9761ce6a5244d11d98408529f/linop-13.pdf b/_downloads/b2643be9761ce6a5244d11d98408529f/linop-13.pdf
index af44779f..68e2bbad 100644
Binary files a/_downloads/b2643be9761ce6a5244d11d98408529f/linop-13.pdf and b/_downloads/b2643be9761ce6a5244d11d98408529f/linop-13.pdf differ
diff --git a/_downloads/b793257364ec7e73fe09d6ac0cf897bb/linop-3.pdf b/_downloads/b793257364ec7e73fe09d6ac0cf897bb/linop-3.pdf
index 87ec07d3..7a7f55bd 100644
Binary files a/_downloads/b793257364ec7e73fe09d6ac0cf897bb/linop-3.pdf and b/_downloads/b793257364ec7e73fe09d6ac0cf897bb/linop-3.pdf differ
diff --git a/_downloads/c0190b5d57d3d151adb5651d8b8a1ee5/linop-10.pdf b/_downloads/c0190b5d57d3d151adb5651d8b8a1ee5/linop-10.pdf
index 53487372..5737c965 100644
Binary files a/_downloads/c0190b5d57d3d151adb5651d8b8a1ee5/linop-10.pdf and b/_downloads/c0190b5d57d3d151adb5651d8b8a1ee5/linop-10.pdf differ
diff --git a/_downloads/c604d2df9d37527a80c0dc91b76cf5db/sampler-1_00_00.hires.png b/_downloads/c604d2df9d37527a80c0dc91b76cf5db/sampler-1_00_00.hires.png
index fe8ec311..33039249 100644
Binary files a/_downloads/c604d2df9d37527a80c0dc91b76cf5db/sampler-1_00_00.hires.png and b/_downloads/c604d2df9d37527a80c0dc91b76cf5db/sampler-1_00_00.hires.png differ
diff --git a/_downloads/ce02986c52568fc946d170be6555d837/linop-18_00.pdf b/_downloads/ce02986c52568fc946d170be6555d837/linop-18_00.pdf
index e52fce00..0c5d8dcd 100644
Binary files a/_downloads/ce02986c52568fc946d170be6555d837/linop-18_00.pdf and b/_downloads/ce02986c52568fc946d170be6555d837/linop-18_00.pdf differ
diff --git a/_downloads/d722bee71da109d7780d99550eacebb8/linop-16_00.pdf b/_downloads/d722bee71da109d7780d99550eacebb8/linop-16_00.pdf
index 7dbbcb13..f0d8ddcd 100644
Binary files a/_downloads/d722bee71da109d7780d99550eacebb8/linop-16_00.pdf and b/_downloads/d722bee71da109d7780d99550eacebb8/linop-16_00.pdf differ
diff --git a/_downloads/db9ac5fbbfe21a45f644cc919f758da2/opt-solver-1.pdf b/_downloads/db9ac5fbbfe21a45f644cc919f758da2/opt-solver-1.pdf
index 97a958db..393dc1b6 100644
Binary files a/_downloads/db9ac5fbbfe21a45f644cc919f758da2/opt-solver-1.pdf and b/_downloads/db9ac5fbbfe21a45f644cc919f758da2/opt-solver-1.pdf differ
diff --git a/_downloads/e8cf3ce50f34b6ff0831d06aeebabca8/linop-6.pdf b/_downloads/e8cf3ce50f34b6ff0831d06aeebabca8/linop-6.pdf
index 75483872..306c79c6 100644
Binary files a/_downloads/e8cf3ce50f34b6ff0831d06aeebabca8/linop-6.pdf and b/_downloads/e8cf3ce50f34b6ff0831d06aeebabca8/linop-6.pdf differ
diff --git a/_downloads/ef375de8a555744bde8d66d2be4d2ecd/linop-4.pdf b/_downloads/ef375de8a555744bde8d66d2be4d2ecd/linop-4.pdf
index 1132e8e3..00611df5 100644
Binary files a/_downloads/ef375de8a555744bde8d66d2be4d2ecd/linop-4.pdf and b/_downloads/ef375de8a555744bde8d66d2be4d2ecd/linop-4.pdf differ
diff --git a/_downloads/f8ee9bb8e004715b22514fc6afbb3a71/linop-7_02.pdf b/_downloads/f8ee9bb8e004715b22514fc6afbb3a71/linop-7_02.pdf
index 29c37d89..40270551 100644
Binary files a/_downloads/f8ee9bb8e004715b22514fc6afbb3a71/linop-7_02.pdf and b/_downloads/f8ee9bb8e004715b22514fc6afbb3a71/linop-7_02.pdf differ
diff --git a/_images/sampler-1_00_00.png b/_images/sampler-1_00_00.png
index 1aadf09c..d2e4f28a 100644
Binary files a/_images/sampler-1_00_00.png and b/_images/sampler-1_00_00.png differ
diff --git a/_images/sampler-1_01_00.png b/_images/sampler-1_01_00.png
index 366ddf11..2ddb1949 100644
Binary files a/_images/sampler-1_01_00.png and b/_images/sampler-1_01_00.png differ
diff --git a/_modules/typing.html b/_modules/typing.html
index 41126cad..52e4d4b1 100644
--- a/_modules/typing.html
+++ b/_modules/typing.html
@@ -1394,2557 +1394,2566 @@

Source code for typing

  927                globalns = getattr(
  928                    sys.modules.get(self.__forward_module__, None), '__dict__', globalns
  929                )
- 930            if type_params:
- 931                # "Inject" type parameters into the local namespace
- 932                # (unless they are shadowed by assignments *in* the local namespace),
- 933                # as a way of emulating annotation scopes when calling `eval()`
- 934                locals_to_pass = {param.__name__: param for param in type_params} | localns
- 935            else:
- 936                locals_to_pass = localns
- 937            type_ = _type_check(
- 938                eval(self.__forward_code__, globalns, locals_to_pass),
- 939                "Forward references must evaluate to types.",
- 940                is_argument=self.__forward_is_argument__,
- 941                allow_special_forms=self.__forward_is_class__,
- 942            )
- 943            self.__forward_value__ = _eval_type(
- 944                type_,
- 945                globalns,
- 946                localns,
- 947                type_params,
- 948                recursive_guard=(recursive_guard | {self.__forward_arg__}),
- 949            )
- 950            self.__forward_evaluated__ = True
- 951        return self.__forward_value__
- 952
- 953    def __eq__(self, other):
- 954        if not isinstance(other, ForwardRef):
- 955            return NotImplemented
- 956        if self.__forward_evaluated__ and other.__forward_evaluated__:
- 957            return (self.__forward_arg__ == other.__forward_arg__ and
- 958                    self.__forward_value__ == other.__forward_value__)
- 959        return (self.__forward_arg__ == other.__forward_arg__ and
- 960                self.__forward_module__ == other.__forward_module__)
+ 930
+ 931            # type parameters require some special handling,
+ 932            # as they exist in their own scope
+ 933            # but `eval()` does not have a dedicated parameter for that scope.
+ 934            # For classes, names in type parameter scopes should override
+ 935            # names in the global scope (which here are called `localns`!),
+ 936            # but should in turn be overridden by names in the class scope
+ 937            # (which here are called `globalns`!)
+ 938            if type_params:
+ 939                globalns, localns = dict(globalns), dict(localns)
+ 940                for param in type_params:
+ 941                    param_name = param.__name__
+ 942                    if not self.__forward_is_class__ or param_name not in globalns:
+ 943                        globalns[param_name] = param
+ 944                        localns.pop(param_name, None)
+ 945
+ 946            type_ = _type_check(
+ 947                eval(self.__forward_code__, globalns, localns),
+ 948                "Forward references must evaluate to types.",
+ 949                is_argument=self.__forward_is_argument__,
+ 950                allow_special_forms=self.__forward_is_class__,
+ 951            )
+ 952            self.__forward_value__ = _eval_type(
+ 953                type_,
+ 954                globalns,
+ 955                localns,
+ 956                type_params,
+ 957                recursive_guard=(recursive_guard | {self.__forward_arg__}),
+ 958            )
+ 959            self.__forward_evaluated__ = True
+ 960        return self.__forward_value__
  961
- 962    def __hash__(self):
- 963        return hash((self.__forward_arg__, self.__forward_module__))
- 964
- 965    def __or__(self, other):
- 966        return Union[self, other]
- 967
- 968    def __ror__(self, other):
- 969        return Union[other, self]
+ 962    def __eq__(self, other):
+ 963        if not isinstance(other, ForwardRef):
+ 964            return NotImplemented
+ 965        if self.__forward_evaluated__ and other.__forward_evaluated__:
+ 966            return (self.__forward_arg__ == other.__forward_arg__ and
+ 967                    self.__forward_value__ == other.__forward_value__)
+ 968        return (self.__forward_arg__ == other.__forward_arg__ and
+ 969                self.__forward_module__ == other.__forward_module__)
  970
- 971    def __repr__(self):
- 972        if self.__forward_module__ is None:
- 973            module_repr = ''
- 974        else:
- 975            module_repr = f', module={self.__forward_module__!r}'
- 976        return f'ForwardRef({self.__forward_arg__!r}{module_repr})'
- 977
- 978
- 979def _is_unpacked_typevartuple(x: Any) -> bool:
- 980    return ((not isinstance(x, type)) and
- 981            getattr(x, '__typing_is_unpacked_typevartuple__', False))
- 982
- 983
- 984def _is_typevar_like(x: Any) -> bool:
- 985    return isinstance(x, (TypeVar, ParamSpec)) or _is_unpacked_typevartuple(x)
+ 971    def __hash__(self):
+ 972        return hash((self.__forward_arg__, self.__forward_module__))
+ 973
+ 974    def __or__(self, other):
+ 975        return Union[self, other]
+ 976
+ 977    def __ror__(self, other):
+ 978        return Union[other, self]
+ 979
+ 980    def __repr__(self):
+ 981        if self.__forward_module__ is None:
+ 982            module_repr = ''
+ 983        else:
+ 984            module_repr = f', module={self.__forward_module__!r}'
+ 985        return f'ForwardRef({self.__forward_arg__!r}{module_repr})'
  986
  987
- 988class _PickleUsingNameMixin:
- 989    """Mixin enabling pickling based on self.__name__."""
- 990
- 991    def __reduce__(self):
- 992        return self.__name__
- 993
- 994
- 995def _typevar_subst(self, arg):
- 996    msg = "Parameters to generic types must be types."
- 997    arg = _type_check(arg, msg, is_argument=True)
- 998    if ((isinstance(arg, _GenericAlias) and arg.__origin__ is Unpack) or
- 999        (isinstance(arg, GenericAlias) and getattr(arg, '__unpacked__', False))):
-1000        raise TypeError(f"{arg} is not valid as type argument")
-1001    return arg
+ 988def _is_unpacked_typevartuple(x: Any) -> bool:
+ 989    return ((not isinstance(x, type)) and
+ 990            getattr(x, '__typing_is_unpacked_typevartuple__', False))
+ 991
+ 992
+ 993def _is_typevar_like(x: Any) -> bool:
+ 994    return isinstance(x, (TypeVar, ParamSpec)) or _is_unpacked_typevartuple(x)
+ 995
+ 996
+ 997class _PickleUsingNameMixin:
+ 998    """Mixin enabling pickling based on self.__name__."""
+ 999
+1000    def __reduce__(self):
+1001        return self.__name__
 1002
 1003
-1004def _typevartuple_prepare_subst(self, alias, args):
-1005    params = alias.__parameters__
-1006    typevartuple_index = params.index(self)
-1007    for param in params[typevartuple_index + 1:]:
-1008        if isinstance(param, TypeVarTuple):
-1009            raise TypeError(f"More than one TypeVarTuple parameter in {alias}")
-1010
-1011    alen = len(args)
-1012    plen = len(params)
-1013    left = typevartuple_index
-1014    right = plen - typevartuple_index - 1
-1015    var_tuple_index = None
-1016    fillarg = None
-1017    for k, arg in enumerate(args):
-1018        if not isinstance(arg, type):
-1019            subargs = getattr(arg, '__typing_unpacked_tuple_args__', None)
-1020            if subargs and len(subargs) == 2 and subargs[-1] is ...:
-1021                if var_tuple_index is not None:
-1022                    raise TypeError("More than one unpacked arbitrary-length tuple argument")
-1023                var_tuple_index = k
-1024                fillarg = subargs[0]
-1025    if var_tuple_index is not None:
-1026        left = min(left, var_tuple_index)
-1027        right = min(right, alen - var_tuple_index - 1)
-1028    elif left + right > alen:
-1029        raise TypeError(f"Too few arguments for {alias};"
-1030                        f" actual {alen}, expected at least {plen-1}")
-1031
-1032    return (
-1033        *args[:left],
-1034        *([fillarg]*(typevartuple_index - left)),
-1035        tuple(args[left: alen - right]),
-1036        *([fillarg]*(plen - right - left - typevartuple_index - 1)),
-1037        *args[alen - right:],
-1038    )
-1039
+1004def _typevar_subst(self, arg):
+1005    msg = "Parameters to generic types must be types."
+1006    arg = _type_check(arg, msg, is_argument=True)
+1007    if ((isinstance(arg, _GenericAlias) and arg.__origin__ is Unpack) or
+1008        (isinstance(arg, GenericAlias) and getattr(arg, '__unpacked__', False))):
+1009        raise TypeError(f"{arg} is not valid as type argument")
+1010    return arg
+1011
+1012
+1013def _typevartuple_prepare_subst(self, alias, args):
+1014    params = alias.__parameters__
+1015    typevartuple_index = params.index(self)
+1016    for param in params[typevartuple_index + 1:]:
+1017        if isinstance(param, TypeVarTuple):
+1018            raise TypeError(f"More than one TypeVarTuple parameter in {alias}")
+1019
+1020    alen = len(args)
+1021    plen = len(params)
+1022    left = typevartuple_index
+1023    right = plen - typevartuple_index - 1
+1024    var_tuple_index = None
+1025    fillarg = None
+1026    for k, arg in enumerate(args):
+1027        if not isinstance(arg, type):
+1028            subargs = getattr(arg, '__typing_unpacked_tuple_args__', None)
+1029            if subargs and len(subargs) == 2 and subargs[-1] is ...:
+1030                if var_tuple_index is not None:
+1031                    raise TypeError("More than one unpacked arbitrary-length tuple argument")
+1032                var_tuple_index = k
+1033                fillarg = subargs[0]
+1034    if var_tuple_index is not None:
+1035        left = min(left, var_tuple_index)
+1036        right = min(right, alen - var_tuple_index - 1)
+1037    elif left + right > alen:
+1038        raise TypeError(f"Too few arguments for {alias};"
+1039                        f" actual {alen}, expected at least {plen-1}")
 1040
-1041def _paramspec_subst(self, arg):
-1042    if isinstance(arg, (list, tuple)):
-1043        arg = tuple(_type_check(a, "Expected a type.") for a in arg)
-1044    elif not _is_param_expr(arg):
-1045        raise TypeError(f"Expected a list of types, an ellipsis, "
-1046                        f"ParamSpec, or Concatenate. Got {arg}")
-1047    return arg
+1041    return (
+1042        *args[:left],
+1043        *([fillarg]*(typevartuple_index - left)),
+1044        tuple(args[left: alen - right]),
+1045        *([fillarg]*(plen - right - left - typevartuple_index - 1)),
+1046        *args[alen - right:],
+1047    )
 1048
 1049
-1050def _paramspec_prepare_subst(self, alias, args):
-1051    params = alias.__parameters__
-1052    i = params.index(self)
-1053    if i >= len(args):
-1054        raise TypeError(f"Too few arguments for {alias}")
-1055    # Special case where Z[[int, str, bool]] == Z[int, str, bool] in PEP 612.
-1056    if len(params) == 1 and not _is_param_expr(args[0]):
-1057        assert i == 0
-1058        args = (args,)
-1059    # Convert lists to tuples to help other libraries cache the results.
-1060    elif isinstance(args[i], list):
-1061        args = (*args[:i], tuple(args[i]), *args[i+1:])
-1062    return args
-1063
-1064
-1065@_tp_cache
-1066def _generic_class_getitem(cls, params):
-1067    """Parameterizes a generic class.
-1068
-1069    At least, parameterizing a generic class is the *main* thing this method
-1070    does. For example, for some generic class `Foo`, this is called when we
-1071    do `Foo[int]` - there, with `cls=Foo` and `params=int`.
+1050def _paramspec_subst(self, arg):
+1051    if isinstance(arg, (list, tuple)):
+1052        arg = tuple(_type_check(a, "Expected a type.") for a in arg)
+1053    elif not _is_param_expr(arg):
+1054        raise TypeError(f"Expected a list of types, an ellipsis, "
+1055                        f"ParamSpec, or Concatenate. Got {arg}")
+1056    return arg
+1057
+1058
+1059def _paramspec_prepare_subst(self, alias, args):
+1060    params = alias.__parameters__
+1061    i = params.index(self)
+1062    if i >= len(args):
+1063        raise TypeError(f"Too few arguments for {alias}")
+1064    # Special case where Z[[int, str, bool]] == Z[int, str, bool] in PEP 612.
+1065    if len(params) == 1 and not _is_param_expr(args[0]):
+1066        assert i == 0
+1067        args = (args,)
+1068    # Convert lists to tuples to help other libraries cache the results.
+1069    elif isinstance(args[i], list):
+1070        args = (*args[:i], tuple(args[i]), *args[i+1:])
+1071    return args
 1072
-1073    However, note that this method is also called when defining generic
-1074    classes in the first place with `class Foo(Generic[T]): ...`.
-1075    """
-1076    if not isinstance(params, tuple):
-1077        params = (params,)
-1078
-1079    params = tuple(_type_convert(p) for p in params)
-1080    is_generic_or_protocol = cls in (Generic, Protocol)
+1073
+1074@_tp_cache
+1075def _generic_class_getitem(cls, params):
+1076    """Parameterizes a generic class.
+1077
+1078    At least, parameterizing a generic class is the *main* thing this method
+1079    does. For example, for some generic class `Foo`, this is called when we
+1080    do `Foo[int]` - there, with `cls=Foo` and `params=int`.
 1081
-1082    if is_generic_or_protocol:
-1083        # Generic and Protocol can only be subscripted with unique type variables.
-1084        if not params:
-1085            raise TypeError(
-1086                f"Parameter list to {cls.__qualname__}[...] cannot be empty"
-1087            )
-1088        if not all(_is_typevar_like(p) for p in params):
-1089            raise TypeError(
-1090                f"Parameters to {cls.__name__}[...] must all be type variables "
-1091                f"or parameter specification variables.")
-1092        if len(set(params)) != len(params):
-1093            raise TypeError(
-1094                f"Parameters to {cls.__name__}[...] must all be unique")
-1095    else:
-1096        # Subscripting a regular Generic subclass.
-1097        for param in cls.__parameters__:
-1098            prepare = getattr(param, '__typing_prepare_subst__', None)
-1099            if prepare is not None:
-1100                params = prepare(cls, params)
-1101        _check_generic(cls, params, len(cls.__parameters__))
-1102
-1103        new_args = []
-1104        for param, new_arg in zip(cls.__parameters__, params):
-1105            if isinstance(param, TypeVarTuple):
-1106                new_args.extend(new_arg)
-1107            else:
-1108                new_args.append(new_arg)
-1109        params = tuple(new_args)
-1110
-1111    return _GenericAlias(cls, params)
-1112
-1113
-1114def _generic_init_subclass(cls, *args, **kwargs):
-1115    super(Generic, cls).__init_subclass__(*args, **kwargs)
-1116    tvars = []
-1117    if '__orig_bases__' in cls.__dict__:
-1118        error = Generic in cls.__orig_bases__
-1119    else:
-1120        error = (Generic in cls.__bases__ and
-1121                    cls.__name__ != 'Protocol' and
-1122                    type(cls) != _TypedDictMeta)
-1123    if error:
-1124        raise TypeError("Cannot inherit from plain Generic")
-1125    if '__orig_bases__' in cls.__dict__:
-1126        tvars = _collect_parameters(cls.__orig_bases__)
-1127        # Look for Generic[T1, ..., Tn].
-1128        # If found, tvars must be a subset of it.
-1129        # If not found, tvars is it.
-1130        # Also check for and reject plain Generic,
-1131        # and reject multiple Generic[...].
-1132        gvars = None
-1133        for base in cls.__orig_bases__:
-1134            if (isinstance(base, _GenericAlias) and
-1135                    base.__origin__ is Generic):
-1136                if gvars is not None:
-1137                    raise TypeError(
-1138                        "Cannot inherit from Generic[...] multiple times.")
-1139                gvars = base.__parameters__
-1140        if gvars is not None:
-1141            tvarset = set(tvars)
-1142            gvarset = set(gvars)
-1143            if not tvarset <= gvarset:
-1144                s_vars = ', '.join(str(t) for t in tvars if t not in gvarset)
-1145                s_args = ', '.join(str(g) for g in gvars)
-1146                raise TypeError(f"Some type variables ({s_vars}) are"
-1147                                f" not listed in Generic[{s_args}]")
-1148            tvars = gvars
-1149    cls.__parameters__ = tuple(tvars)
-1150
-1151
-1152def _is_dunder(attr):
-1153    return attr.startswith('__') and attr.endswith('__')
-1154
-1155class _BaseGenericAlias(_Final, _root=True):
-1156    """The central part of the internal API.
-1157
-1158    This represents a generic version of type 'origin' with type arguments 'params'.
-1159    There are two kind of these aliases: user defined and special. The special ones
-1160    are wrappers around builtin collections and ABCs in collections.abc. These must
-1161    have 'name' always set. If 'inst' is False, then the alias can't be instantiated;
-1162    this is used by e.g. typing.List and typing.Dict.
-1163    """
-1164
-1165    def __init__(self, origin, *, inst=True, name=None):
-1166        self._inst = inst
-1167        self._name = name
-1168        self.__origin__ = origin
-1169        self.__slots__ = None  # This is not documented.
-1170
-1171    def __call__(self, *args, **kwargs):
-1172        if not self._inst:
-1173            raise TypeError(f"Type {self._name} cannot be instantiated; "
-1174                            f"use {self.__origin__.__name__}() instead")
-1175        result = self.__origin__(*args, **kwargs)
-1176        try:
-1177            result.__orig_class__ = self
-1178        # Some objects raise TypeError (or something even more exotic)
-1179        # if you try to set attributes on them; we guard against that here
-1180        except Exception:
-1181            pass
-1182        return result
-1183
-1184    def __mro_entries__(self, bases):
-1185        res = []
-1186        if self.__origin__ not in bases:
-1187            res.append(self.__origin__)
-1188        i = bases.index(self)
-1189        for b in bases[i+1:]:
-1190            if isinstance(b, _BaseGenericAlias) or issubclass(b, Generic):
-1191                break
-1192        else:
-1193            res.append(Generic)
-1194        return tuple(res)
-1195
-1196    def __getattr__(self, attr):
-1197        if attr in {'__name__', '__qualname__'}:
-1198            return self._name or self.__origin__.__name__
-1199
-1200        # We are careful for copy and pickle.
-1201        # Also for simplicity we don't relay any dunder names
-1202        if '__origin__' in self.__dict__ and not _is_dunder(attr):
-1203            return getattr(self.__origin__, attr)
-1204        raise AttributeError(attr)
-1205
-1206    def __setattr__(self, attr, val):
-1207        if _is_dunder(attr) or attr in {'_name', '_inst', '_nparams'}:
-1208            super().__setattr__(attr, val)
-1209        else:
-1210            setattr(self.__origin__, attr, val)
-1211
-1212    def __instancecheck__(self, obj):
-1213        return self.__subclasscheck__(type(obj))
+1082    However, note that this method is also called when defining generic
+1083    classes in the first place with `class Foo(Generic[T]): ...`.
+1084    """
+1085    if not isinstance(params, tuple):
+1086        params = (params,)
+1087
+1088    params = tuple(_type_convert(p) for p in params)
+1089    is_generic_or_protocol = cls in (Generic, Protocol)
+1090
+1091    if is_generic_or_protocol:
+1092        # Generic and Protocol can only be subscripted with unique type variables.
+1093        if not params:
+1094            raise TypeError(
+1095                f"Parameter list to {cls.__qualname__}[...] cannot be empty"
+1096            )
+1097        if not all(_is_typevar_like(p) for p in params):
+1098            raise TypeError(
+1099                f"Parameters to {cls.__name__}[...] must all be type variables "
+1100                f"or parameter specification variables.")
+1101        if len(set(params)) != len(params):
+1102            raise TypeError(
+1103                f"Parameters to {cls.__name__}[...] must all be unique")
+1104    else:
+1105        # Subscripting a regular Generic subclass.
+1106        for param in cls.__parameters__:
+1107            prepare = getattr(param, '__typing_prepare_subst__', None)
+1108            if prepare is not None:
+1109                params = prepare(cls, params)
+1110        _check_generic(cls, params, len(cls.__parameters__))
+1111
+1112        new_args = []
+1113        for param, new_arg in zip(cls.__parameters__, params):
+1114            if isinstance(param, TypeVarTuple):
+1115                new_args.extend(new_arg)
+1116            else:
+1117                new_args.append(new_arg)
+1118        params = tuple(new_args)
+1119
+1120    return _GenericAlias(cls, params)
+1121
+1122
+1123def _generic_init_subclass(cls, *args, **kwargs):
+1124    super(Generic, cls).__init_subclass__(*args, **kwargs)
+1125    tvars = []
+1126    if '__orig_bases__' in cls.__dict__:
+1127        error = Generic in cls.__orig_bases__
+1128    else:
+1129        error = (Generic in cls.__bases__ and
+1130                    cls.__name__ != 'Protocol' and
+1131                    type(cls) != _TypedDictMeta)
+1132    if error:
+1133        raise TypeError("Cannot inherit from plain Generic")
+1134    if '__orig_bases__' in cls.__dict__:
+1135        tvars = _collect_parameters(cls.__orig_bases__)
+1136        # Look for Generic[T1, ..., Tn].
+1137        # If found, tvars must be a subset of it.
+1138        # If not found, tvars is it.
+1139        # Also check for and reject plain Generic,
+1140        # and reject multiple Generic[...].
+1141        gvars = None
+1142        for base in cls.__orig_bases__:
+1143            if (isinstance(base, _GenericAlias) and
+1144                    base.__origin__ is Generic):
+1145                if gvars is not None:
+1146                    raise TypeError(
+1147                        "Cannot inherit from Generic[...] multiple times.")
+1148                gvars = base.__parameters__
+1149        if gvars is not None:
+1150            tvarset = set(tvars)
+1151            gvarset = set(gvars)
+1152            if not tvarset <= gvarset:
+1153                s_vars = ', '.join(str(t) for t in tvars if t not in gvarset)
+1154                s_args = ', '.join(str(g) for g in gvars)
+1155                raise TypeError(f"Some type variables ({s_vars}) are"
+1156                                f" not listed in Generic[{s_args}]")
+1157            tvars = gvars
+1158    cls.__parameters__ = tuple(tvars)
+1159
+1160
+1161def _is_dunder(attr):
+1162    return attr.startswith('__') and attr.endswith('__')
+1163
+1164class _BaseGenericAlias(_Final, _root=True):
+1165    """The central part of the internal API.
+1166
+1167    This represents a generic version of type 'origin' with type arguments 'params'.
+1168    There are two kind of these aliases: user defined and special. The special ones
+1169    are wrappers around builtin collections and ABCs in collections.abc. These must
+1170    have 'name' always set. If 'inst' is False, then the alias can't be instantiated;
+1171    this is used by e.g. typing.List and typing.Dict.
+1172    """
+1173
+1174    def __init__(self, origin, *, inst=True, name=None):
+1175        self._inst = inst
+1176        self._name = name
+1177        self.__origin__ = origin
+1178        self.__slots__ = None  # This is not documented.
+1179
+1180    def __call__(self, *args, **kwargs):
+1181        if not self._inst:
+1182            raise TypeError(f"Type {self._name} cannot be instantiated; "
+1183                            f"use {self.__origin__.__name__}() instead")
+1184        result = self.__origin__(*args, **kwargs)
+1185        try:
+1186            result.__orig_class__ = self
+1187        # Some objects raise TypeError (or something even more exotic)
+1188        # if you try to set attributes on them; we guard against that here
+1189        except Exception:
+1190            pass
+1191        return result
+1192
+1193    def __mro_entries__(self, bases):
+1194        res = []
+1195        if self.__origin__ not in bases:
+1196            res.append(self.__origin__)
+1197        i = bases.index(self)
+1198        for b in bases[i+1:]:
+1199            if isinstance(b, _BaseGenericAlias) or issubclass(b, Generic):
+1200                break
+1201        else:
+1202            res.append(Generic)
+1203        return tuple(res)
+1204
+1205    def __getattr__(self, attr):
+1206        if attr in {'__name__', '__qualname__'}:
+1207            return self._name or self.__origin__.__name__
+1208
+1209        # We are careful for copy and pickle.
+1210        # Also for simplicity we don't relay any dunder names
+1211        if '__origin__' in self.__dict__ and not _is_dunder(attr):
+1212            return getattr(self.__origin__, attr)
+1213        raise AttributeError(attr)
 1214
-1215    def __subclasscheck__(self, cls):
-1216        raise TypeError("Subscripted generics cannot be used with"
-1217                        " class and instance checks")
-1218
-1219    def __dir__(self):
-1220        return list(set(super().__dir__()
-1221                + [attr for attr in dir(self.__origin__) if not _is_dunder(attr)]))
-1222
+1215    def __setattr__(self, attr, val):
+1216        if _is_dunder(attr) or attr in {'_name', '_inst', '_nparams'}:
+1217            super().__setattr__(attr, val)
+1218        else:
+1219            setattr(self.__origin__, attr, val)
+1220
+1221    def __instancecheck__(self, obj):
+1222        return self.__subclasscheck__(type(obj))
 1223
-1224# Special typing constructs Union, Optional, Generic, Callable and Tuple
-1225# use three special attributes for internal bookkeeping of generic types:
-1226# * __parameters__ is a tuple of unique free type parameters of a generic
-1227#   type, for example, Dict[T, T].__parameters__ == (T,);
-1228# * __origin__ keeps a reference to a type that was subscripted,
-1229#   e.g., Union[T, int].__origin__ == Union, or the non-generic version of
-1230#   the type.
-1231# * __args__ is a tuple of all arguments used in subscripting,
-1232#   e.g., Dict[T, int].__args__ == (T, int).
-1233
-1234
-1235class _GenericAlias(_BaseGenericAlias, _root=True):
-1236    # The type of parameterized generics.
-1237    #
-1238    # That is, for example, `type(List[int])` is `_GenericAlias`.
-1239    #
-1240    # Objects which are instances of this class include:
-1241    # * Parameterized container types, e.g. `Tuple[int]`, `List[int]`.
-1242    #  * Note that native container types, e.g. `tuple`, `list`, use
-1243    #    `types.GenericAlias` instead.
-1244    # * Parameterized classes:
-1245    #     class C[T]: pass
-1246    #     # C[int] is a _GenericAlias
-1247    # * `Callable` aliases, generic `Callable` aliases, and
-1248    #   parameterized `Callable` aliases:
-1249    #     T = TypeVar('T')
-1250    #     # _CallableGenericAlias inherits from _GenericAlias.
-1251    #     A = Callable[[], None]  # _CallableGenericAlias
-1252    #     B = Callable[[T], None]  # _CallableGenericAlias
-1253    #     C = B[int]  # _CallableGenericAlias
-1254    # * Parameterized `Final`, `ClassVar` and `TypeGuard`:
-1255    #     # All _GenericAlias
-1256    #     Final[int]
-1257    #     ClassVar[float]
-1258    #     TypeVar[bool]
-1259
-1260    def __init__(self, origin, args, *, inst=True, name=None):
-1261        super().__init__(origin, inst=inst, name=name)
-1262        if not isinstance(args, tuple):
-1263            args = (args,)
-1264        self.__args__ = tuple(... if a is _TypingEllipsis else
-1265                              a for a in args)
-1266        self.__parameters__ = _collect_parameters(args)
-1267        if not name:
-1268            self.__module__ = origin.__module__
-1269
-1270    def __eq__(self, other):
-1271        if not isinstance(other, _GenericAlias):
-1272            return NotImplemented
-1273        return (self.__origin__ == other.__origin__
-1274                and self.__args__ == other.__args__)
-1275
-1276    def __hash__(self):
-1277        return hash((self.__origin__, self.__args__))
+1224    def __subclasscheck__(self, cls):
+1225        raise TypeError("Subscripted generics cannot be used with"
+1226                        " class and instance checks")
+1227
+1228    def __dir__(self):
+1229        return list(set(super().__dir__()
+1230                + [attr for attr in dir(self.__origin__) if not _is_dunder(attr)]))
+1231
+1232
+1233# Special typing constructs Union, Optional, Generic, Callable and Tuple
+1234# use three special attributes for internal bookkeeping of generic types:
+1235# * __parameters__ is a tuple of unique free type parameters of a generic
+1236#   type, for example, Dict[T, T].__parameters__ == (T,);
+1237# * __origin__ keeps a reference to a type that was subscripted,
+1238#   e.g., Union[T, int].__origin__ == Union, or the non-generic version of
+1239#   the type.
+1240# * __args__ is a tuple of all arguments used in subscripting,
+1241#   e.g., Dict[T, int].__args__ == (T, int).
+1242
+1243
+1244class _GenericAlias(_BaseGenericAlias, _root=True):
+1245    # The type of parameterized generics.
+1246    #
+1247    # That is, for example, `type(List[int])` is `_GenericAlias`.
+1248    #
+1249    # Objects which are instances of this class include:
+1250    # * Parameterized container types, e.g. `Tuple[int]`, `List[int]`.
+1251    #  * Note that native container types, e.g. `tuple`, `list`, use
+1252    #    `types.GenericAlias` instead.
+1253    # * Parameterized classes:
+1254    #     class C[T]: pass
+1255    #     # C[int] is a _GenericAlias
+1256    # * `Callable` aliases, generic `Callable` aliases, and
+1257    #   parameterized `Callable` aliases:
+1258    #     T = TypeVar('T')
+1259    #     # _CallableGenericAlias inherits from _GenericAlias.
+1260    #     A = Callable[[], None]  # _CallableGenericAlias
+1261    #     B = Callable[[T], None]  # _CallableGenericAlias
+1262    #     C = B[int]  # _CallableGenericAlias
+1263    # * Parameterized `Final`, `ClassVar` and `TypeGuard`:
+1264    #     # All _GenericAlias
+1265    #     Final[int]
+1266    #     ClassVar[float]
+1267    #     TypeVar[bool]
+1268
+1269    def __init__(self, origin, args, *, inst=True, name=None):
+1270        super().__init__(origin, inst=inst, name=name)
+1271        if not isinstance(args, tuple):
+1272            args = (args,)
+1273        self.__args__ = tuple(... if a is _TypingEllipsis else
+1274                              a for a in args)
+1275        self.__parameters__ = _collect_parameters(args)
+1276        if not name:
+1277            self.__module__ = origin.__module__
 1278
-1279    def __or__(self, right):
-1280        return Union[self, right]
-1281
-1282    def __ror__(self, left):
-1283        return Union[left, self]
+1279    def __eq__(self, other):
+1280        if not isinstance(other, _GenericAlias):
+1281            return NotImplemented
+1282        return (self.__origin__ == other.__origin__
+1283                and self.__args__ == other.__args__)
 1284
-1285    @_tp_cache
-1286    def __getitem__(self, args):
-1287        # Parameterizes an already-parameterized object.
-1288        #
-1289        # For example, we arrive here doing something like:
-1290        #   T1 = TypeVar('T1')
-1291        #   T2 = TypeVar('T2')
-1292        #   T3 = TypeVar('T3')
-1293        #   class A(Generic[T1]): pass
-1294        #   B = A[T2]  # B is a _GenericAlias
-1295        #   C = B[T3]  # Invokes _GenericAlias.__getitem__
-1296        #
-1297        # We also arrive here when parameterizing a generic `Callable` alias:
-1298        #   T = TypeVar('T')
-1299        #   C = Callable[[T], None]
-1300        #   C[int]  # Invokes _GenericAlias.__getitem__
-1301
-1302        if self.__origin__ in (Generic, Protocol):
-1303            # Can't subscript Generic[...] or Protocol[...].
-1304            raise TypeError(f"Cannot subscript already-subscripted {self}")
-1305        if not self.__parameters__:
-1306            raise TypeError(f"{self} is not a generic class")
-1307
-1308        # Preprocess `args`.
-1309        if not isinstance(args, tuple):
-1310            args = (args,)
-1311        args = tuple(_type_convert(p) for p in args)
-1312        args = _unpack_args(args)
-1313        new_args = self._determine_new_args(args)
-1314        r = self.copy_with(new_args)
-1315        return r
+1285    def __hash__(self):
+1286        return hash((self.__origin__, self.__args__))
+1287
+1288    def __or__(self, right):
+1289        return Union[self, right]
+1290
+1291    def __ror__(self, left):
+1292        return Union[left, self]
+1293
+1294    @_tp_cache
+1295    def __getitem__(self, args):
+1296        # Parameterizes an already-parameterized object.
+1297        #
+1298        # For example, we arrive here doing something like:
+1299        #   T1 = TypeVar('T1')
+1300        #   T2 = TypeVar('T2')
+1301        #   T3 = TypeVar('T3')
+1302        #   class A(Generic[T1]): pass
+1303        #   B = A[T2]  # B is a _GenericAlias
+1304        #   C = B[T3]  # Invokes _GenericAlias.__getitem__
+1305        #
+1306        # We also arrive here when parameterizing a generic `Callable` alias:
+1307        #   T = TypeVar('T')
+1308        #   C = Callable[[T], None]
+1309        #   C[int]  # Invokes _GenericAlias.__getitem__
+1310
+1311        if self.__origin__ in (Generic, Protocol):
+1312            # Can't subscript Generic[...] or Protocol[...].
+1313            raise TypeError(f"Cannot subscript already-subscripted {self}")
+1314        if not self.__parameters__:
+1315            raise TypeError(f"{self} is not a generic class")
 1316
-1317    def _determine_new_args(self, args):
-1318        # Determines new __args__ for __getitem__.
-1319        #
-1320        # For example, suppose we had:
-1321        #   T1 = TypeVar('T1')
-1322        #   T2 = TypeVar('T2')
-1323        #   class A(Generic[T1, T2]): pass
-1324        #   T3 = TypeVar('T3')
-1325        #   B = A[int, T3]
-1326        #   C = B[str]
-1327        # `B.__args__` is `(int, T3)`, so `C.__args__` should be `(int, str)`.
-1328        # Unfortunately, this is harder than it looks, because if `T3` is
-1329        # anything more exotic than a plain `TypeVar`, we need to consider
-1330        # edge cases.
-1331
-1332        params = self.__parameters__
-1333        # In the example above, this would be {T3: str}
-1334        for param in params:
-1335            prepare = getattr(param, '__typing_prepare_subst__', None)
-1336            if prepare is not None:
-1337                args = prepare(self, args)
-1338        alen = len(args)
-1339        plen = len(params)
-1340        if alen != plen:
-1341            raise TypeError(f"Too {'many' if alen > plen else 'few'} arguments for {self};"
-1342                            f" actual {alen}, expected {plen}")
-1343        new_arg_by_param = dict(zip(params, args))
-1344        return tuple(self._make_substitution(self.__args__, new_arg_by_param))
-1345
-1346    def _make_substitution(self, args, new_arg_by_param):
-1347        """Create a list of new type arguments."""
-1348        new_args = []
-1349        for old_arg in args:
-1350            if isinstance(old_arg, type):
-1351                new_args.append(old_arg)
-1352                continue
-1353
-1354            substfunc = getattr(old_arg, '__typing_subst__', None)
-1355            if substfunc:
-1356                new_arg = substfunc(new_arg_by_param[old_arg])
-1357            else:
-1358                subparams = getattr(old_arg, '__parameters__', ())
-1359                if not subparams:
-1360                    new_arg = old_arg
-1361                else:
-1362                    subargs = []
-1363                    for x in subparams:
-1364                        if isinstance(x, TypeVarTuple):
-1365                            subargs.extend(new_arg_by_param[x])
-1366                        else:
-1367                            subargs.append(new_arg_by_param[x])
-1368                    new_arg = old_arg[tuple(subargs)]
-1369
-1370            if self.__origin__ == collections.abc.Callable and isinstance(new_arg, tuple):
-1371                # Consider the following `Callable`.
-1372                #   C = Callable[[int], str]
-1373                # Here, `C.__args__` should be (int, str) - NOT ([int], str).
-1374                # That means that if we had something like...
-1375                #   P = ParamSpec('P')
-1376                #   T = TypeVar('T')
-1377                #   C = Callable[P, T]
-1378                #   D = C[[int, str], float]
-1379                # ...we need to be careful; `new_args` should end up as
-1380                # `(int, str, float)` rather than `([int, str], float)`.
-1381                new_args.extend(new_arg)
-1382            elif _is_unpacked_typevartuple(old_arg):
-1383                # Consider the following `_GenericAlias`, `B`:
-1384                #   class A(Generic[*Ts]): ...
-1385                #   B = A[T, *Ts]
-1386                # If we then do:
-1387                #   B[float, int, str]
-1388                # The `new_arg` corresponding to `T` will be `float`, and the
-1389                # `new_arg` corresponding to `*Ts` will be `(int, str)`. We
-1390                # should join all these types together in a flat list
-1391                # `(float, int, str)` - so again, we should `extend`.
-1392                new_args.extend(new_arg)
-1393            elif isinstance(old_arg, tuple):
-1394                # Corner case:
-1395                #    P = ParamSpec('P')
-1396                #    T = TypeVar('T')
-1397                #    class Base(Generic[P]): ...
-1398                # Can be substituted like this:
-1399                #    X = Base[[int, T]]
-1400                # In this case, `old_arg` will be a tuple:
-1401                new_args.append(
-1402                    tuple(self._make_substitution(old_arg, new_arg_by_param)),
-1403                )
-1404            else:
-1405                new_args.append(new_arg)
-1406        return new_args
-1407
-1408    def copy_with(self, args):
-1409        return self.__class__(self.__origin__, args, name=self._name, inst=self._inst)
-1410
-1411    def __repr__(self):
-1412        if self._name:
-1413            name = 'typing.' + self._name
-1414        else:
-1415            name = _type_repr(self.__origin__)
-1416        if self.__args__:
-1417            args = ", ".join([_type_repr(a) for a in self.__args__])
-1418        else:
-1419            # To ensure the repr is eval-able.
-1420            args = "()"
-1421        return f'{name}[{args}]'
-1422
-1423    def __reduce__(self):
-1424        if self._name:
-1425            origin = globals()[self._name]
-1426        else:
-1427            origin = self.__origin__
-1428        args = tuple(self.__args__)
-1429        if len(args) == 1 and not isinstance(args[0], tuple):
-1430            args, = args
-1431        return operator.getitem, (origin, args)
-1432
-1433    def __mro_entries__(self, bases):
-1434        if isinstance(self.__origin__, _SpecialForm):
-1435            raise TypeError(f"Cannot subclass {self!r}")
-1436
-1437        if self._name:  # generic version of an ABC or built-in class
-1438            return super().__mro_entries__(bases)
-1439        if self.__origin__ is Generic:
-1440            if Protocol in bases:
-1441                return ()
-1442            i = bases.index(self)
-1443            for b in bases[i+1:]:
-1444                if isinstance(b, _BaseGenericAlias) and b is not self:
-1445                    return ()
-1446        return (self.__origin__,)
-1447
-1448    def __iter__(self):
-1449        yield Unpack[self]
-1450
-1451
-1452# _nparams is the number of accepted parameters, e.g. 0 for Hashable,
-1453# 1 for List and 2 for Dict.  It may be -1 if variable number of
-1454# parameters are accepted (needs custom __getitem__).
-1455
-1456class _SpecialGenericAlias(_NotIterable, _BaseGenericAlias, _root=True):
-1457    def __init__(self, origin, nparams, *, inst=True, name=None):
-1458        if name is None:
-1459            name = origin.__name__
-1460        super().__init__(origin, inst=inst, name=name)
-1461        self._nparams = nparams
-1462        if origin.__module__ == 'builtins':
-1463            self.__doc__ = f'A generic version of {origin.__qualname__}.'
-1464        else:
-1465            self.__doc__ = f'A generic version of {origin.__module__}.{origin.__qualname__}.'
-1466
-1467    @_tp_cache
-1468    def __getitem__(self, params):
-1469        if not isinstance(params, tuple):
-1470            params = (params,)
-1471        msg = "Parameters to generic types must be types."
-1472        params = tuple(_type_check(p, msg) for p in params)
-1473        _check_generic(self, params, self._nparams)
-1474        return self.copy_with(params)
+1317        # Preprocess `args`.
+1318        if not isinstance(args, tuple):
+1319            args = (args,)
+1320        args = tuple(_type_convert(p) for p in args)
+1321        args = _unpack_args(args)
+1322        new_args = self._determine_new_args(args)
+1323        r = self.copy_with(new_args)
+1324        return r
+1325
+1326    def _determine_new_args(self, args):
+1327        # Determines new __args__ for __getitem__.
+1328        #
+1329        # For example, suppose we had:
+1330        #   T1 = TypeVar('T1')
+1331        #   T2 = TypeVar('T2')
+1332        #   class A(Generic[T1, T2]): pass
+1333        #   T3 = TypeVar('T3')
+1334        #   B = A[int, T3]
+1335        #   C = B[str]
+1336        # `B.__args__` is `(int, T3)`, so `C.__args__` should be `(int, str)`.
+1337        # Unfortunately, this is harder than it looks, because if `T3` is
+1338        # anything more exotic than a plain `TypeVar`, we need to consider
+1339        # edge cases.
+1340
+1341        params = self.__parameters__
+1342        # In the example above, this would be {T3: str}
+1343        for param in params:
+1344            prepare = getattr(param, '__typing_prepare_subst__', None)
+1345            if prepare is not None:
+1346                args = prepare(self, args)
+1347        alen = len(args)
+1348        plen = len(params)
+1349        if alen != plen:
+1350            raise TypeError(f"Too {'many' if alen > plen else 'few'} arguments for {self};"
+1351                            f" actual {alen}, expected {plen}")
+1352        new_arg_by_param = dict(zip(params, args))
+1353        return tuple(self._make_substitution(self.__args__, new_arg_by_param))
+1354
+1355    def _make_substitution(self, args, new_arg_by_param):
+1356        """Create a list of new type arguments."""
+1357        new_args = []
+1358        for old_arg in args:
+1359            if isinstance(old_arg, type):
+1360                new_args.append(old_arg)
+1361                continue
+1362
+1363            substfunc = getattr(old_arg, '__typing_subst__', None)
+1364            if substfunc:
+1365                new_arg = substfunc(new_arg_by_param[old_arg])
+1366            else:
+1367                subparams = getattr(old_arg, '__parameters__', ())
+1368                if not subparams:
+1369                    new_arg = old_arg
+1370                else:
+1371                    subargs = []
+1372                    for x in subparams:
+1373                        if isinstance(x, TypeVarTuple):
+1374                            subargs.extend(new_arg_by_param[x])
+1375                        else:
+1376                            subargs.append(new_arg_by_param[x])
+1377                    new_arg = old_arg[tuple(subargs)]
+1378
+1379            if self.__origin__ == collections.abc.Callable and isinstance(new_arg, tuple):
+1380                # Consider the following `Callable`.
+1381                #   C = Callable[[int], str]
+1382                # Here, `C.__args__` should be (int, str) - NOT ([int], str).
+1383                # That means that if we had something like...
+1384                #   P = ParamSpec('P')
+1385                #   T = TypeVar('T')
+1386                #   C = Callable[P, T]
+1387                #   D = C[[int, str], float]
+1388                # ...we need to be careful; `new_args` should end up as
+1389                # `(int, str, float)` rather than `([int, str], float)`.
+1390                new_args.extend(new_arg)
+1391            elif _is_unpacked_typevartuple(old_arg):
+1392                # Consider the following `_GenericAlias`, `B`:
+1393                #   class A(Generic[*Ts]): ...
+1394                #   B = A[T, *Ts]
+1395                # If we then do:
+1396                #   B[float, int, str]
+1397                # The `new_arg` corresponding to `T` will be `float`, and the
+1398                # `new_arg` corresponding to `*Ts` will be `(int, str)`. We
+1399                # should join all these types together in a flat list
+1400                # `(float, int, str)` - so again, we should `extend`.
+1401                new_args.extend(new_arg)
+1402            elif isinstance(old_arg, tuple):
+1403                # Corner case:
+1404                #    P = ParamSpec('P')
+1405                #    T = TypeVar('T')
+1406                #    class Base(Generic[P]): ...
+1407                # Can be substituted like this:
+1408                #    X = Base[[int, T]]
+1409                # In this case, `old_arg` will be a tuple:
+1410                new_args.append(
+1411                    tuple(self._make_substitution(old_arg, new_arg_by_param)),
+1412                )
+1413            else:
+1414                new_args.append(new_arg)
+1415        return new_args
+1416
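# Illustrative sketch of the two "extend" branches above, using hypothetical
# names; it assumes a Python version that has ParamSpec and TypeVarTuple.
from typing import Callable, Generic, ParamSpec, TypeVar, TypeVarTuple, Unpack

P = ParamSpec('P')
T = TypeVar('T')
Ts = TypeVarTuple('Ts')

C = Callable[P, T]
D = C[[int, str], float]
print(D.__args__)    # flattened: expected (int, str, float), not ([int, str], float)

class V(Generic[Unpack[Ts]]):
    pass

W = V[T, Unpack[Ts]]
X = W[float, int, str]
print(X.__args__)    # joined into one flat tuple: expected (float, int, str)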
+1417    def copy_with(self, args):
+1418        return self.__class__(self.__origin__, args, name=self._name, inst=self._inst)
+1419
+1420    def __repr__(self):
+1421        if self._name:
+1422            name = 'typing.' + self._name
+1423        else:
+1424            name = _type_repr(self.__origin__)
+1425        if self.__args__:
+1426            args = ", ".join([_type_repr(a) for a in self.__args__])
+1427        else:
+1428            # To ensure the repr is eval-able.
+1429            args = "()"
+1430        return f'{name}[{args}]'
+1431
+1432    def __reduce__(self):
+1433        if self._name:
+1434            origin = globals()[self._name]
+1435        else:
+1436            origin = self.__origin__
+1437        args = tuple(self.__args__)
+1438        if len(args) == 1 and not isinstance(args[0], tuple):
+1439            args, = args
+1440        return operator.getitem, (origin, args)
+1441
+1442    def __mro_entries__(self, bases):
+1443        if isinstance(self.__origin__, _SpecialForm):
+1444            raise TypeError(f"Cannot subclass {self!r}")
+1445
+1446        if self._name:  # generic version of an ABC or built-in class
+1447            return super().__mro_entries__(bases)
+1448        if self.__origin__ is Generic:
+1449            if Protocol in bases:
+1450                return ()
+1451            i = bases.index(self)
+1452            for b in bases[i+1:]:
+1453                if isinstance(b, _BaseGenericAlias) and b is not self:
+1454                    return ()
+1455        return (self.__origin__,)
+1456
+1457    def __iter__(self):
+1458        yield Unpack[self]
+1459
+1460
+1461# _nparams is the number of accepted parameters, e.g. 0 for Hashable,
+1462# 1 for List and 2 for Dict.  It may be -1 if a variable number of
+1463# parameters is accepted (needs custom __getitem__).
+1464
+1465class _SpecialGenericAlias(_NotIterable, _BaseGenericAlias, _root=True):
+1466    def __init__(self, origin, nparams, *, inst=True, name=None):
+1467        if name is None:
+1468            name = origin.__name__
+1469        super().__init__(origin, inst=inst, name=name)
+1470        self._nparams = nparams
+1471        if origin.__module__ == 'builtins':
+1472            self.__doc__ = f'A generic version of {origin.__qualname__}.'
+1473        else:
+1474            self.__doc__ = f'A generic version of {origin.__module__}.{origin.__qualname__}.'
 1475
-1476    def copy_with(self, params):
-1477        return _GenericAlias(self.__origin__, params,
-1478                             name=self._name, inst=self._inst)
-1479
-1480    def __repr__(self):
-1481        return 'typing.' + self._name
-1482
-1483    def __subclasscheck__(self, cls):
-1484        if isinstance(cls, _SpecialGenericAlias):
-1485            return issubclass(cls.__origin__, self.__origin__)
-1486        if not isinstance(cls, _GenericAlias):
-1487            return issubclass(cls, self.__origin__)
-1488        return super().__subclasscheck__(cls)
-1489
-1490    def __reduce__(self):
-1491        return self._name
-1492
-1493    def __or__(self, right):
-1494        return Union[self, right]
-1495
-1496    def __ror__(self, left):
-1497        return Union[left, self]
+1476    @_tp_cache
+1477    def __getitem__(self, params):
+1478        if not isinstance(params, tuple):
+1479            params = (params,)
+1480        msg = "Parameters to generic types must be types."
+1481        params = tuple(_type_check(p, msg) for p in params)
+1482        _check_generic(self, params, self._nparams)
+1483        return self.copy_with(params)
+1484
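# Illustrative sketch of the arity check above: typing.List is declared with
# one parameter and typing.Dict with two, so _check_generic rejects any other
# count. The error text shown in the comment is indicative only.
from typing import Dict, List

List[int]          # accepted: exactly one argument
Dict[str, int]     # accepted: exactly two arguments

try:
    List[int, str]
except TypeError as exc:
    print(exc)     # e.g. "Too many arguments for typing.List; actual 2, expected 1"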
+1485    def copy_with(self, params):
+1486        return _GenericAlias(self.__origin__, params,
+1487                             name=self._name, inst=self._inst)
+1488
+1489    def __repr__(self):
+1490        return 'typing.' + self._name
+1491
+1492    def __subclasscheck__(self, cls):
+1493        if isinstance(cls, _SpecialGenericAlias):
+1494            return issubclass(cls.__origin__, self.__origin__)
+1495        if not isinstance(cls, _GenericAlias):
+1496            return issubclass(cls, self.__origin__)
+1497        return super().__subclasscheck__(cls)
 1498
-1499
-1500class _DeprecatedGenericAlias(_SpecialGenericAlias, _root=True):
-1501    def __init__(
-1502        self, origin, nparams, *, removal_version, inst=True, name=None
-1503    ):
-1504        super().__init__(origin, nparams, inst=inst, name=name)
-1505        self._removal_version = removal_version
-1506
-1507    def __instancecheck__(self, inst):
-1508        import warnings
-1509        warnings._deprecated(
-1510            f"{self.__module__}.{self._name}", remove=self._removal_version
-1511        )
-1512        return super().__instancecheck__(inst)
-1513
-1514
-1515class _CallableGenericAlias(_NotIterable, _GenericAlias, _root=True):
-1516    def __repr__(self):
-1517        assert self._name == 'Callable'
-1518        args = self.__args__
-1519        if len(args) == 2 and _is_param_expr(args[0]):
-1520            return super().__repr__()
-1521        return (f'typing.Callable'
-1522                f'[[{", ".join([_type_repr(a) for a in args[:-1]])}], '
-1523                f'{_type_repr(args[-1])}]')
-1524
-1525    def __reduce__(self):
-1526        args = self.__args__
-1527        if not (len(args) == 2 and _is_param_expr(args[0])):
-1528            args = list(args[:-1]), args[-1]
-1529        return operator.getitem, (Callable, args)
-1530
-1531
-1532class _CallableType(_SpecialGenericAlias, _root=True):
-1533    def copy_with(self, params):
-1534        return _CallableGenericAlias(self.__origin__, params,
-1535                                     name=self._name, inst=self._inst)
-1536
-1537    def __getitem__(self, params):
-1538        if not isinstance(params, tuple) or len(params) != 2:
-1539            raise TypeError("Callable must be used as "
-1540                            "Callable[[arg, ...], result].")
-1541        args, result = params
-1542        # This relaxes what args can be on purpose to allow things like
-1543        # PEP 612 ParamSpec.  Responsibility for whether a user is using
-1544        # Callable[...] properly is deferred to static type checkers.
-1545        if isinstance(args, list):
-1546            params = (tuple(args), result)
-1547        else:
-1548            params = (args, result)
-1549        return self.__getitem_inner__(params)
-1550
-1551    @_tp_cache
-1552    def __getitem_inner__(self, params):
-1553        args, result = params
-1554        msg = "Callable[args, result]: result must be a type."
-1555        result = _type_check(result, msg)
-1556        if args is Ellipsis:
-1557            return self.copy_with((_TypingEllipsis, result))
-1558        if not isinstance(args, tuple):
-1559            args = (args,)
-1560        args = tuple(_type_convert(arg) for arg in args)
-1561        params = args + (result,)
-1562        return self.copy_with(params)
-1563
-1564
-1565class _TupleType(_SpecialGenericAlias, _root=True):
-1566    @_tp_cache
-1567    def __getitem__(self, params):
-1568        if not isinstance(params, tuple):
-1569            params = (params,)
-1570        if len(params) >= 2 and params[-1] is ...:
-1571            msg = "Tuple[t, ...]: t must be a type."
-1572            params = tuple(_type_check(p, msg) for p in params[:-1])
-1573            return self.copy_with((*params, _TypingEllipsis))
-1574        msg = "Tuple[t0, t1, ...]: each t must be a type."
-1575        params = tuple(_type_check(p, msg) for p in params)
-1576        return self.copy_with(params)
-1577
-1578
-1579class _UnionGenericAlias(_NotIterable, _GenericAlias, _root=True):
-1580    def copy_with(self, params):
-1581        return Union[params]
-1582
-1583    def __eq__(self, other):
-1584        if not isinstance(other, (_UnionGenericAlias, types.UnionType)):
-1585            return NotImplemented
-1586        try:  # fast path
-1587            return set(self.__args__) == set(other.__args__)
-1588        except TypeError:  # not hashable, slow path
-1589            return _compare_args_orderless(self.__args__, other.__args__)
-1590
-1591    def __hash__(self):
-1592        return hash(frozenset(self.__args__))
-1593
-1594    def __repr__(self):
-1595        args = self.__args__
-1596        if len(args) == 2:
-1597            if args[0] is type(None):
-1598                return f'typing.Optional[{_type_repr(args[1])}]'
-1599            elif args[1] is type(None):
-1600                return f'typing.Optional[{_type_repr(args[0])}]'
-1601        return super().__repr__()
+1499    def __reduce__(self):
+1500        return self._name
+1501
+1502    def __or__(self, right):
+1503        return Union[self, right]
+1504
+1505    def __ror__(self, left):
+1506        return Union[left, self]
+1507
+1508
+1509class _DeprecatedGenericAlias(_SpecialGenericAlias, _root=True):
+1510    def __init__(
+1511        self, origin, nparams, *, removal_version, inst=True, name=None
+1512    ):
+1513        super().__init__(origin, nparams, inst=inst, name=name)
+1514        self._removal_version = removal_version
+1515
+1516    def __instancecheck__(self, inst):
+1517        import warnings
+1518        warnings._deprecated(
+1519            f"{self.__module__}.{self._name}", remove=self._removal_version
+1520        )
+1521        return super().__instancecheck__(inst)
+1522
+1523
+1524class _CallableGenericAlias(_NotIterable, _GenericAlias, _root=True):
+1525    def __repr__(self):
+1526        assert self._name == 'Callable'
+1527        args = self.__args__
+1528        if len(args) == 2 and _is_param_expr(args[0]):
+1529            return super().__repr__()
+1530        return (f'typing.Callable'
+1531                f'[[{", ".join([_type_repr(a) for a in args[:-1]])}], '
+1532                f'{_type_repr(args[-1])}]')
+1533
+1534    def __reduce__(self):
+1535        args = self.__args__
+1536        if not (len(args) == 2 and _is_param_expr(args[0])):
+1537            args = list(args[:-1]), args[-1]
+1538        return operator.getitem, (Callable, args)
+1539
+1540
+1541class _CallableType(_SpecialGenericAlias, _root=True):
+1542    def copy_with(self, params):
+1543        return _CallableGenericAlias(self.__origin__, params,
+1544                                     name=self._name, inst=self._inst)
+1545
+1546    def __getitem__(self, params):
+1547        if not isinstance(params, tuple) or len(params) != 2:
+1548            raise TypeError("Callable must be used as "
+1549                            "Callable[[arg, ...], result].")
+1550        args, result = params
+1551        # This relaxes what args can be on purpose to allow things like
+1552        # PEP 612 ParamSpec.  Responsibility for whether a user is using
+1553        # Callable[...] properly is deferred to static type checkers.
+1554        if isinstance(args, list):
+1555            params = (tuple(args), result)
+1556        else:
+1557            params = (args, result)
+1558        return self.__getitem_inner__(params)
+1559
+1560    @_tp_cache
+1561    def __getitem_inner__(self, params):
+1562        args, result = params
+1563        msg = "Callable[args, result]: result must be a type."
+1564        result = _type_check(result, msg)
+1565        if args is Ellipsis:
+1566            return self.copy_with((_TypingEllipsis, result))
+1567        if not isinstance(args, tuple):
+1568            args = (args,)
+1569        args = tuple(_type_convert(arg) for arg in args)
+1570        params = args + (result,)
+1571        return self.copy_with(params)
+1572
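# Illustrative sketch of the Callable forms accepted above: the argument list
# is stored flat on __args__, and anything other than the two-item
# Callable[[args...], result] shape is rejected.
from typing import Callable

C1 = Callable[[int, str], float]   # argument types as a list, then the result
C2 = Callable[..., int]            # Ellipsis stands for "any argument list"

print(C1.__args__)   # expected (int, str, float) - flattened, as __repr__ notes
print(C2.__args__)   # expected (Ellipsis, int)

try:
    Callable[int]
except TypeError as exc:
    print(exc)       # "Callable must be used as Callable[[arg, ...], result]."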
+1573
+1574class _TupleType(_SpecialGenericAlias, _root=True):
+1575    @_tp_cache
+1576    def __getitem__(self, params):
+1577        if not isinstance(params, tuple):
+1578            params = (params,)
+1579        if len(params) >= 2 and params[-1] is ...:
+1580            msg = "Tuple[t, ...]: t must be a type."
+1581            params = tuple(_type_check(p, msg) for p in params[:-1])
+1582            return self.copy_with((*params, _TypingEllipsis))
+1583        msg = "Tuple[t0, t1, ...]: each t must be a type."
+1584        params = tuple(_type_check(p, msg) for p in params)
+1585        return self.copy_with(params)
+1586
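# Illustrative sketch of the two Tuple shapes handled above: a fixed-length
# tuple of types, and a homogeneous tuple written with a trailing Ellipsis.
# The names Pair, Row and first_cell are hypothetical.
from typing import Tuple

Pair = Tuple[int, str]     # exactly an int followed by a str
Row = Tuple[float, ...]    # any number of floats

def first_cell(row: Row) -> float:
    return row[0]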
+1587
+1588class _UnionGenericAlias(_NotIterable, _GenericAlias, _root=True):
+1589    def copy_with(self, params):
+1590        return Union[params]
+1591
+1592    def __eq__(self, other):
+1593        if not isinstance(other, (_UnionGenericAlias, types.UnionType)):
+1594            return NotImplemented
+1595        try:  # fast path
+1596            return set(self.__args__) == set(other.__args__)
+1597        except TypeError:  # not hashable, slow path
+1598            return _compare_args_orderless(self.__args__, other.__args__)
+1599
+1600    def __hash__(self):
+1601        return hash(frozenset(self.__args__))
 1602
-1603    def __instancecheck__(self, obj):
-1604        return self.__subclasscheck__(type(obj))
-1605
-1606    def __subclasscheck__(self, cls):
-1607        for arg in self.__args__:
-1608            if issubclass(cls, arg):
-1609                return True
-1610
-1611    def __reduce__(self):
-1612        func, (origin, args) = super().__reduce__()
-1613        return func, (Union, args)
+1603    def __repr__(self):
+1604        args = self.__args__
+1605        if len(args) == 2:
+1606            if args[0] is type(None):
+1607                return f'typing.Optional[{_type_repr(args[1])}]'
+1608            elif args[1] is type(None):
+1609                return f'typing.Optional[{_type_repr(args[0])}]'
+1610        return super().__repr__()
+1611
+1612    def __instancecheck__(self, obj):
+1613        return self.__subclasscheck__(type(obj))
 1614
-1615
-1616def _value_and_type_iter(parameters):
-1617    return ((p, type(p)) for p in parameters)
-1618
+1615    def __subclasscheck__(self, cls):
+1616        for arg in self.__args__:
+1617            if issubclass(cls, arg):
+1618                return True
 1619
-1620class _LiteralGenericAlias(_GenericAlias, _root=True):
-1621    def __eq__(self, other):
-1622        if not isinstance(other, _LiteralGenericAlias):
-1623            return NotImplemented
+1620    def __reduce__(self):
+1621        func, (origin, args) = super().__reduce__()
+1622        return func, (Union, args)
+1623
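# Illustrative sketch of the Union behaviour defined above: order-insensitive
# equality, the Optional repr special case, and the instance checks this class
# enables; expectations are based on the implementation shown here.
from typing import Optional, Union

print(Union[int, str] == Union[str, int])    # expected True: compared as a set
print(Union[int, None])                      # expected repr: typing.Optional[int]
print(Optional[int] == Union[None, int])     # expected True
print(isinstance("x", Union[int, str]))      # expected True via __instancecheck__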
 1624
-1625        return set(_value_and_type_iter(self.__args__)) == set(_value_and_type_iter(other.__args__))
-1626
-1627    def __hash__(self):
-1628        return hash(frozenset(_value_and_type_iter(self.__args__)))
-1629
-1630
-1631class _ConcatenateGenericAlias(_GenericAlias, _root=True):
-1632    def copy_with(self, params):
-1633        if isinstance(params[-1], (list, tuple)):
-1634            return (*params[:-1], *params[-1])
-1635        if isinstance(params[-1], _ConcatenateGenericAlias):
-1636            params = (*params[:-1], *params[-1].__args__)
-1637        return super().copy_with(params)
+1625def _value_and_type_iter(parameters):
+1626    return ((p, type(p)) for p in parameters)
+1627
+1628
+1629class _LiteralGenericAlias(_GenericAlias, _root=True):
+1630    def __eq__(self, other):
+1631        if not isinstance(other, _LiteralGenericAlias):
+1632            return NotImplemented
+1633
+1634        return set(_value_and_type_iter(self.__args__)) == set(_value_and_type_iter(other.__args__))
+1635
+1636    def __hash__(self):
+1637        return hash(frozenset(_value_and_type_iter(self.__args__)))
 1638
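# Illustrative sketch: because _value_and_type_iter pairs each value with its
# type, Literal comparisons keep 1 and True distinct even though 1 == True.
from typing import Literal

print(Literal[1, 2] == Literal[2, 1])   # expected True: compared as a set of pairs
print(Literal[1] == Literal[True])      # expected False: (1, int) != (True, bool)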
 1639
-1640@_SpecialForm
-1641def Unpack(self, parameters):
-1642    """Type unpack operator.
-1643
-1644    The type unpack operator takes the child types from some container type,
-1645    such as `tuple[int, str]` or a `TypeVarTuple`, and 'pulls them out'.
-1646
-1647    For example::
+1640class _ConcatenateGenericAlias(_GenericAlias, _root=True):
+1641    def copy_with(self, params):
+1642        if isinstance(params[-1], (list, tuple)):
+1643            return (*params[:-1], *params[-1])
+1644        if isinstance(params[-1], _ConcatenateGenericAlias):
+1645            params = (*params[:-1], *params[-1].__args__)
+1646        return super().copy_with(params)
+1647
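# Illustrative sketch of how Concatenate is normally written; the decorator
# below is hypothetical and shows the usual PEP 612 pattern of prepending a
# parameter that the wrapper supplies itself.
from typing import Callable, Concatenate, ParamSpec, TypeVar

P = ParamSpec('P')
R = TypeVar('R')

def provides_first(func: Callable[Concatenate[object, P], R]) -> Callable[P, R]:
    def inner(*args: P.args, **kwargs: P.kwargs) -> R:
        # Supply the leading argument here; callers pass only the P parameters.
        return func(object(), *args, **kwargs)
    return inner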
 1648
-1649        # For some generic class `Foo`:
-1650        Foo[Unpack[tuple[int, str]]]  # Equivalent to Foo[int, str]
-1651
-1652        Ts = TypeVarTuple('Ts')
-1653        # Specifies that `Bar` is generic in an arbitrary number of types.
-1654        # (Think of `Ts` as a tuple of an arbitrary number of individual
-1655        #  `TypeVar`s, which the `Unpack` is 'pulling out' directly into the
-1656        #  `Generic[]`.)
-1657        class Bar(Generic[Unpack[Ts]]): ...
-1658        Bar[int]  # Valid
-1659        Bar[int, str]  # Also valid
+1649@_SpecialForm
+1650def Unpack(self, parameters):
+1651    """Type unpack operator.
+1652
+1653    The type unpack operator takes the child types from some container type,
+1654    such as `tuple[int, str]` or a `TypeVarTuple`, and 'pulls them out'.
+1655
+1656    For example::
+1657
+1658        # For some generic class `Foo`:
+1659        Foo[Unpack[tuple[int, str]]]  # Equivalent to Foo[int, str]
 1660
-1661    From Python 3.11, this can also be done using the `*` operator::
-1662
-1663        Foo[*tuple[int, str]]
-1664        class Bar(Generic[*Ts]): ...
-1665
-1666    And from Python 3.12, it can be done using built-in syntax for generics::
-1667
-1668        Foo[*tuple[int, str]]
-1669        class Bar[*Ts]: ...
-1670
-1671    The operator can also be used along with a `TypedDict` to annotate
-1672    `**kwargs` in a function signature::
-1673
-1674        class Movie(TypedDict):
-1675            name: str
-1676            year: int
-1677
-1678        # This function expects two keyword arguments - *name* of type `str` and
-1679        # *year* of type `int`.
-1680        def foo(**kwargs: Unpack[Movie]): ...
-1681
-1682    Note that there is only some runtime checking of this operator. Not
-1683    everything the runtime allows may be accepted by static type checkers.
-1684
-1685    For more information, see PEPs 646 and 692.
-1686    """
-1687    item = _type_check(parameters, f'{self} accepts only single type.')
-1688    return _UnpackGenericAlias(origin=self, args=(item,))
-1689
+1661        Ts = TypeVarTuple('Ts')
+1662        # Specifies that `Bar` is generic in an arbitrary number of types.
+1663        # (Think of `Ts` as a tuple of an arbitrary number of individual
+1664        #  `TypeVar`s, which the `Unpack` is 'pulling out' directly into the
+1665        #  `Generic[]`.)
+1666        class Bar(Generic[Unpack[Ts]]): ...
+1667        Bar[int]  # Valid
+1668        Bar[int, str]  # Also valid
+1669
+1670    From Python 3.11, this can also be done using the `*` operator::
+1671
+1672        Foo[*tuple[int, str]]
+1673        class Bar(Generic[*Ts]): ...
+1674
+1675    And from Python 3.12, it can be done using built-in syntax for generics::
+1676
+1677        Foo[*tuple[int, str]]
+1678        class Bar[*Ts]: ...
+1679
+1680    The operator can also be used along with a `TypedDict` to annotate
+1681    `**kwargs` in a function signature::
+1682
+1683        class Movie(TypedDict):
+1684            name: str
+1685            year: int
+1686
+1687        # This function expects two keyword arguments - *name* of type `str` and
+1688        # *year* of type `int`.
+1689        def foo(**kwargs: Unpack[Movie]): ...
 1690
-1691class _UnpackGenericAlias(_GenericAlias, _root=True):
-1692    def __repr__(self):
-1693        # `Unpack` only takes one argument, so __args__ should contain only
-1694        # a single item.
-1695        return f'typing.Unpack[{_type_repr(self.__args__[0])}]'
-1696
-1697    def __getitem__(self, args):
-1698        if self.__typing_is_unpacked_typevartuple__:
-1699            return args
-1700        return super().__getitem__(args)
-1701
-1702    @property
-1703    def __typing_unpacked_tuple_args__(self):
-1704        assert self.__origin__ is Unpack
-1705        assert len(self.__args__) == 1
-1706        arg, = self.__args__
-1707        if isinstance(arg, (_GenericAlias, types.GenericAlias)):
-1708            if arg.__origin__ is not tuple:
-1709                raise TypeError("Unpack[...] must be used with a tuple type")
-1710            return arg.__args__
-1711        return None
-1712
-1713    @property
-1714    def __typing_is_unpacked_typevartuple__(self):
-1715        assert self.__origin__ is Unpack
-1716        assert len(self.__args__) == 1
-1717        return isinstance(self.__args__[0], TypeVarTuple)
-1718
-1719
-1720class _TypingEllipsis:
-1721    """Internal placeholder for ... (ellipsis)."""
-1722
-1723
-1724_TYPING_INTERNALS = frozenset({
-1725    '__parameters__', '__orig_bases__',  '__orig_class__',
-1726    '_is_protocol', '_is_runtime_protocol', '__protocol_attrs__',
-1727    '__non_callable_proto_members__', '__type_params__',
-1728})
-1729
-1730_SPECIAL_NAMES = frozenset({
-1731    '__abstractmethods__', '__annotations__', '__dict__', '__doc__',
-1732    '__init__', '__module__', '__new__', '__slots__',
-1733    '__subclasshook__', '__weakref__', '__class_getitem__'
-1734})
-1735
-1736# These special attributes will be not collected as protocol members.
-1737EXCLUDED_ATTRIBUTES = _TYPING_INTERNALS | _SPECIAL_NAMES | {'_MutableMapping__marker'}
+1691    Note that there is only some runtime checking of this operator. Not
+1692    everything the runtime allows may be accepted by static type checkers.
+1693
+1694    For more information, see PEPs 646 and 692.
+1695    """
+1696    item = _type_check(parameters, f'{self} accepts only single type.')
+1697    return _UnpackGenericAlias(origin=self, args=(item,))
+1698
+1699
+1700class _UnpackGenericAlias(_GenericAlias, _root=True):
+1701    def __repr__(self):
+1702        # `Unpack` only takes one argument, so __args__ should contain only
+1703        # a single item.
+1704        return f'typing.Unpack[{_type_repr(self.__args__[0])}]'
+1705
+1706    def __getitem__(self, args):
+1707        if self.__typing_is_unpacked_typevartuple__:
+1708            return args
+1709        return super().__getitem__(args)
+1710
+1711    @property
+1712    def __typing_unpacked_tuple_args__(self):
+1713        assert self.__origin__ is Unpack
+1714        assert len(self.__args__) == 1
+1715        arg, = self.__args__
+1716        if isinstance(arg, (_GenericAlias, types.GenericAlias)):
+1717            if arg.__origin__ is not tuple:
+1718                raise TypeError("Unpack[...] must be used with a tuple type")
+1719            return arg.__args__
+1720        return None
+1721
+1722    @property
+1723    def __typing_is_unpacked_typevartuple__(self):
+1724        assert self.__origin__ is Unpack
+1725        assert len(self.__args__) == 1
+1726        return isinstance(self.__args__[0], TypeVarTuple)
+1727
+1728
+1729class _TypingEllipsis:
+1730    """Internal placeholder for ... (ellipsis)."""
+1731
+1732
+1733_TYPING_INTERNALS = frozenset({
+1734    '__parameters__', '__orig_bases__',  '__orig_class__',
+1735    '_is_protocol', '_is_runtime_protocol', '__protocol_attrs__',
+1736    '__non_callable_proto_members__', '__type_params__',
+1737})
 1738
-1739
-1740def _get_protocol_attrs(cls):
-1741    """Collect protocol members from a protocol class objects.
-1742
-1743    This includes names actually defined in the class dictionary, as well
-1744    as names that appear in annotations. Special names (above) are skipped.
-1745    """
-1746    attrs = set()
-1747    for base in cls.__mro__[:-1]:  # without object
-1748        if base.__name__ in {'Protocol', 'Generic'}:
-1749            continue
-1750        annotations = getattr(base, '__annotations__', {})
-1751        for attr in (*base.__dict__, *annotations):
-1752            if not attr.startswith('_abc_') and attr not in EXCLUDED_ATTRIBUTES:
-1753                attrs.add(attr)
-1754    return attrs
-1755
-1756
-1757def _no_init_or_replace_init(self, *args, **kwargs):
-1758    cls = type(self)
-1759
-1760    if cls._is_protocol:
-1761        raise TypeError('Protocols cannot be instantiated')
-1762
-1763    # Already using a custom `__init__`. No need to calculate correct
-1764    # `__init__` to call. This can lead to RecursionError. See bpo-45121.
-1765    if cls.__init__ is not _no_init_or_replace_init:
-1766        return
-1767
-1768    # Initially, `__init__` of a protocol subclass is set to `_no_init_or_replace_init`.
-1769    # The first instantiation of the subclass will call `_no_init_or_replace_init` which
-1770    # searches for a proper new `__init__` in the MRO. The new `__init__`
-1771    # replaces the subclass' old `__init__` (ie `_no_init_or_replace_init`). Subsequent
-1772    # instantiation of the protocol subclass will thus use the new
-1773    # `__init__` and no longer call `_no_init_or_replace_init`.
-1774    for base in cls.__mro__:
-1775        init = base.__dict__.get('__init__', _no_init_or_replace_init)
-1776        if init is not _no_init_or_replace_init:
-1777            cls.__init__ = init
-1778            break
-1779    else:
-1780        # should not happen
-1781        cls.__init__ = object.__init__
-1782
-1783    cls.__init__(self, *args, **kwargs)
-1784
-1785
-1786def _caller(depth=1, default='__main__'):
-1787    try:
-1788        return sys._getframemodulename(depth + 1) or default
-1789    except AttributeError:  # For platforms without _getframemodulename()
-1790        pass
-1791    try:
-1792        return sys._getframe(depth + 1).f_globals.get('__name__', default)
-1793    except (AttributeError, ValueError):  # For platforms without _getframe()
-1794        pass
-1795    return None
-1796
-1797def _allow_reckless_class_checks(depth=2):
-1798    """Allow instance and class checks for special stdlib modules.
-1799
-1800    The abc and functools modules indiscriminately call isinstance() and
-1801    issubclass() on the whole MRO of a user class, which may contain protocols.
-1802    """
-1803    return _caller(depth) in {'abc', 'functools', None}
-1804
+1739_SPECIAL_NAMES = frozenset({
+1740    '__abstractmethods__', '__annotations__', '__dict__', '__doc__',
+1741    '__init__', '__module__', '__new__', '__slots__',
+1742    '__subclasshook__', '__weakref__', '__class_getitem__'
+1743})
+1744
+1745# These special attributes will not be collected as protocol members.
+1746EXCLUDED_ATTRIBUTES = _TYPING_INTERNALS | _SPECIAL_NAMES | {'_MutableMapping__marker'}
+1747
+1748
+1749def _get_protocol_attrs(cls):
+1750    """Collect protocol members from a protocol class object.
+1751
+1752    This includes names actually defined in the class dictionary, as well
+1753    as names that appear in annotations. Special names (above) are skipped.
+1754    """
+1755    attrs = set()
+1756    for base in cls.__mro__[:-1]:  # without object
+1757        if base.__name__ in {'Protocol', 'Generic'}:
+1758            continue
+1759        annotations = getattr(base, '__annotations__', {})
+1760        for attr in (*base.__dict__, *annotations):
+1761            if not attr.startswith('_abc_') and attr not in EXCLUDED_ATTRIBUTES:
+1762                attrs.add(attr)
+1763    return attrs
+1764
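# Illustrative sketch with a hypothetical protocol: members are collected both
# from the class dictionary (methods) and from __annotations__ (data members),
# while the special names above are skipped. __protocol_attrs__ is filled in
# later by _ProtocolMeta.__init__.
from typing import Protocol

class HasSizeAndName(Protocol):
    name: str                     # collected from __annotations__

    def size(self) -> int: ...    # collected from the class dictionary

print(HasSizeAndName.__protocol_attrs__)   # expected to contain 'name' and 'size'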
+1765
+1766def _no_init_or_replace_init(self, *args, **kwargs):
+1767    cls = type(self)
+1768
+1769    if cls._is_protocol:
+1770        raise TypeError('Protocols cannot be instantiated')
+1771
+1772    # Already using a custom `__init__`. No need to calculate correct
+1773    # `__init__` to call. This can lead to RecursionError. See bpo-45121.
+1774    if cls.__init__ is not _no_init_or_replace_init:
+1775        return
+1776
+1777    # Initially, `__init__` of a protocol subclass is set to `_no_init_or_replace_init`.
+1778    # The first instantiation of the subclass will call `_no_init_or_replace_init` which
+1779    # searches for a proper new `__init__` in the MRO. The new `__init__`
+1780    # replaces the subclass' old `__init__` (ie `_no_init_or_replace_init`). Subsequent
+1781    # instantiation of the protocol subclass will thus use the new
+1782    # `__init__` and no longer call `_no_init_or_replace_init`.
+1783    for base in cls.__mro__:
+1784        init = base.__dict__.get('__init__', _no_init_or_replace_init)
+1785        if init is not _no_init_or_replace_init:
+1786            cls.__init__ = init
+1787            break
+1788    else:
+1789        # should not happen
+1790        cls.__init__ = object.__init__
+1791
+1792    cls.__init__(self, *args, **kwargs)
+1793
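# Illustrative sketch with hypothetical classes: a protocol class cannot be
# instantiated, while a concrete subclass can; on its first instantiation the
# placeholder __init__ above is replaced by the real __init__ found on the MRO.
from typing import Protocol

class Greeter(Protocol):
    def greet(self) -> str: ...

class English(Greeter):
    def greet(self) -> str:
        return "hello"

try:
    Greeter()
except TypeError as exc:
    print(exc)               # "Protocols cannot be instantiated"

print(English().greet())     # a concrete subclass instantiates normally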
+1794
+1795def _caller(depth=1, default='__main__'):
+1796    try:
+1797        return sys._getframemodulename(depth + 1) or default
+1798    except AttributeError:  # For platforms without _getframemodulename()
+1799        pass
+1800    try:
+1801        return sys._getframe(depth + 1).f_globals.get('__name__', default)
+1802    except (AttributeError, ValueError):  # For platforms without _getframe()
+1803        pass
+1804    return None
 1805
-1806_PROTO_ALLOWLIST = {
-1807    'collections.abc': [
-1808        'Callable', 'Awaitable', 'Iterable', 'Iterator', 'AsyncIterable',
-1809        'Hashable', 'Sized', 'Container', 'Collection', 'Reversible', 'Buffer',
-1810    ],
-1811    'contextlib': ['AbstractContextManager', 'AbstractAsyncContextManager'],
-1812}
+1806def _allow_reckless_class_checks(depth=2):
+1807    """Allow instance and class checks for special stdlib modules.
+1808
+1809    The abc and functools modules indiscriminately call isinstance() and
+1810    issubclass() on the whole MRO of a user class, which may contain protocols.
+1811    """
+1812    return _caller(depth) in {'abc', 'functools', None}
 1813
 1814
-1815@functools.cache
-1816def _lazy_load_getattr_static():
-1817    # Import getattr_static lazily so as not to slow down the import of typing.py
-1818    # Cache the result so we don't slow down _ProtocolMeta.__instancecheck__ unnecessarily
-1819    from inspect import getattr_static
-1820    return getattr_static
-1821
+1815_PROTO_ALLOWLIST = {
+1816    'collections.abc': [
+1817        'Callable', 'Awaitable', 'Iterable', 'Iterator', 'AsyncIterable',
+1818        'Hashable', 'Sized', 'Container', 'Collection', 'Reversible', 'Buffer',
+1819    ],
+1820    'contextlib': ['AbstractContextManager', 'AbstractAsyncContextManager'],
+1821}
 1822
-1823_cleanups.append(_lazy_load_getattr_static.cache_clear)
-1824
-1825def _pickle_psargs(psargs):
-1826    return ParamSpecArgs, (psargs.__origin__,)
-1827
-1828copyreg.pickle(ParamSpecArgs, _pickle_psargs)
-1829
-1830def _pickle_pskwargs(pskwargs):
-1831    return ParamSpecKwargs, (pskwargs.__origin__,)
-1832
-1833copyreg.pickle(ParamSpecKwargs, _pickle_pskwargs)
-1834
-1835del _pickle_psargs, _pickle_pskwargs
+1823
+1824@functools.cache
+1825def _lazy_load_getattr_static():
+1826    # Import getattr_static lazily so as not to slow down the import of typing.py
+1827    # Cache the result so we don't slow down _ProtocolMeta.__instancecheck__ unnecessarily
+1828    from inspect import getattr_static
+1829    return getattr_static
+1830
+1831
+1832_cleanups.append(_lazy_load_getattr_static.cache_clear)
+1833
+1834def _pickle_psargs(psargs):
+1835    return ParamSpecArgs, (psargs.__origin__,)
 1836
-1837
-1838class _ProtocolMeta(ABCMeta):
-1839    # This metaclass is somewhat unfortunate,
-1840    # but is necessary for several reasons...
-1841    def __new__(mcls, name, bases, namespace, /, **kwargs):
-1842        if name == "Protocol" and bases == (Generic,):
-1843            pass
-1844        elif Protocol in bases:
-1845            for base in bases:
-1846                if not (
-1847                    base in {object, Generic}
-1848                    or base.__name__ in _PROTO_ALLOWLIST.get(base.__module__, [])
-1849                    or (
-1850                        issubclass(base, Generic)
-1851                        and getattr(base, "_is_protocol", False)
-1852                    )
-1853                ):
-1854                    raise TypeError(
-1855                        f"Protocols can only inherit from other protocols, "
-1856                        f"got {base!r}"
-1857                    )
-1858        return super().__new__(mcls, name, bases, namespace, **kwargs)
-1859
-1860    def __init__(cls, *args, **kwargs):
-1861        super().__init__(*args, **kwargs)
-1862        if getattr(cls, "_is_protocol", False):
-1863            cls.__protocol_attrs__ = _get_protocol_attrs(cls)
-1864
-1865    def __subclasscheck__(cls, other):
-1866        if cls is Protocol:
-1867            return type.__subclasscheck__(cls, other)
-1868        if (
-1869            getattr(cls, '_is_protocol', False)
-1870            and not _allow_reckless_class_checks()
-1871        ):
-1872            if not isinstance(other, type):
-1873                # Same error message as for issubclass(1, int).
-1874                raise TypeError('issubclass() arg 1 must be a class')
-1875            if not getattr(cls, '_is_runtime_protocol', False):
-1876                raise TypeError(
-1877                    "Instance and class checks can only be used with "
-1878                    "@runtime_checkable protocols"
-1879                )
-1880            if (
-1881                # this attribute is set by @runtime_checkable:
-1882                cls.__non_callable_proto_members__
-1883                and cls.__dict__.get("__subclasshook__") is _proto_hook
-1884            ):
+1837copyreg.pickle(ParamSpecArgs, _pickle_psargs)
+1838
+1839def _pickle_pskwargs(pskwargs):
+1840    return ParamSpecKwargs, (pskwargs.__origin__,)
+1841
+1842copyreg.pickle(ParamSpecKwargs, _pickle_pskwargs)
+1843
+1844del _pickle_psargs, _pickle_pskwargs
+1845
+1846
+1847class _ProtocolMeta(ABCMeta):
+1848    # This metaclass is somewhat unfortunate,
+1849    # but is necessary for several reasons...
+1850    def __new__(mcls, name, bases, namespace, /, **kwargs):
+1851        if name == "Protocol" and bases == (Generic,):
+1852            pass
+1853        elif Protocol in bases:
+1854            for base in bases:
+1855                if not (
+1856                    base in {object, Generic}
+1857                    or base.__name__ in _PROTO_ALLOWLIST.get(base.__module__, [])
+1858                    or (
+1859                        issubclass(base, Generic)
+1860                        and getattr(base, "_is_protocol", False)
+1861                    )
+1862                ):
+1863                    raise TypeError(
+1864                        f"Protocols can only inherit from other protocols, "
+1865                        f"got {base!r}"
+1866                    )
+1867        return super().__new__(mcls, name, bases, namespace, **kwargs)
+1868
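# Illustrative sketch with hypothetical classes: the loop above rejects a
# protocol whose bases are neither protocols nor entries in _PROTO_ALLOWLIST.
from typing import Protocol

class Readable(Protocol):
    def read(self) -> bytes: ...

class Seekable(Readable, Protocol):    # fine: every base is a protocol
    def seek(self, pos: int) -> None: ...

try:
    class Broken(int, Protocol):       # rejected: int is not a protocol
        pass
except TypeError as exc:
    print(exc)   # e.g. "Protocols can only inherit from other protocols, got <class 'int'>"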
+1869    def __init__(cls, *args, **kwargs):
+1870        super().__init__(*args, **kwargs)
+1871        if getattr(cls, "_is_protocol", False):
+1872            cls.__protocol_attrs__ = _get_protocol_attrs(cls)
+1873
+1874    def __subclasscheck__(cls, other):
+1875        if cls is Protocol:
+1876            return type.__subclasscheck__(cls, other)
+1877        if (
+1878            getattr(cls, '_is_protocol', False)
+1879            and not _allow_reckless_class_checks()
+1880        ):
+1881            if not isinstance(other, type):
+1882                # Same error message as for issubclass(1, int).
+1883                raise TypeError('issubclass() arg 1 must be a class')
+1884            if not getattr(cls, '_is_runtime_protocol', False):
 1885                raise TypeError(
-1886                    "Protocols with non-method members don't support issubclass()"
-1887                )
-1888        return super().__subclasscheck__(other)
-1889
-1890    def __instancecheck__(cls, instance):
-1891        # We need this method for situations where attributes are
-1892        # assigned in __init__.
-1893        if cls is Protocol:
-1894            return type.__instancecheck__(cls, instance)
-1895        if not getattr(cls, "_is_protocol", False):
-1896            # i.e., it's a concrete subclass of a protocol
-1897            return super().__instancecheck__(instance)
+1886                    "Instance and class checks can only be used with "
+1887                    "@runtime_checkable protocols"
+1888                )
+1889            if (
+1890                # this attribute is set by @runtime_checkable:
+1891                cls.__non_callable_proto_members__
+1892                and cls.__dict__.get("__subclasshook__") is _proto_hook
+1893            ):
+1894                raise TypeError(
+1895                    "Protocols with non-method members don't support issubclass()"
+1896                )
+1897        return super().__subclasscheck__(other)
 1898
-1899        if (
-1900            not getattr(cls, '_is_runtime_protocol', False) and
-1901            not _allow_reckless_class_checks()
-1902        ):
-1903            raise TypeError("Instance and class checks can only be used with"
-1904                            " @runtime_checkable protocols")
-1905
-1906        if super().__instancecheck__(instance):
-1907            return True
-1908
-1909        getattr_static = _lazy_load_getattr_static()
-1910        for attr in cls.__protocol_attrs__:
-1911            try:
-1912                val = getattr_static(instance, attr)
-1913            except AttributeError:
-1914                break
-1915            # this attribute is set by @runtime_checkable:
-1916            if val is None and attr not in cls.__non_callable_proto_members__:
-1917                break
-1918        else:
-1919            return True
-1920
-1921        return False
-1922
-1923
-1924@classmethod
-1925def _proto_hook(cls, other):
-1926    if not cls.__dict__.get('_is_protocol', False):
-1927        return NotImplemented
-1928
-1929    for attr in cls.__protocol_attrs__:
-1930        for base in other.__mro__:
-1931            # Check if the members appears in the class dictionary...
-1932            if attr in base.__dict__:
-1933                if base.__dict__[attr] is None:
-1934                    return NotImplemented
-1935                break
-1936
-1937            # ...or in annotations, if it is a sub-protocol.
-1938            annotations = getattr(base, '__annotations__', {})
-1939            if (isinstance(annotations, collections.abc.Mapping) and
-1940                    attr in annotations and
-1941                    issubclass(other, Generic) and getattr(other, '_is_protocol', False)):
-1942                break
-1943        else:
-1944            return NotImplemented
-1945    return True
-1946
-1947
-1948class Protocol(Generic, metaclass=_ProtocolMeta):
-1949    """Base class for protocol classes.
-1950
-1951    Protocol classes are defined as::
-1952
-1953        class Proto(Protocol):
-1954            def meth(self) -> int:
-1955                ...
+1899    def __instancecheck__(cls, instance):
+1900        # We need this method for situations where attributes are
+1901        # assigned in __init__.
+1902        if cls is Protocol:
+1903            return type.__instancecheck__(cls, instance)
+1904        if not getattr(cls, "_is_protocol", False):
+1905            # i.e., it's a concrete subclass of a protocol
+1906            return super().__instancecheck__(instance)
+1907
+1908        if (
+1909            not getattr(cls, '_is_runtime_protocol', False) and
+1910            not _allow_reckless_class_checks()
+1911        ):
+1912            raise TypeError("Instance and class checks can only be used with"
+1913                            " @runtime_checkable protocols")
+1914
+1915        if super().__instancecheck__(instance):
+1916            return True
+1917
+1918        getattr_static = _lazy_load_getattr_static()
+1919        for attr in cls.__protocol_attrs__:
+1920            try:
+1921                val = getattr_static(instance, attr)
+1922            except AttributeError:
+1923                break
+1924            # this attribute is set by @runtime_checkable:
+1925            if val is None and attr not in cls.__non_callable_proto_members__:
+1926                break
+1927        else:
+1928            return True
+1929
+1930        return False
+1931
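# Illustrative sketch with hypothetical classes: isinstance() against a
# @runtime_checkable protocol falls back to the structural attribute scan
# above, while issubclass() is only permitted when every member is a method.
from typing import Protocol, runtime_checkable

@runtime_checkable
class Closable(Protocol):
    def close(self) -> None: ...

@runtime_checkable
class Named(Protocol):
    name: str                # a non-method member

class Resource:
    def __init__(self) -> None:
        self.name = "r1"
    def close(self) -> None:
        pass

r = Resource()
print(isinstance(r, Closable))          # expected True: close() is present
print(isinstance(r, Named))             # expected True: found via getattr_static
print(issubclass(Resource, Closable))   # expected True: method-only protocol

try:
    issubclass(Resource, Named)
except TypeError as exc:
    print(exc)   # data members don't support issubclass(), as checked above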
+1932
+1933@classmethod
+1934def _proto_hook(cls, other):
+1935    if not cls.__dict__.get('_is_protocol', False):
+1936        return NotImplemented
+1937
+1938    for attr in cls.__protocol_attrs__:
+1939        for base in other.__mro__:
+1940            # Check if the member appears in the class dictionary...
+1941            if attr in base.__dict__:
+1942                if base.__dict__[attr] is None:
+1943                    return NotImplemented
+1944                break
+1945
+1946            # ...or in annotations, if it is a sub-protocol.
+1947            annotations = getattr(base, '__annotations__', {})
+1948            if (isinstance(annotations, collections.abc.Mapping) and
+1949                    attr in annotations and
+1950                    issubclass(other, Generic) and getattr(other, '_is_protocol', False)):
+1951                break
+1952        else:
+1953            return NotImplemented
+1954    return True
+1955
 1956
-1957    Such classes are primarily used with static type checkers that recognize
-1958    structural subtyping (static duck-typing).
+1957class Protocol(Generic, metaclass=_ProtocolMeta):
+1958    """Base class for protocol classes.
 1959
-1960    For example::
+1960    Protocol classes are defined as::
 1961
-1962        class C:
+1962        class Proto(Protocol):
 1963            def meth(self) -> int:
-1964                return 0
+1964                ...
 1965
-1966        def func(x: Proto) -> int:
-1967            return x.meth()
+1966    Such classes are primarily used with static type checkers that recognize
+1967    structural subtyping (static duck-typing).
 1968
-1969        func(C())  # Passes static type check
+1969    For example::
 1970
-1971    See PEP 544 for details. Protocol classes decorated with
-1972    @typing.runtime_checkable act as simple-minded runtime protocols that check
-1973    only the presence of given attributes, ignoring their type signatures.
-1974    Protocol classes can be generic, they are defined as::
-1975
-1976        class GenProto[T](Protocol):
-1977            def meth(self) -> T:
-1978                ...
-1979    """
-1980
-1981    __slots__ = ()
-1982    _is_protocol = True
-1983    _is_runtime_protocol = False
+1971        class C:
+1972            def meth(self) -> int:
+1973                return 0
+1974
+1975        def func(x: Proto) -> int:
+1976            return x.meth()
+1977
+1978        func(C())  # Passes static type check
+1979
+1980    See PEP 544 for details. Protocol classes decorated with
+1981    @typing.runtime_checkable act as simple-minded runtime protocols that check
+1982    only the presence of given attributes, ignoring their type signatures.
+1983    Protocol classes can be generic; they are defined as::
 1984
-1985    def __init_subclass__(cls, *args, **kwargs):
-1986        super().__init_subclass__(*args, **kwargs)
-1987
-1988        # Determine if this is a protocol or a concrete subclass.
-1989        if not cls.__dict__.get('_is_protocol', False):
-1990            cls._is_protocol = any(b is Protocol for b in cls.__bases__)
-1991
-1992        # Set (or override) the protocol subclass hook.
-1993        if '__subclasshook__' not in cls.__dict__:
-1994            cls.__subclasshook__ = _proto_hook
-1995
-1996        # Prohibit instantiation for protocol classes
-1997        if cls._is_protocol and cls.__init__ is Protocol.__init__:
-1998            cls.__init__ = _no_init_or_replace_init
-1999
+1985        class GenProto[T](Protocol):
+1986            def meth(self) -> T:
+1987                ...
+1988    """
+1989
+1990    __slots__ = ()
+1991    _is_protocol = True
+1992    _is_runtime_protocol = False
+1993
+1994    def __init_subclass__(cls, *args, **kwargs):
+1995        super().__init_subclass__(*args, **kwargs)
+1996
+1997        # Determine if this is a protocol or a concrete subclass.
+1998        if not cls.__dict__.get('_is_protocol', False):
+1999            cls._is_protocol = any(b is Protocol for b in cls.__bases__)
 2000
-2001class _AnnotatedAlias(_NotIterable, _GenericAlias, _root=True):
-2002    """Runtime representation of an annotated type.
-2003
-2004    At its core 'Annotated[t, dec1, dec2, ...]' is an alias for the type 't'
-2005    with extra annotations. The alias behaves like a normal typing alias.
-2006    Instantiating is the same as instantiating the underlying type; binding
-2007    it to types is also the same.
+2001        # Set (or override) the protocol subclass hook.
+2002        if '__subclasshook__' not in cls.__dict__:
+2003            cls.__subclasshook__ = _proto_hook
+2004
+2005        # Prohibit instantiation for protocol classes
+2006        if cls._is_protocol and cls.__init__ is Protocol.__init__:
+2007            cls.__init__ = _no_init_or_replace_init
 2008
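# Illustrative sketch with hypothetical classes: __init_subclass__ above keeps
# a class a protocol only when Protocol appears directly in its bases;
# otherwise it becomes an ordinary, instantiable class.
from typing import Protocol

class HasArea(Protocol):
    def area(self) -> float: ...

class Square(HasArea):                    # concrete: Protocol not in bases
    def __init__(self, side: float) -> None:
        self.side = side
    def area(self) -> float:
        return self.side * self.side

class HasPerimeter(HasArea, Protocol):    # still a protocol
    def perimeter(self) -> float: ...

print(Square(2.0).area())                 # 4.0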
-2009    The metadata itself is stored in a '__metadata__' attribute as a tuple.
-2010    """
-2011
-2012    def __init__(self, origin, metadata):
-2013        if isinstance(origin, _AnnotatedAlias):
-2014            metadata = origin.__metadata__ + metadata
-2015            origin = origin.__origin__
-2016        super().__init__(origin, origin, name='Annotated')
-2017        self.__metadata__ = metadata
-2018
-2019    def copy_with(self, params):
-2020        assert len(params) == 1
-2021        new_type = params[0]
-2022        return _AnnotatedAlias(new_type, self.__metadata__)
-2023
-2024    def __repr__(self):
-2025        return "typing.Annotated[{}, {}]".format(
-2026            _type_repr(self.__origin__),
-2027            ", ".join(repr(a) for a in self.__metadata__)
-2028        )
-2029
-2030    def __reduce__(self):
-2031        return operator.getitem, (
-2032            Annotated, (self.__origin__,) + self.__metadata__
-2033        )
-2034
-2035    def __eq__(self, other):
-2036        if not isinstance(other, _AnnotatedAlias):
-2037            return NotImplemented
-2038        return (self.__origin__ == other.__origin__
-2039                and self.__metadata__ == other.__metadata__)
-2040
-2041    def __hash__(self):
-2042        return hash((self.__origin__, self.__metadata__))
+2009
+2010class _AnnotatedAlias(_NotIterable, _GenericAlias, _root=True):
+2011    """Runtime representation of an annotated type.
+2012
+2013    At its core 'Annotated[t, dec1, dec2, ...]' is an alias for the type 't'
+2014    with extra annotations. The alias behaves like a normal typing alias.
+2015    Instantiating is the same as instantiating the underlying type; binding
+2016    it to types is also the same.
+2017
+2018    The metadata itself is stored in a '__metadata__' attribute as a tuple.
+2019    """
+2020
+2021    def __init__(self, origin, metadata):
+2022        if isinstance(origin, _AnnotatedAlias):
+2023            metadata = origin.__metadata__ + metadata
+2024            origin = origin.__origin__
+2025        super().__init__(origin, origin, name='Annotated')
+2026        self.__metadata__ = metadata
+2027
+2028    def copy_with(self, params):
+2029        assert len(params) == 1
+2030        new_type = params[0]
+2031        return _AnnotatedAlias(new_type, self.__metadata__)
+2032
+2033    def __repr__(self):
+2034        return "typing.Annotated[{}, {}]".format(
+2035            _type_repr(self.__origin__),
+2036            ", ".join(repr(a) for a in self.__metadata__)
+2037        )
+2038
+2039    def __reduce__(self):
+2040        return operator.getitem, (
+2041            Annotated, (self.__origin__,) + self.__metadata__
+2042        )
 2043
-2044    def __getattr__(self, attr):
-2045        if attr in {'__name__', '__qualname__'}:
-2046            return 'Annotated'
-2047        return super().__getattr__(attr)
-2048
-2049    def __mro_entries__(self, bases):
-2050        return (self.__origin__,)
-2051
+2044    def __eq__(self, other):
+2045        if not isinstance(other, _AnnotatedAlias):
+2046            return NotImplemented
+2047        return (self.__origin__ == other.__origin__
+2048                and self.__metadata__ == other.__metadata__)
+2049
+2050    def __hash__(self):
+2051        return hash((self.__origin__, self.__metadata__))
 2052
-2053class Annotated:
-2054    """Add context-specific metadata to a type.
-2055
-2056    Example: Annotated[int, runtime_check.Unsigned] indicates to the
-2057    hypothetical runtime_check module that this type is an unsigned int.
-2058    Every other consumer of this type can ignore this metadata and treat
-2059    this type as int.
+2053    def __getattr__(self, attr):
+2054        if attr in {'__name__', '__qualname__'}:
+2055            return 'Annotated'
+2056        return super().__getattr__(attr)
+2057
+2058    def __mro_entries__(self, bases):
+2059        return (self.__origin__,)
 2060
-2061    The first argument to Annotated must be a valid type.
-2062
-2063    Details:
+2061
+2062class Annotated:
+2063    """Add context-specific metadata to a type.
 2064
-2065    - It's an error to call `Annotated` with less than two arguments.
-2066    - Access the metadata via the ``__metadata__`` attribute::
-2067
-2068        assert Annotated[int, '$'].__metadata__ == ('$',)
+2065    Example: Annotated[int, runtime_check.Unsigned] indicates to the
+2066    hypothetical runtime_check module that this type is an unsigned int.
+2067    Every other consumer of this type can ignore this metadata and treat
+2068    this type as int.
 2069
-2070    - Nested Annotated types are flattened::
+2070    The first argument to Annotated must be a valid type.
 2071
-2072        assert Annotated[Annotated[T, Ann1, Ann2], Ann3] == Annotated[T, Ann1, Ann2, Ann3]
+2072    Details:
 2073
-2074    - Instantiating an annotated type is equivalent to instantiating the
-2075    underlying type::
+2074    - It's an error to call `Annotated` with less than two arguments.
+2075    - Access the metadata via the ``__metadata__`` attribute::
 2076
-2077        assert Annotated[C, Ann1](5) == C(5)
+2077        assert Annotated[int, '$'].__metadata__ == ('$',)
 2078
-2079    - Annotated can be used as a generic type alias::
+2079    - Nested Annotated types are flattened::
 2080
-2081        type Optimized[T] = Annotated[T, runtime.Optimize()]
-2082        # type checker will treat Optimized[int]
-2083        # as equivalent to Annotated[int, runtime.Optimize()]
-2084
-2085        type OptimizedList[T] = Annotated[list[T], runtime.Optimize()]
-2086        # type checker will treat OptimizedList[int]
-2087        # as equivalent to Annotated[list[int], runtime.Optimize()]
-2088
-2089    - Annotated cannot be used with an unpacked TypeVarTuple::
-2090
-2091        type Variadic[*Ts] = Annotated[*Ts, Ann1]  # NOT valid
-2092
-2093      This would be equivalent to::
-2094
-2095        Annotated[T1, T2, T3, ..., Ann1]
-2096
-2097      where T1, T2 etc. are TypeVars, which would be invalid, because
-2098      only one type should be passed to Annotated.
-2099    """
-2100
-2101    __slots__ = ()
-2102
-2103    def __new__(cls, *args, **kwargs):
-2104        raise TypeError("Type Annotated cannot be instantiated.")
+2081        assert Annotated[Annotated[T, Ann1, Ann2], Ann3] == Annotated[T, Ann1, Ann2, Ann3]
+2082
+2083    - Instantiating an annotated type is equivalent to instantiating the
+2084    underlying type::
+2085
+2086        assert Annotated[C, Ann1](5) == C(5)
+2087
+2088    - Annotated can be used as a generic type alias::
+2089
+2090        type Optimized[T] = Annotated[T, runtime.Optimize()]
+2091        # type checker will treat Optimized[int]
+2092        # as equivalent to Annotated[int, runtime.Optimize()]
+2093
+2094        type OptimizedList[T] = Annotated[list[T], runtime.Optimize()]
+2095        # type checker will treat OptimizedList[int]
+2096        # as equivalent to Annotated[list[int], runtime.Optimize()]
+2097
+2098    - Annotated cannot be used with an unpacked TypeVarTuple::
+2099
+2100        type Variadic[*Ts] = Annotated[*Ts, Ann1]  # NOT valid
+2101
+2102      This would be equivalent to::
+2103
+2104        Annotated[T1, T2, T3, ..., Ann1]
 2105
-2106    def __class_getitem__(cls, params):
-2107        if not isinstance(params, tuple):
-2108            params = (params,)
-2109        return cls._class_getitem_inner(cls, *params)
-2110
-2111    @_tp_cache(typed=True)
-2112    def _class_getitem_inner(cls, *params):
-2113        if len(params) < 2:
-2114            raise TypeError("Annotated[...] should be used "
-2115                            "with at least two arguments (a type and an "
-2116                            "annotation).")
-2117        if _is_unpacked_typevartuple(params[0]):
-2118            raise TypeError("Annotated[...] should not be used with an "
-2119                            "unpacked TypeVarTuple")
-2120        msg = "Annotated[t, ...]: t must be a type."
-2121        origin = _type_check(params[0], msg, allow_special_forms=True)
-2122        metadata = tuple(params[1:])
-2123        return _AnnotatedAlias(origin, metadata)
-2124
-2125    def __init_subclass__(cls, *args, **kwargs):
-2126        raise TypeError(
-2127            "Cannot subclass {}.Annotated".format(cls.__module__)
-2128        )
-2129
-2130
-2131def runtime_checkable(cls):
-2132    """Mark a protocol class as a runtime protocol.
+2106      where T1, T2 etc. are TypeVars, which would be invalid, because
+2107      only one type should be passed to Annotated.
+2108    """
+2109
+2110    __slots__ = ()
+2111
+2112    def __new__(cls, *args, **kwargs):
+2113        raise TypeError("Type Annotated cannot be instantiated.")
+2114
+2115    def __class_getitem__(cls, params):
+2116        if not isinstance(params, tuple):
+2117            params = (params,)
+2118        return cls._class_getitem_inner(cls, *params)
+2119
+2120    @_tp_cache(typed=True)
+2121    def _class_getitem_inner(cls, *params):
+2122        if len(params) < 2:
+2123            raise TypeError("Annotated[...] should be used "
+2124                            "with at least two arguments (a type and an "
+2125                            "annotation).")
+2126        if _is_unpacked_typevartuple(params[0]):
+2127            raise TypeError("Annotated[...] should not be used with an "
+2128                            "unpacked TypeVarTuple")
+2129        msg = "Annotated[t, ...]: t must be a type."
+2130        origin = _type_check(params[0], msg, allow_special_forms=True)
+2131        metadata = tuple(params[1:])
+2132        return _AnnotatedAlias(origin, metadata)
 2133
-2134    Such protocol can be used with isinstance() and issubclass().
-2135    Raise TypeError if applied to a non-protocol class.
-2136    This allows a simple-minded structural check very similar to
-2137    one trick ponies in collections.abc such as Iterable.
+2134    def __init_subclass__(cls, *args, **kwargs):
+2135        raise TypeError(
+2136            "Cannot subclass {}.Annotated".format(cls.__module__)
+2137        )
 2138
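The bullets added to the Annotated docstring above can be checked at runtime with a short sketch; Marker is a made-up stand-in for the runtime.Optimize() metadata, and a pre-PEP-695 TypeVar alias is used so the substitution is observable::

    from typing import Annotated, TypeVar, get_args, get_origin

    class Marker:
        """Placeholder for the runtime.Optimize() metadata used above."""

    m = Marker()
    T = TypeVar("T")

    # Instantiating an annotated type instantiates the underlying type.
    assert Annotated[int, m]("42") == int("42") == 42

    # Subscripting an Annotated alias substitutes into the underlying type,
    # which is what a type checker does lazily for `type Optimized[T] = ...`.
    Optimized = Annotated[T, m]
    assert Optimized[int] == Annotated[int, m]
    assert get_origin(Optimized[int]) is Annotated
    assert get_args(Optimized[int]) == (int, m)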
-2139    For example::
-2140
-2141        @runtime_checkable
-2142        class Closable(Protocol):
-2143            def close(self): ...
-2144
-2145        assert isinstance(open('/some/file'), Closable)
-2146
-2147    Warning: this will check only the presence of the required methods,
-2148    not their type signatures!
-2149    """
-2150    if not issubclass(cls, Generic) or not getattr(cls, '_is_protocol', False):
-2151        raise TypeError('@runtime_checkable can be only applied to protocol classes,'
-2152                        ' got %r' % cls)
-2153    cls._is_runtime_protocol = True
-2154    # PEP 544 prohibits using issubclass()
-2155    # with protocols that have non-method members.
-2156    # See gh-113320 for why we compute this attribute here,
-2157    # rather than in `_ProtocolMeta.__init__`
-2158    cls.__non_callable_proto_members__ = set()
-2159    for attr in cls.__protocol_attrs__:
-2160        try:
-2161            is_callable = callable(getattr(cls, attr, None))
-2162        except Exception as e:
-2163            raise TypeError(
-2164                f"Failed to determine whether protocol member {attr!r} "
-2165                "is a method member"
-2166            ) from e
-2167        else:
-2168            if not is_callable:
-2169                cls.__non_callable_proto_members__.add(attr)
-2170    return cls
-2171
-2172
-2173def cast(typ, val):
-2174    """Cast a value to a type.
-2175
-2176    This returns the value unchanged.  To the type checker this
-2177    signals that the return value has the designated type, but at
-2178    runtime we intentionally don't check anything (we want this
-2179    to be as fast as possible).
-2180    """
-2181    return val
-2182
-2183
-2184def assert_type(val, typ, /):
-2185    """Ask a static type checker to confirm that the value is of the given type.
-2186
-2187    At runtime this does nothing: it returns the first argument unchanged with no
-2188    checks or side effects, no matter the actual type of the argument.
-2189
-2190    When a static type checker encounters a call to assert_type(), it
-2191    emits an error if the value is not of the specified type::
+2139
+2140def runtime_checkable(cls):
+2141    """Mark a protocol class as a runtime protocol.
+2142
+2143    Such a protocol can be used with isinstance() and issubclass().
+2144    Raise TypeError if applied to a non-protocol class.
+2145    This allows a simple-minded structural check, very similar to the
+2146    one-trick ponies in collections.abc such as Iterable.
+2147
+2148    For example::
+2149
+2150        @runtime_checkable
+2151        class Closable(Protocol):
+2152            def close(self): ...
+2153
+2154        assert isinstance(open('/some/file'), Closable)
+2155
+2156    Warning: this will check only the presence of the required methods,
+2157    not their type signatures!
+2158    """
+2159    if not issubclass(cls, Generic) or not getattr(cls, '_is_protocol', False):
+2160        raise TypeError('@runtime_checkable can be only applied to protocol classes,'
+2161                        ' got %r' % cls)
+2162    cls._is_runtime_protocol = True
+2163    # PEP 544 prohibits using issubclass()
+2164    # with protocols that have non-method members.
+2165    # See gh-113320 for why we compute this attribute here,
+2166    # rather than in `_ProtocolMeta.__init__`
+2167    cls.__non_callable_proto_members__ = set()
+2168    for attr in cls.__protocol_attrs__:
+2169        try:
+2170            is_callable = callable(getattr(cls, attr, None))
+2171        except Exception as e:
+2172            raise TypeError(
+2173                f"Failed to determine whether protocol member {attr!r} "
+2174                "is a method member"
+2175            ) from e
+2176        else:
+2177            if not is_callable:
+2178                cls.__non_callable_proto_members__.add(attr)
+2179    return cls
+2180
+2181
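A small, hypothetical illustration of the warning above: the runtime check is purely structural, so a class with a mismatching signature still passes isinstance()::

    from typing import Protocol, runtime_checkable

    @runtime_checkable
    class SupportsClose(Protocol):
        def close(self) -> None: ...

    class Resource:
        def close(self, force: bool = False) -> str:  # different signature
            return "closed"

    # Only the presence of a callable `close` attribute is checked at runtime;
    # the incompatible signature and return type go unnoticed.
    assert isinstance(Resource(), SupportsClose)
    assert not isinstance(object(), SupportsClose)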
+2182def cast(typ, val):
+2183    """Cast a value to a type.
+2184
+2185    This returns the value unchanged.  To the type checker this
+2186    signals that the return value has the designated type, but at
+2187    runtime we intentionally don't check anything (we want this
+2188    to be as fast as possible).
+2189    """
+2190    return val
+2191
 2192
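A brief sketch of how cast() is typically used, with a made-up parse_port helper; the call only guides the type checker and does no conversion or validation at runtime::

    from typing import cast

    def parse_port(raw: object) -> int:
        # We promise the checker that `raw` is an int-like string here;
        # cast() itself returns the value untouched.
        return int(cast(str, raw))

    assert parse_port("8080") == 8080
    assert cast(int, "oops") == "oops"   # no runtime check or conversion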
-2193        def greet(name: str) -> None:
-2194            assert_type(name, str)  # OK
-2195            assert_type(name, int)  # type checker error
-2196    """
-2197    return val
+2193def assert_type(val, typ, /):
+2194    """Ask a static type checker to confirm that the value is of the given type.
+2195
+2196    At runtime this does nothing: it returns the first argument unchanged with no
+2197    checks or side effects, no matter the actual type of the argument.
 2198
-2199
-2200_allowed_types = (types.FunctionType, types.BuiltinFunctionType,
-2201                  types.MethodType, types.ModuleType,
-2202                  WrapperDescriptorType, MethodWrapperType, MethodDescriptorType)
-2203
-2204
-2205def get_type_hints(obj, globalns=None, localns=None, include_extras=False):
-2206    """Return type hints for an object.
+2199    When a static type checker encounters a call to assert_type(), it
+2200    emits an error if the value is not of the specified type::
+2201
+2202        def greet(name: str) -> None:
+2203            assert_type(name, str)  # OK
+2204            assert_type(name, int)  # type checker error
+2205    """
+2206    return val
 2207
-2208    This is often the same as obj.__annotations__, but it handles
-2209    forward references encoded as string literals and recursively replaces all
-2210    'Annotated[T, ...]' with 'T' (unless 'include_extras=True').
-2211
-2212    The argument may be a module, class, method, or function. The annotations
-2213    are returned as a dictionary. For classes, annotations also include
-2214    inherited members.
-2215
-2216    TypeError is raised if the argument is not of a type that can contain
-2217    annotations, and an empty dictionary is returned if no annotations are
-2218    present.
-2219
-2220    BEWARE -- the behavior of globalns and localns is counterintuitive
-2221    (unless you are familiar with how eval() and exec() work).  The
-2222    search order is locals first, then globals.
-2223
-2224    - If no dict arguments are passed, an attempt is made to use the
-2225      globals from obj (or the respective module's globals for classes),
-2226      and these are also used as the locals.  If the object does not appear
-2227      to have globals, an empty dictionary is used.  For classes, the search
-2228      order is globals first then locals.
-2229
-2230    - If one dict argument is passed, it is used for both globals and
-2231      locals.
+2208
+2209_allowed_types = (types.FunctionType, types.BuiltinFunctionType,
+2210                  types.MethodType, types.ModuleType,
+2211                  WrapperDescriptorType, MethodWrapperType, MethodDescriptorType)
+2212
+2213
+2214def get_type_hints(obj, globalns=None, localns=None, include_extras=False):
+2215    """Return type hints for an object.
+2216
+2217    This is often the same as obj.__annotations__, but it handles
+2218    forward references encoded as string literals and recursively replaces all
+2219    'Annotated[T, ...]' with 'T' (unless 'include_extras=True').
+2220
+2221    The argument may be a module, class, method, or function. The annotations
+2222    are returned as a dictionary. For classes, annotations also include
+2223    inherited members.
+2224
+2225    TypeError is raised if the argument is not of a type that can contain
+2226    annotations, and an empty dictionary is returned if no annotations are
+2227    present.
+2228
+2229    BEWARE -- the behavior of globalns and localns is counterintuitive
+2230    (unless you are familiar with how eval() and exec() work).  The
+2231    search order is locals first, then globals.
 2232
-2233    - If two dict arguments are passed, they specify globals and
-2234      locals, respectively.
-2235    """
-2236    if getattr(obj, '__no_type_check__', None):
-2237        return {}
-2238    # Classes require special treatment.
-2239    if isinstance(obj, type):
-2240        hints = {}
-2241        for base in reversed(obj.__mro__):
-2242            if globalns is None:
-2243                base_globals = getattr(sys.modules.get(base.__module__, None), '__dict__', {})
-2244            else:
-2245                base_globals = globalns
-2246            ann = base.__dict__.get('__annotations__', {})
-2247            if isinstance(ann, types.GetSetDescriptorType):
-2248                ann = {}
-2249            base_locals = dict(vars(base)) if localns is None else localns
-2250            if localns is None and globalns is None:
-2251                # This is surprising, but required.  Before Python 3.10,
-2252                # get_type_hints only evaluated the globalns of
-2253                # a class.  To maintain backwards compatibility, we reverse
-2254                # the globalns and localns order so that eval() looks into
-2255                # *base_globals* first rather than *base_locals*.
-2256                # This only affects ForwardRefs.
-2257                base_globals, base_locals = base_locals, base_globals
-2258            for name, value in ann.items():
-2259                if value is None:
-2260                    value = type(None)
-2261                if isinstance(value, str):
-2262                    value = ForwardRef(value, is_argument=False, is_class=True)
-2263                value = _eval_type(value, base_globals, base_locals, base.__type_params__)
-2264                hints[name] = value
-2265        return hints if include_extras else {k: _strip_annotations(t) for k, t in hints.items()}
-2266
-2267    if globalns is None:
-2268        if isinstance(obj, types.ModuleType):
-2269            globalns = obj.__dict__
-2270        else:
-2271            nsobj = obj
-2272            # Find globalns for the unwrapped object.
-2273            while hasattr(nsobj, '__wrapped__'):
-2274                nsobj = nsobj.__wrapped__
-2275            globalns = getattr(nsobj, '__globals__', {})
-2276        if localns is None:
-2277            localns = globalns
-2278    elif localns is None:
-2279        localns = globalns
-2280    hints = getattr(obj, '__annotations__', None)
-2281    if hints is None:
-2282        # Return empty annotations for something that _could_ have them.
-2283        if isinstance(obj, _allowed_types):
-2284            return {}
-2285        else:
-2286            raise TypeError('{!r} is not a module, class, method, '
-2287                            'or function.'.format(obj))
-2288    hints = dict(hints)
-2289    type_params = getattr(obj, "__type_params__", ())
-2290    for name, value in hints.items():
-2291        if value is None:
-2292            value = type(None)
-2293        if isinstance(value, str):
-2294            # class-level forward refs were handled above, this must be either
-2295            # a module-level annotation or a function argument annotation
-2296            value = ForwardRef(
-2297                value,
-2298                is_argument=not isinstance(obj, types.ModuleType),
-2299                is_class=False,
-2300            )
-2301        hints[name] = _eval_type(value, globalns, localns, type_params)
-2302    return hints if include_extras else {k: _strip_annotations(t) for k, t in hints.items()}
-2303
-2304
-2305def _strip_annotations(t):
-2306    """Strip the annotations from a given type."""
-2307    if isinstance(t, _AnnotatedAlias):
-2308        return _strip_annotations(t.__origin__)
-2309    if hasattr(t, "__origin__") and t.__origin__ in (Required, NotRequired):
-2310        return _strip_annotations(t.__args__[0])
-2311    if isinstance(t, _GenericAlias):
-2312        stripped_args = tuple(_strip_annotations(a) for a in t.__args__)
-2313        if stripped_args == t.__args__:
-2314            return t
-2315        return t.copy_with(stripped_args)
-2316    if isinstance(t, GenericAlias):
-2317        stripped_args = tuple(_strip_annotations(a) for a in t.__args__)
-2318        if stripped_args == t.__args__:
-2319            return t
-2320        return GenericAlias(t.__origin__, stripped_args)
-2321    if isinstance(t, types.UnionType):
-2322        stripped_args = tuple(_strip_annotations(a) for a in t.__args__)
-2323        if stripped_args == t.__args__:
-2324            return t
-2325        return functools.reduce(operator.or_, stripped_args)
-2326
-2327    return t
-2328
-2329
-2330def get_origin(tp):
-2331    """Get the unsubscripted version of a type.
-2332
-2333    This supports generic types, Callable, Tuple, Union, Literal, Final, ClassVar,
-2334    Annotated, and others. Return None for unsupported types.
+2233    - If no dict arguments are passed, an attempt is made to use the
+2234      globals from obj (or the respective module's globals for classes),
+2235      and these are also used as the locals.  If the object does not appear
+2236      to have globals, an empty dictionary is used.  For classes, the search
+2237      order is globals first then locals.
+2238
+2239    - If one dict argument is passed, it is used for both globals and
+2240      locals.
+2241
+2242    - If two dict arguments are passed, they specify globals and
+2243      locals, respectively.
+2244    """
+2245    if getattr(obj, '__no_type_check__', None):
+2246        return {}
+2247    # Classes require special treatment.
+2248    if isinstance(obj, type):
+2249        hints = {}
+2250        for base in reversed(obj.__mro__):
+2251            if globalns is None:
+2252                base_globals = getattr(sys.modules.get(base.__module__, None), '__dict__', {})
+2253            else:
+2254                base_globals = globalns
+2255            ann = base.__dict__.get('__annotations__', {})
+2256            if isinstance(ann, types.GetSetDescriptorType):
+2257                ann = {}
+2258            base_locals = dict(vars(base)) if localns is None else localns
+2259            if localns is None and globalns is None:
+2260                # This is surprising, but required.  Before Python 3.10,
+2261                # get_type_hints only evaluated the globalns of
+2262                # a class.  To maintain backwards compatibility, we reverse
+2263                # the globalns and localns order so that eval() looks into
+2264                # *base_globals* first rather than *base_locals*.
+2265                # This only affects ForwardRefs.
+2266                base_globals, base_locals = base_locals, base_globals
+2267            for name, value in ann.items():
+2268                if value is None:
+2269                    value = type(None)
+2270                if isinstance(value, str):
+2271                    value = ForwardRef(value, is_argument=False, is_class=True)
+2272                value = _eval_type(value, base_globals, base_locals, base.__type_params__)
+2273                hints[name] = value
+2274        return hints if include_extras else {k: _strip_annotations(t) for k, t in hints.items()}
+2275
+2276    if globalns is None:
+2277        if isinstance(obj, types.ModuleType):
+2278            globalns = obj.__dict__
+2279        else:
+2280            nsobj = obj
+2281            # Find globalns for the unwrapped object.
+2282            while hasattr(nsobj, '__wrapped__'):
+2283                nsobj = nsobj.__wrapped__
+2284            globalns = getattr(nsobj, '__globals__', {})
+2285        if localns is None:
+2286            localns = globalns
+2287    elif localns is None:
+2288        localns = globalns
+2289    hints = getattr(obj, '__annotations__', None)
+2290    if hints is None:
+2291        # Return empty annotations for something that _could_ have them.
+2292        if isinstance(obj, _allowed_types):
+2293            return {}
+2294        else:
+2295            raise TypeError('{!r} is not a module, class, method, '
+2296                            'or function.'.format(obj))
+2297    hints = dict(hints)
+2298    type_params = getattr(obj, "__type_params__", ())
+2299    for name, value in hints.items():
+2300        if value is None:
+2301            value = type(None)
+2302        if isinstance(value, str):
+2303            # class-level forward refs were handled above, this must be either
+2304            # a module-level annotation or a function argument annotation
+2305            value = ForwardRef(
+2306                value,
+2307                is_argument=not isinstance(obj, types.ModuleType),
+2308                is_class=False,
+2309            )
+2310        hints[name] = _eval_type(value, globalns, localns, type_params)
+2311    return hints if include_extras else {k: _strip_annotations(t) for k, t in hints.items()}
+2312
+2313
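A short sketch of the behaviour described above, using a made-up Node class: string annotations are resolved as forward references, and Annotated metadata survives only with include_extras=True::

    from typing import Annotated, get_type_hints

    class Node:
        parent: "Node | None"              # forward reference as a string
        weight: Annotated[float, "grams"]

    hints = get_type_hints(Node)
    assert hints["parent"] == (Node | None)
    assert hints["weight"] is float        # metadata stripped by default

    extras = get_type_hints(Node, include_extras=True)
    assert extras["weight"] == Annotated[float, "grams"]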
+2314def _strip_annotations(t):
+2315    """Strip the annotations from a given type."""
+2316    if isinstance(t, _AnnotatedAlias):
+2317        return _strip_annotations(t.__origin__)
+2318    if hasattr(t, "__origin__") and t.__origin__ in (Required, NotRequired):
+2319        return _strip_annotations(t.__args__[0])
+2320    if isinstance(t, _GenericAlias):
+2321        stripped_args = tuple(_strip_annotations(a) for a in t.__args__)
+2322        if stripped_args == t.__args__:
+2323            return t
+2324        return t.copy_with(stripped_args)
+2325    if isinstance(t, GenericAlias):
+2326        stripped_args = tuple(_strip_annotations(a) for a in t.__args__)
+2327        if stripped_args == t.__args__:
+2328            return t
+2329        return GenericAlias(t.__origin__, stripped_args)
+2330    if isinstance(t, types.UnionType):
+2331        stripped_args = tuple(_strip_annotations(a) for a in t.__args__)
+2332        if stripped_args == t.__args__:
+2333            return t
+2334        return functools.reduce(operator.or_, stripped_args)
 2335
-2336    Examples::
+2336    return t
 2337
-2338        >>> P = ParamSpec('P')
-2339        >>> assert get_origin(Literal[42]) is Literal
-2340        >>> assert get_origin(int) is None
-2341        >>> assert get_origin(ClassVar[int]) is ClassVar
-2342        >>> assert get_origin(Generic) is Generic
-2343        >>> assert get_origin(Generic[T]) is Generic
-2344        >>> assert get_origin(Union[T, int]) is Union
-2345        >>> assert get_origin(List[Tuple[T, T]][int]) is list
-2346        >>> assert get_origin(P.args) is P
-2347    """
-2348    if isinstance(tp, _AnnotatedAlias):
-2349        return Annotated
-2350    if isinstance(tp, (_BaseGenericAlias, GenericAlias,
-2351                       ParamSpecArgs, ParamSpecKwargs)):
-2352        return tp.__origin__
-2353    if tp is Generic:
-2354        return Generic
-2355    if isinstance(tp, types.UnionType):
-2356        return types.UnionType
-2357    return None
-2358
-2359
-2360def get_args(tp):
-2361    """Get type arguments with all substitutions performed.
-2362
-2363    For unions, basic simplifications used by Union constructor are performed.
-2364
-2365    Examples::
-2366
-2367        >>> T = TypeVar('T')
-2368        >>> assert get_args(Dict[str, int]) == (str, int)
-2369        >>> assert get_args(int) == ()
-2370        >>> assert get_args(Union[int, Union[T, int], str][int]) == (int, str)
-2371        >>> assert get_args(Union[int, Tuple[T, int]][str]) == (int, Tuple[str, int])
-2372        >>> assert get_args(Callable[[], T][int]) == ([], int)
-2373    """
-2374    if isinstance(tp, _AnnotatedAlias):
-2375        return (tp.__origin__,) + tp.__metadata__
-2376    if isinstance(tp, (_GenericAlias, GenericAlias)):
-2377        res = tp.__args__
-2378        if _should_unflatten_callable_args(tp, res):
-2379            res = (list(res[:-1]), res[-1])
-2380        return res
-2381    if isinstance(tp, types.UnionType):
-2382        return tp.__args__
-2383    return ()
-2384
-2385
-2386def is_typeddict(tp):
-2387    """Check if an annotation is a TypedDict class.
-2388
-2389    For example::
-2390
-2391        >>> from typing import TypedDict
-2392        >>> class Film(TypedDict):
-2393        ...     title: str
-2394        ...     year: int
-2395        ...
-2396        >>> is_typeddict(Film)
-2397        True
-2398        >>> is_typeddict(dict)
-2399        False
-2400    """
-2401    return isinstance(tp, _TypedDictMeta)
-2402
-2403
-2404_ASSERT_NEVER_REPR_MAX_LENGTH = 100
-2405
-2406
-2407def assert_never(arg: Never, /) -> Never:
-2408    """Statically assert that a line of code is unreachable.
-2409
-2410    Example::
+2338
+2339def get_origin(tp):
+2340    """Get the unsubscripted version of a type.
+2341
+2342    This supports generic types, Callable, Tuple, Union, Literal, Final, ClassVar,
+2343    Annotated, and others. Return None for unsupported types.
+2344
+2345    Examples::
+2346
+2347        >>> P = ParamSpec('P')
+2348        >>> assert get_origin(Literal[42]) is Literal
+2349        >>> assert get_origin(int) is None
+2350        >>> assert get_origin(ClassVar[int]) is ClassVar
+2351        >>> assert get_origin(Generic) is Generic
+2352        >>> assert get_origin(Generic[T]) is Generic
+2353        >>> assert get_origin(Union[T, int]) is Union
+2354        >>> assert get_origin(List[Tuple[T, T]][int]) is list
+2355        >>> assert get_origin(P.args) is P
+2356    """
+2357    if isinstance(tp, _AnnotatedAlias):
+2358        return Annotated
+2359    if isinstance(tp, (_BaseGenericAlias, GenericAlias,
+2360                       ParamSpecArgs, ParamSpecKwargs)):
+2361        return tp.__origin__
+2362    if tp is Generic:
+2363        return Generic
+2364    if isinstance(tp, types.UnionType):
+2365        return types.UnionType
+2366    return None
+2367
+2368
+2369def get_args(tp):
+2370    """Get type arguments with all substitutions performed.
+2371
+2372    For unions, basic simplifications used by Union constructor are performed.
+2373
+2374    Examples::
+2375
+2376        >>> T = TypeVar('T')
+2377        >>> assert get_args(Dict[str, int]) == (str, int)
+2378        >>> assert get_args(int) == ()
+2379        >>> assert get_args(Union[int, Union[T, int], str][int]) == (int, str)
+2380        >>> assert get_args(Union[int, Tuple[T, int]][str]) == (int, Tuple[str, int])
+2381        >>> assert get_args(Callable[[], T][int]) == ([], int)
+2382    """
+2383    if isinstance(tp, _AnnotatedAlias):
+2384        return (tp.__origin__,) + tp.__metadata__
+2385    if isinstance(tp, (_GenericAlias, GenericAlias)):
+2386        res = tp.__args__
+2387        if _should_unflatten_callable_args(tp, res):
+2388            res = (list(res[:-1]), res[-1])
+2389        return res
+2390    if isinstance(tp, types.UnionType):
+2391        return tp.__args__
+2392    return ()
+2393
+2394
+2395def is_typeddict(tp):
+2396    """Check if an annotation is a TypedDict class.
+2397
+2398    For example::
+2399
+2400        >>> from typing import TypedDict
+2401        >>> class Film(TypedDict):
+2402        ...     title: str
+2403        ...     year: int
+2404        ...
+2405        >>> is_typeddict(Film)
+2406        True
+2407        >>> is_typeddict(dict)
+2408        False
+2409    """
+2410    return isinstance(tp, _TypedDictMeta)
 2411
-2412        def int_or_str(arg: int | str) -> None:
-2413            match arg:
-2414                case int():
-2415                    print("It's an int")
-2416                case str():
-2417                    print("It's a str")
-2418                case _:
-2419                    assert_never(arg)
+2412
+2413_ASSERT_NEVER_REPR_MAX_LENGTH = 100
+2414
+2415
+2416def assert_never(arg: Never, /) -> Never:
+2417    """Statically assert that a line of code is unreachable.
+2418
+2419    Example::
 2420
-2421    If a type checker finds that a call to assert_never() is
-2422    reachable, it will emit an error.
-2423
-2424    At runtime, this throws an exception when called.
-2425    """
-2426    value = repr(arg)
-2427    if len(value) > _ASSERT_NEVER_REPR_MAX_LENGTH:
-2428        value = value[:_ASSERT_NEVER_REPR_MAX_LENGTH] + '...'
-2429    raise AssertionError(f"Expected code to be unreachable, but got: {value}")
-2430
-2431
-2432def no_type_check(arg):
-2433    """Decorator to indicate that annotations are not type hints.
-2434
-2435    The argument must be a class or function; if it is a class, it
-2436    applies recursively to all methods and classes defined in that class
-2437    (but not to methods defined in its superclasses or subclasses).
-2438
-2439    This mutates the function(s) or class(es) in place.
-2440    """
-2441    if isinstance(arg, type):
-2442        for key in dir(arg):
-2443            obj = getattr(arg, key)
-2444            if (
-2445                not hasattr(obj, '__qualname__')
-2446                or obj.__qualname__ != f'{arg.__qualname__}.{obj.__name__}'
-2447                or getattr(obj, '__module__', None) != arg.__module__
-2448            ):
-2449                # We only modify objects that are defined in this type directly.
-2450                # If classes / methods are nested in multiple layers,
-2451                # we will modify them when processing their direct holders.
-2452                continue
-2453            # Instance, class, and static methods:
-2454            if isinstance(obj, types.FunctionType):
-2455                obj.__no_type_check__ = True
-2456            if isinstance(obj, types.MethodType):
-2457                obj.__func__.__no_type_check__ = True
-2458            # Nested types:
-2459            if isinstance(obj, type):
-2460                no_type_check(obj)
-2461    try:
-2462        arg.__no_type_check__ = True
-2463    except TypeError:  # built-in classes
-2464        pass
-2465    return arg
-2466
-2467
-2468def no_type_check_decorator(decorator):
-2469    """Decorator to give another decorator the @no_type_check effect.
-2470
-2471    This wraps the decorator with something that wraps the decorated
-2472    function in @no_type_check.
-2473    """
-2474    @functools.wraps(decorator)
-2475    def wrapped_decorator(*args, **kwds):
-2476        func = decorator(*args, **kwds)
-2477        func = no_type_check(func)
-2478        return func
+2421        def int_or_str(arg: int | str) -> None:
+2422            match arg:
+2423                case int():
+2424                    print("It's an int")
+2425                case str():
+2426                    print("It's a str")
+2427                case _:
+2428                    assert_never(arg)
+2429
+2430    If a type checker finds that a call to assert_never() is
+2431    reachable, it will emit an error.
+2432
+2433    At runtime, this throws an exception when called.
+2434    """
+2435    value = repr(arg)
+2436    if len(value) > _ASSERT_NEVER_REPR_MAX_LENGTH:
+2437        value = value[:_ASSERT_NEVER_REPR_MAX_LENGTH] + '...'
+2438    raise AssertionError(f"Expected code to be unreachable, but got: {value}")
+2439
+2440
+2441def no_type_check(arg):
+2442    """Decorator to indicate that annotations are not type hints.
+2443
+2444    The argument must be a class or function; if it is a class, it
+2445    applies recursively to all methods and classes defined in that class
+2446    (but not to methods defined in its superclasses or subclasses).
+2447
+2448    This mutates the function(s) or class(es) in place.
+2449    """
+2450    if isinstance(arg, type):
+2451        for key in dir(arg):
+2452            obj = getattr(arg, key)
+2453            if (
+2454                not hasattr(obj, '__qualname__')
+2455                or obj.__qualname__ != f'{arg.__qualname__}.{obj.__name__}'
+2456                or getattr(obj, '__module__', None) != arg.__module__
+2457            ):
+2458                # We only modify objects that are defined in this type directly.
+2459                # If classes / methods are nested in multiple layers,
+2460                # we will modify them when processing their direct holders.
+2461                continue
+2462            # Instance, class, and static methods:
+2463            if isinstance(obj, types.FunctionType):
+2464                obj.__no_type_check__ = True
+2465            if isinstance(obj, types.MethodType):
+2466                obj.__func__.__no_type_check__ = True
+2467            # Nested types:
+2468            if isinstance(obj, type):
+2469                no_type_check(obj)
+2470    try:
+2471        arg.__no_type_check__ = True
+2472    except TypeError:  # built-in classes
+2473        pass
+2474    return arg
+2475
+2476
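A hypothetical Config class showing the effect: the decorator marks the class and its directly defined methods with __no_type_check__, so get_type_hints() skips their annotations entirely::

    from typing import get_type_hints, no_type_check

    @no_type_check
    class Config:
        retries: "a small integer"        # documentation, not a real type

        def load(self, path: "file-ish") -> "Config":
            return self

    assert Config.__no_type_check__ is True
    assert Config.load.__no_type_check__ is True
    assert get_type_hints(Config) == {}       # bogus strings never evaluated
    assert get_type_hints(Config.load) == {}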
+2477def no_type_check_decorator(decorator):
+2478    """Decorator to give another decorator the @no_type_check effect.
 2479
-2480    return wrapped_decorator
-2481
-2482
-2483def _overload_dummy(*args, **kwds):
-2484    """Helper for @overload to raise when called."""
-2485    raise NotImplementedError(
-2486        "You should not call an overloaded function. "
-2487        "A series of @overload-decorated functions "
-2488        "outside a stub module should always be followed "
-2489        "by an implementation that is not @overload-ed.")
+2480    This wraps the decorator with something that wraps the decorated
+2481    function in @no_type_check.
+2482    """
+2483    @functools.wraps(decorator)
+2484    def wrapped_decorator(*args, **kwds):
+2485        func = decorator(*args, **kwds)
+2486        func = no_type_check(func)
+2487        return func
+2488
+2489    return wrapped_decorator
 2490
 2491
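A sketch with a made-up logged decorator: because logged is wrapped with @no_type_check_decorator, every function it decorates behaves as if @no_type_check had been applied::

    import functools
    from typing import get_type_hints, no_type_check_decorator

    @no_type_check_decorator
    def logged(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            return func(*args, **kwargs)
        return wrapper

    @logged
    def add(x: "not a type", y: "ditto") -> "whatever":
        return x + y

    assert add.__no_type_check__ is True
    assert get_type_hints(add) == {}
    assert add(1, 2) == 3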
-2492# {module: {qualname: {firstlineno: func}}}
-2493_overload_registry = defaultdict(functools.partial(defaultdict, dict))
-2494
-2495
-2496def overload(func):
-2497    """Decorator for overloaded functions/methods.
-2498
-2499    In a stub file, place two or more stub definitions for the same
-2500    function in a row, each decorated with @overload.
-2501
-2502    For example::
+2492def _overload_dummy(*args, **kwds):
+2493    """Helper for @overload to raise when called."""
+2494    raise NotImplementedError(
+2495        "You should not call an overloaded function. "
+2496        "A series of @overload-decorated functions "
+2497        "outside a stub module should always be followed "
+2498        "by an implementation that is not @overload-ed.")
+2499
+2500
+2501# {module: {qualname: {firstlineno: func}}}
+2502_overload_registry = defaultdict(functools.partial(defaultdict, dict))
 2503
-2504        @overload
-2505        def utf8(value: None) -> None: ...
-2506        @overload
-2507        def utf8(value: bytes) -> bytes: ...
-2508        @overload
-2509        def utf8(value: str) -> bytes: ...
+2504
+2505def overload(func):
+2506    """Decorator for overloaded functions/methods.
+2507
+2508    In a stub file, place two or more stub definitions for the same
+2509    function in a row, each decorated with @overload.
 2510
-2511    In a non-stub file (i.e. a regular .py file), do the same but
-2512    follow it with an implementation.  The implementation should *not*
-2513    be decorated with @overload::
-2514
+2511    For example::
+2512
+2513        @overload
+2514        def utf8(value: None) -> None: ...
 2515        @overload
-2516        def utf8(value: None) -> None: ...
+2516        def utf8(value: bytes) -> bytes: ...
 2517        @overload
-2518        def utf8(value: bytes) -> bytes: ...
-2519        @overload
-2520        def utf8(value: str) -> bytes: ...
-2521        def utf8(value):
-2522            ...  # implementation goes here
+2518        def utf8(value: str) -> bytes: ...
+2519
+2520    In a non-stub file (i.e. a regular .py file), do the same but
+2521    follow it with an implementation.  The implementation should *not*
+2522    be decorated with @overload::
 2523
-2524    The overloads for a function can be retrieved at runtime using the
-2525    get_overloads() function.
-2526    """
-2527    # classmethod and staticmethod
-2528    f = getattr(func, "__func__", func)
-2529    try:
-2530        _overload_registry[f.__module__][f.__qualname__][f.__code__.co_firstlineno] = func
-2531    except AttributeError:
-2532        # Not a normal function; ignore.
-2533        pass
-2534    return _overload_dummy
-2535
-2536
-2537def get_overloads(func):
-2538    """Return all defined overloads for *func* as a sequence."""
-2539    # classmethod and staticmethod
-2540    f = getattr(func, "__func__", func)
-2541    if f.__module__ not in _overload_registry:
-2542        return []
-2543    mod_dict = _overload_registry[f.__module__]
-2544    if f.__qualname__ not in mod_dict:
-2545        return []
-2546    return list(mod_dict[f.__qualname__].values())
-2547
-2548
-2549def clear_overloads():
-2550    """Clear all overloads in the registry."""
-2551    _overload_registry.clear()
-2552
-2553
-2554def final(f):
-2555    """Decorator to indicate final methods and final classes.
+2524        @overload
+2525        def utf8(value: None) -> None: ...
+2526        @overload
+2527        def utf8(value: bytes) -> bytes: ...
+2528        @overload
+2529        def utf8(value: str) -> bytes: ...
+2530        def utf8(value):
+2531            ...  # implementation goes here
+2532
+2533    The overloads for a function can be retrieved at runtime using the
+2534    get_overloads() function.
+2535    """
+2536    # classmethod and staticmethod
+2537    f = getattr(func, "__func__", func)
+2538    try:
+2539        _overload_registry[f.__module__][f.__qualname__][f.__code__.co_firstlineno] = func
+2540    except AttributeError:
+2541        # Not a normal function; ignore.
+2542        pass
+2543    return _overload_dummy
+2544
+2545
+2546def get_overloads(func):
+2547    """Return all defined overloads for *func* as a sequence."""
+2548    # classmethod and staticmethod
+2549    f = getattr(func, "__func__", func)
+2550    if f.__module__ not in _overload_registry:
+2551        return []
+2552    mod_dict = _overload_registry[f.__module__]
+2553    if f.__qualname__ not in mod_dict:
+2554        return []
+2555    return list(mod_dict[f.__qualname__].values())
 2556
-2557    Use this decorator to indicate to type checkers that the decorated
-2558    method cannot be overridden, and the decorated class cannot be subclassed.
-2559
-2560    For example::
+2557
+2558def clear_overloads():
+2559    """Clear all overloads in the registry."""
+2560    _overload_registry.clear()
 2561
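A minimal example tying @overload to the runtime registry that get_overloads() reads; scale is a made-up function::

    from typing import get_overloads, overload

    @overload
    def scale(value: int, factor: int) -> int: ...
    @overload
    def scale(value: float, factor: float) -> float: ...
    def scale(value, factor):              # the real implementation
        return value * factor

    assert scale(2, 3) == 6
    assert len(get_overloads(scale)) == 2  # the two stub definitions above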
-2562        class Base:
-2563            @final
-2564            def done(self) -> None:
-2565                ...
-2566        class Sub(Base):
-2567            def done(self) -> None:  # Error reported by type checker
-2568                ...
-2569
-2570        @final
-2571        class Leaf:
-2572            ...
-2573        class Other(Leaf):  # Error reported by type checker
-2574            ...
-2575
-2576    There is no runtime checking of these properties. The decorator
-2577    attempts to set the ``__final__`` attribute to ``True`` on the decorated
-2578    object to allow runtime introspection.
-2579    """
-2580    try:
-2581        f.__final__ = True
-2582    except (AttributeError, TypeError):
-2583        # Skip the attribute silently if it is not writable.
-2584        # AttributeError happens if the object has __slots__ or a
-2585        # read-only property, TypeError if it's a builtin class.
-2586        pass
-2587    return f
-2588
-2589
-2590# Some unconstrained type variables.  These were initially used by the container types.
-2591# They were never meant for export and are now unused, but we keep them around to
-2592# avoid breaking compatibility with users who import them.
-2593T = TypeVar('T')  # Any type.
-2594KT = TypeVar('KT')  # Key type.
-2595VT = TypeVar('VT')  # Value type.
-2596T_co = TypeVar('T_co', covariant=True)  # Any type covariant containers.
-2597V_co = TypeVar('V_co', covariant=True)  # Any type covariant containers.
-2598VT_co = TypeVar('VT_co', covariant=True)  # Value type covariant containers.
-2599T_contra = TypeVar('T_contra', contravariant=True)  # Ditto contravariant.
-2600# Internal type variable used for Type[].
-2601CT_co = TypeVar('CT_co', covariant=True, bound=type)
-2602
-2603
-2604# A useful type variable with constraints.  This represents string types.
-2605# (This one *is* for export!)
-2606AnyStr = TypeVar('AnyStr', bytes, str)
-2607
-2608
-2609# Various ABCs mimicking those in collections.abc.
-2610_alias = _SpecialGenericAlias
+2562
+2563def final(f):
+2564    """Decorator to indicate final methods and final classes.
+2565
+2566    Use this decorator to indicate to type checkers that the decorated
+2567    method cannot be overridden, and the decorated class cannot be subclassed.
+2568
+2569    For example::
+2570
+2571        class Base:
+2572            @final
+2573            def done(self) -> None:
+2574                ...
+2575        class Sub(Base):
+2576            def done(self) -> None:  # Error reported by type checker
+2577                ...
+2578
+2579        @final
+2580        class Leaf:
+2581            ...
+2582        class Other(Leaf):  # Error reported by type checker
+2583            ...
+2584
+2585    There is no runtime checking of these properties. The decorator
+2586    attempts to set the ``__final__`` attribute to ``True`` on the decorated
+2587    object to allow runtime introspection.
+2588    """
+2589    try:
+2590        f.__final__ = True
+2591    except (AttributeError, TypeError):
+2592        # Skip the attribute silently if it is not writable.
+2593        # AttributeError happens if the object has __slots__ or a
+2594        # read-only property, TypeError if it's a builtin class.
+2595        pass
+2596    return f
+2597
+2598
+2599# Some unconstrained type variables.  These were initially used by the container types.
+2600# They were never meant for export and are now unused, but we keep them around to
+2601# avoid breaking compatibility with users who import them.
+2602T = TypeVar('T')  # Any type.
+2603KT = TypeVar('KT')  # Key type.
+2604VT = TypeVar('VT')  # Value type.
+2605T_co = TypeVar('T_co', covariant=True)  # Any type covariant containers.
+2606V_co = TypeVar('V_co', covariant=True)  # Any type covariant containers.
+2607VT_co = TypeVar('VT_co', covariant=True)  # Value type covariant containers.
+2608T_contra = TypeVar('T_contra', contravariant=True)  # Ditto contravariant.
+2609# Internal type variable used for Type[].
+2610CT_co = TypeVar('CT_co', covariant=True, bound=type)
 2611
-2612Hashable = _alias(collections.abc.Hashable, 0)  # Not generic.
-2613Awaitable = _alias(collections.abc.Awaitable, 1)
-2614Coroutine = _alias(collections.abc.Coroutine, 3)
-2615AsyncIterable = _alias(collections.abc.AsyncIterable, 1)
-2616AsyncIterator = _alias(collections.abc.AsyncIterator, 1)
-2617Iterable = _alias(collections.abc.Iterable, 1)
-2618Iterator = _alias(collections.abc.Iterator, 1)
-2619Reversible = _alias(collections.abc.Reversible, 1)
-2620Sized = _alias(collections.abc.Sized, 0)  # Not generic.
-2621Container = _alias(collections.abc.Container, 1)
-2622Collection = _alias(collections.abc.Collection, 1)
-2623Callable = _CallableType(collections.abc.Callable, 2)
-2624Callable.__doc__ = \
-2625    """Deprecated alias to collections.abc.Callable.
-2626
-2627    Callable[[int], str] signifies a function that takes a single
-2628    parameter of type int and returns a str.
-2629
-2630    The subscription syntax must always be used with exactly two
-2631    values: the argument list and the return type.
-2632    The argument list must be a list of types, a ParamSpec,
-2633    Concatenate or ellipsis. The return type must be a single type.
-2634
-2635    There is no syntax to indicate optional or keyword arguments;
-2636    such function types are rarely used as callback types.
-2637    """
-2638AbstractSet = _alias(collections.abc.Set, 1, name='AbstractSet')
-2639MutableSet = _alias(collections.abc.MutableSet, 1)
-2640# NOTE: Mapping is only covariant in the value type.
-2641Mapping = _alias(collections.abc.Mapping, 2)
-2642MutableMapping = _alias(collections.abc.MutableMapping, 2)
-2643Sequence = _alias(collections.abc.Sequence, 1)
-2644MutableSequence = _alias(collections.abc.MutableSequence, 1)
-2645ByteString = _DeprecatedGenericAlias(
-2646    collections.abc.ByteString, 0, removal_version=(3, 14)  # Not generic.
-2647)
-2648# Tuple accepts variable number of parameters.
-2649Tuple = _TupleType(tuple, -1, inst=False, name='Tuple')
-2650Tuple.__doc__ = \
-2651    """Deprecated alias to builtins.tuple.
-2652
-2653    Tuple[X, Y] is the cross-product type of X and Y.
-2654
-2655    Example: Tuple[T1, T2] is a tuple of two elements corresponding
-2656    to type variables T1 and T2.  Tuple[int, float, str] is a tuple
-2657    of an int, a float and a string.
-2658
-2659    To specify a variable-length tuple of homogeneous type, use Tuple[T, ...].
-2660    """
-2661List = _alias(list, 1, inst=False, name='List')
-2662Deque = _alias(collections.deque, 1, name='Deque')
-2663Set = _alias(set, 1, inst=False, name='Set')
-2664FrozenSet = _alias(frozenset, 1, inst=False, name='FrozenSet')
-2665MappingView = _alias(collections.abc.MappingView, 1)
-2666KeysView = _alias(collections.abc.KeysView, 1)
-2667ItemsView = _alias(collections.abc.ItemsView, 2)
-2668ValuesView = _alias(collections.abc.ValuesView, 1)
-2669ContextManager = _alias(contextlib.AbstractContextManager, 1, name='ContextManager')
-2670AsyncContextManager = _alias(contextlib.AbstractAsyncContextManager, 1, name='AsyncContextManager')
-2671Dict = _alias(dict, 2, inst=False, name='Dict')
-2672DefaultDict = _alias(collections.defaultdict, 2, name='DefaultDict')
-2673OrderedDict = _alias(collections.OrderedDict, 2)
-2674Counter = _alias(collections.Counter, 1)
-2675ChainMap = _alias(collections.ChainMap, 2)
-2676Generator = _alias(collections.abc.Generator, 3)
-2677AsyncGenerator = _alias(collections.abc.AsyncGenerator, 2)
-2678Type = _alias(type, 1, inst=False, name='Type')
-2679Type.__doc__ = \
-2680    """Deprecated alias to builtins.type.
-2681
-2682    builtins.type or typing.Type can be used to annotate class objects.
-2683    For example, suppose we have the following classes::
-2684
-2685        class User: ...  # Abstract base for User classes
-2686        class BasicUser(User): ...
-2687        class ProUser(User): ...
-2688        class TeamUser(User): ...
-2689
-2690    And a function that takes a class argument that's a subclass of
-2691    User and returns an instance of the corresponding class::
-2692
-2693        def new_user[U](user_class: Type[U]) -> U:
-2694            user = user_class()
-2695            # (Here we could write the user object to a database)
-2696            return user
-2697
-2698        joe = new_user(BasicUser)
-2699
-2700    At this point the type checker knows that joe has type BasicUser.
-2701    """
-2702
-2703
-2704@runtime_checkable
-2705class SupportsInt(Protocol):
-2706    """An ABC with one abstract method __int__."""
-2707
-2708    __slots__ = ()
-2709
-2710    @abstractmethod
-2711    def __int__(self) -> int:
-2712        pass
-2713
-2714
-2715@runtime_checkable
-2716class SupportsFloat(Protocol):
-2717    """An ABC with one abstract method __float__."""
+2612
+2613# A useful type variable with constraints.  This represents string types.
+2614# (This one *is* for export!)
+2615AnyStr = TypeVar('AnyStr', bytes, str)
+2616
+2617
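A small illustration of the constrained type variable: every use of AnyStr in a signature must resolve to the same constraint, either bytes or str::

    from typing import AnyStr

    def double(fragment: AnyStr) -> AnyStr:
        # double("ab") is typed as str, double(b"ab") as bytes; mixing the two
        # (or passing, say, an int) is rejected by a type checker.
        return fragment + fragment

    assert double("ab") == "abab"
    assert double(b"ab") == b"abab"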
+2618# Various ABCs mimicking those in collections.abc.
+2619_alias = _SpecialGenericAlias
+2620
+2621Hashable = _alias(collections.abc.Hashable, 0)  # Not generic.
+2622Awaitable = _alias(collections.abc.Awaitable, 1)
+2623Coroutine = _alias(collections.abc.Coroutine, 3)
+2624AsyncIterable = _alias(collections.abc.AsyncIterable, 1)
+2625AsyncIterator = _alias(collections.abc.AsyncIterator, 1)
+2626Iterable = _alias(collections.abc.Iterable, 1)
+2627Iterator = _alias(collections.abc.Iterator, 1)
+2628Reversible = _alias(collections.abc.Reversible, 1)
+2629Sized = _alias(collections.abc.Sized, 0)  # Not generic.
+2630Container = _alias(collections.abc.Container, 1)
+2631Collection = _alias(collections.abc.Collection, 1)
+2632Callable = _CallableType(collections.abc.Callable, 2)
+2633Callable.__doc__ = \
+2634    """Deprecated alias to collections.abc.Callable.
+2635
+2636    Callable[[int], str] signifies a function that takes a single
+2637    parameter of type int and returns a str.
+2638
+2639    The subscription syntax must always be used with exactly two
+2640    values: the argument list and the return type.
+2641    The argument list must be a list of types, a ParamSpec,
+2642    Concatenate or ellipsis. The return type must be a single type.
+2643
+2644    There is no syntax to indicate optional or keyword arguments;
+2645    such function types are rarely used as callback types.
+2646    """
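Since typing.Callable is just an alias, the same two-part subscription (argument list, then return type) is normally written against collections.abc.Callable; apply_twice is a made-up example::

    from collections.abc import Callable

    def apply_twice(func: Callable[[int], int], value: int) -> int:
        return func(func(value))

    assert apply_twice(lambda n: n + 1, 3) == 5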
+2647AbstractSet = _alias(collections.abc.Set, 1, name='AbstractSet')
+2648MutableSet = _alias(collections.abc.MutableSet, 1)
+2649# NOTE: Mapping is only covariant in the value type.
+2650Mapping = _alias(collections.abc.Mapping, 2)
+2651MutableMapping = _alias(collections.abc.MutableMapping, 2)
+2652Sequence = _alias(collections.abc.Sequence, 1)
+2653MutableSequence = _alias(collections.abc.MutableSequence, 1)
+2654ByteString = _DeprecatedGenericAlias(
+2655    collections.abc.ByteString, 0, removal_version=(3, 14)  # Not generic.
+2656)
+2657# Tuple accepts variable number of parameters.
+2658Tuple = _TupleType(tuple, -1, inst=False, name='Tuple')
+2659Tuple.__doc__ = \
+2660    """Deprecated alias to builtins.tuple.
+2661
+2662    Tuple[X, Y] is the cross-product type of X and Y.
+2663
+2664    Example: Tuple[T1, T2] is a tuple of two elements corresponding
+2665    to type variables T1 and T2.  Tuple[int, float, str] is a tuple
+2666    of an int, a float and a string.
+2667
+2668    To specify a variable-length tuple of homogeneous type, use Tuple[T, ...].
+2669    """
+2670List = _alias(list, 1, inst=False, name='List')
+2671Deque = _alias(collections.deque, 1, name='Deque')
+2672Set = _alias(set, 1, inst=False, name='Set')
+2673FrozenSet = _alias(frozenset, 1, inst=False, name='FrozenSet')
+2674MappingView = _alias(collections.abc.MappingView, 1)
+2675KeysView = _alias(collections.abc.KeysView, 1)
+2676ItemsView = _alias(collections.abc.ItemsView, 2)
+2677ValuesView = _alias(collections.abc.ValuesView, 1)
+2678ContextManager = _alias(contextlib.AbstractContextManager, 1, name='ContextManager')
+2679AsyncContextManager = _alias(contextlib.AbstractAsyncContextManager, 1, name='AsyncContextManager')
+2680Dict = _alias(dict, 2, inst=False, name='Dict')
+2681DefaultDict = _alias(collections.defaultdict, 2, name='DefaultDict')
+2682OrderedDict = _alias(collections.OrderedDict, 2)
+2683Counter = _alias(collections.Counter, 1)
+2684ChainMap = _alias(collections.ChainMap, 2)
+2685Generator = _alias(collections.abc.Generator, 3)
+2686AsyncGenerator = _alias(collections.abc.AsyncGenerator, 2)
+2687Type = _alias(type, 1, inst=False, name='Type')
+2688Type.__doc__ = \
+2689    """Deprecated alias to builtins.type.
+2690
+2691    builtins.type or typing.Type can be used to annotate class objects.
+2692    For example, suppose we have the following classes::
+2693
+2694        class User: ...  # Abstract base for User classes
+2695        class BasicUser(User): ...
+2696        class ProUser(User): ...
+2697        class TeamUser(User): ...
+2698
+2699    And a function that takes a class argument that's a subclass of
+2700    User and returns an instance of the corresponding class::
+2701
+2702        def new_user[U](user_class: Type[U]) -> U:
+2703            user = user_class()
+2704            # (Here we could write the user object to a database)
+2705            return user
+2706
+2707        joe = new_user(BasicUser)
+2708
+2709    At this point the type checker knows that joe has type BasicUser.
+2710    """
+2711
+2712
+2713@runtime_checkable
+2714class SupportsInt(Protocol):
+2715    """An ABC with one abstract method __int__."""
+2716
+2717    __slots__ = ()
 2718
-2719    __slots__ = ()
-2720
-2721    @abstractmethod
-2722    def __float__(self) -> float:
-2723        pass
-2724
-2725
-2726@runtime_checkable
-2727class SupportsComplex(Protocol):
-2728    """An ABC with one abstract method __complex__."""
+2719    @abstractmethod
+2720    def __int__(self) -> int:
+2721        pass
+2722
+2723
+2724@runtime_checkable
+2725class SupportsFloat(Protocol):
+2726    """An ABC with one abstract method __float__."""
+2727
+2728    __slots__ = ()
 2729
-2730    __slots__ = ()
-2731
-2732    @abstractmethod
-2733    def __complex__(self) -> complex:
-2734        pass
-2735
-2736
-2737@runtime_checkable
-2738class SupportsBytes(Protocol):
-2739    """An ABC with one abstract method __bytes__."""
+2730    @abstractmethod
+2731    def __float__(self) -> float:
+2732        pass
+2733
+2734
+2735@runtime_checkable
+2736class SupportsComplex(Protocol):
+2737    """An ABC with one abstract method __complex__."""
+2738
+2739    __slots__ = ()
 2740
-2741    __slots__ = ()
-2742
-2743    @abstractmethod
-2744    def __bytes__(self) -> bytes:
-2745        pass
-2746
-2747
-2748@runtime_checkable
-2749class SupportsIndex(Protocol):
-2750    """An ABC with one abstract method __index__."""
+2741    @abstractmethod
+2742    def __complex__(self) -> complex:
+2743        pass
+2744
+2745
+2746@runtime_checkable
+2747class SupportsBytes(Protocol):
+2748    """An ABC with one abstract method __bytes__."""
+2749
+2750    __slots__ = ()
 2751
-2752    __slots__ = ()
-2753
-2754    @abstractmethod
-2755    def __index__(self) -> int:
-2756        pass
-2757
-2758
-2759@runtime_checkable
-2760class SupportsAbs[T](Protocol):
-2761    """An ABC with one abstract method __abs__ that is covariant in its return type."""
+2752    @abstractmethod
+2753    def __bytes__(self) -> bytes:
+2754        pass
+2755
+2756
+2757@runtime_checkable
+2758class SupportsIndex(Protocol):
+2759    """An ABC with one abstract method __index__."""
+2760
+2761    __slots__ = ()
 2762
-2763    __slots__ = ()
-2764
-2765    @abstractmethod
-2766    def __abs__(self) -> T:
-2767        pass
-2768
-2769
-2770@runtime_checkable
-2771class SupportsRound[T](Protocol):
-2772    """An ABC with one abstract method __round__ that is covariant in its return type."""
+2763    @abstractmethod
+2764    def __index__(self) -> int:
+2765        pass
+2766
+2767
+2768@runtime_checkable
+2769class SupportsAbs[T](Protocol):
+2770    """An ABC with one abstract method __abs__ that is covariant in its return type."""
+2771
+2772    __slots__ = ()
 2773
-2774    __slots__ = ()
-2775
-2776    @abstractmethod
-2777    def __round__(self, ndigits: int = 0) -> T:
-2778        pass
-2779
-2780
-2781def _make_nmtuple(name, types, module, defaults = ()):
-2782    fields = [n for n, t in types]
-2783    types = {n: _type_check(t, f"field {n} annotation must be a type")
-2784             for n, t in types}
-2785    nm_tpl = collections.namedtuple(name, fields,
-2786                                    defaults=defaults, module=module)
-2787    nm_tpl.__annotations__ = nm_tpl.__new__.__annotations__ = types
-2788    return nm_tpl
+2774    @abstractmethod
+2775    def __abs__(self) -> T:
+2776        pass
+2777
+2778
+2779@runtime_checkable
+2780class SupportsRound[T](Protocol):
+2781    """An ABC with one abstract method __round__ that is covariant in its return type."""
+2782
+2783    __slots__ = ()
+2784
+2785    @abstractmethod
+2786    def __round__(self, ndigits: int = 0) -> T:
+2787        pass
+2788
 2789
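The Supports* classes above are runtime-checkable protocols, so isinstance() performs a structural check for the single special method each one names; a few spot checks::

    from decimal import Decimal
    from typing import SupportsAbs, SupportsFloat, SupportsIndex

    assert isinstance(-7, SupportsAbs)                # int defines __abs__
    assert isinstance(Decimal("1.5"), SupportsFloat)  # Decimal defines __float__
    assert isinstance(True, SupportsIndex)            # bool inherits __index__
    assert not isinstance("text", SupportsIndex)      # str has no __index__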
-2790
-2791# attributes prohibited to set in NamedTuple class syntax
-2792_prohibited = frozenset({'__new__', '__init__', '__slots__', '__getnewargs__',
-2793                         '_fields', '_field_defaults',
-2794                         '_make', '_replace', '_asdict', '_source'})
-2795
-2796_special = frozenset({'__module__', '__name__', '__annotations__'})
-2797
+2790def _make_nmtuple(name, types, module, defaults = ()):
+2791    fields = [n for n, t in types]
+2792    types = {n: _type_check(t, f"field {n} annotation must be a type")
+2793             for n, t in types}
+2794    nm_tpl = collections.namedtuple(name, fields,
+2795                                    defaults=defaults, module=module)
+2796    nm_tpl.__annotations__ = nm_tpl.__new__.__annotations__ = types
+2797    return nm_tpl
 2798
-2799class NamedTupleMeta(type):
-2800    def __new__(cls, typename, bases, ns):
-2801        assert _NamedTuple in bases
-2802        for base in bases:
-2803            if base is not _NamedTuple and base is not Generic:
-2804                raise TypeError(
-2805                    'can only inherit from a NamedTuple type and Generic')
-2806        bases = tuple(tuple if base is _NamedTuple else base for base in bases)
-2807        types = ns.get('__annotations__', {})
-2808        default_names = []
-2809        for field_name in types:
-2810            if field_name in ns:
-2811                default_names.append(field_name)
-2812            elif default_names:
-2813                raise TypeError(f"Non-default namedtuple field {field_name} "
-2814                                f"cannot follow default field"
-2815                                f"{'s' if len(default_names) > 1 else ''} "
-2816                                f"{', '.join(default_names)}")
-2817        nm_tpl = _make_nmtuple(typename, types.items(),
-2818                               defaults=[ns[n] for n in default_names],
-2819                               module=ns['__module__'])
-2820        nm_tpl.__bases__ = bases
-2821        if Generic in bases:
-2822            class_getitem = _generic_class_getitem
-2823            nm_tpl.__class_getitem__ = classmethod(class_getitem)
-2824        # update from user namespace without overriding special namedtuple attributes
-2825        for key in ns:
-2826            if key in _prohibited:
-2827                raise AttributeError("Cannot overwrite NamedTuple attribute " + key)
-2828            elif key not in _special and key not in nm_tpl._fields:
-2829                setattr(nm_tpl, key, ns[key])
+2799
+2800# attributes prohibited to set in NamedTuple class syntax
+2801_prohibited = frozenset({'__new__', '__init__', '__slots__', '__getnewargs__',
+2802                         '_fields', '_field_defaults',
+2803                         '_make', '_replace', '_asdict', '_source'})
+2804
+2805_special = frozenset({'__module__', '__name__', '__annotations__'})
+2806
+2807
+2808class NamedTupleMeta(type):
+2809    def __new__(cls, typename, bases, ns):
+2810        assert _NamedTuple in bases
+2811        for base in bases:
+2812            if base is not _NamedTuple and base is not Generic:
+2813                raise TypeError(
+2814                    'can only inherit from a NamedTuple type and Generic')
+2815        bases = tuple(tuple if base is _NamedTuple else base for base in bases)
+2816        types = ns.get('__annotations__', {})
+2817        default_names = []
+2818        for field_name in types:
+2819            if field_name in ns:
+2820                default_names.append(field_name)
+2821            elif default_names:
+2822                raise TypeError(f"Non-default namedtuple field {field_name} "
+2823                                f"cannot follow default field"
+2824                                f"{'s' if len(default_names) > 1 else ''} "
+2825                                f"{', '.join(default_names)}")
+2826        nm_tpl = _make_nmtuple(typename, types.items(),
+2827                               defaults=[ns[n] for n in default_names],
+2828                               module=ns['__module__'])
+2829        nm_tpl.__bases__ = bases
 2830        if Generic in bases:
-2831            nm_tpl.__init_subclass__()
-2832        return nm_tpl
-2833
-2834
-2835def NamedTuple(typename, fields=None, /, **kwargs):
-2836    """Typed version of namedtuple.
-2837
-2838    Usage::
-2839
-2840        class Employee(NamedTuple):
-2841            name: str
-2842            id: int
+2831            class_getitem = _generic_class_getitem
+2832            nm_tpl.__class_getitem__ = classmethod(class_getitem)
+2833        # update from user namespace without overriding special namedtuple attributes
+2834        for key in ns:
+2835            if key in _prohibited:
+2836                raise AttributeError("Cannot overwrite NamedTuple attribute " + key)
+2837            elif key not in _special and key not in nm_tpl._fields:
+2838                setattr(nm_tpl, key, ns[key])
+2839        if Generic in bases:
+2840            nm_tpl.__init_subclass__()
+2841        return nm_tpl
+2842
 2843
-2844    This is equivalent to::
-2845
-2846        Employee = collections.namedtuple('Employee', ['name', 'id'])
-2847
-2848    The resulting class has an extra __annotations__ attribute, giving a
-2849    dict that maps field names to types.  (The field names are also in
-2850    the _fields attribute, which is part of the namedtuple API.)
-2851    An alternative equivalent functional syntax is also accepted::
+2844def NamedTuple(typename, fields=None, /, **kwargs):
+2845    """Typed version of namedtuple.
+2846
+2847    Usage::
+2848
+2849        class Employee(NamedTuple):
+2850            name: str
+2851            id: int
 2852
-2853        Employee = NamedTuple('Employee', [('name', str), ('id', int)])
-2854    """
-2855    if fields is None:
-2856        fields = kwargs.items()
-2857    elif kwargs:
-2858        raise TypeError("Either list of fields or keywords"
-2859                        " can be provided to NamedTuple, not both")
-2860    nt = _make_nmtuple(typename, fields, module=_caller())
-2861    nt.__orig_bases__ = (NamedTuple,)
-2862    return nt
-2863
-2864_NamedTuple = type.__new__(NamedTupleMeta, 'NamedTuple', (), {})
-2865
-2866def _namedtuple_mro_entries(bases):
-2867    assert NamedTuple in bases
-2868    return (_NamedTuple,)
-2869
-2870NamedTuple.__mro_entries__ = _namedtuple_mro_entries
-2871
+2853    This is equivalent to::
+2854
+2855        Employee = collections.namedtuple('Employee', ['name', 'id'])
+2856
+2857    The resulting class has an extra __annotations__ attribute, giving a
+2858    dict that maps field names to types.  (The field names are also in
+2859    the _fields attribute, which is part of the namedtuple API.)
+2860    An alternative equivalent functional syntax is also accepted::
+2861
+2862        Employee = NamedTuple('Employee', [('name', str), ('id', int)])
+2863    """
+2864    if fields is None:
+2865        fields = kwargs.items()
+2866    elif kwargs:
+2867        raise TypeError("Either list of fields or keywords"
+2868                        " can be provided to NamedTuple, not both")
+2869    nt = _make_nmtuple(typename, fields, module=_caller())
+2870    nt.__orig_bases__ = (NamedTuple,)
+2871    return nt
 2872
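The functional form handled by the NamedTuple() fallback above can be sketched as follows (Employee is just an example name):

    from typing import NamedTuple

    Employee = NamedTuple('Employee', [('name', str), ('id', int)])

    assert Employee._fields == ('name', 'id')
    assert Employee.__annotations__ == {'name': str, 'id': int}
    assert Employee('Ada', 1).id == 1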
-2873class _TypedDictMeta(type):
-2874    def __new__(cls, name, bases, ns, total=True):
-2875        """Create a new typed dict class object.
-2876
-2877        This method is called when TypedDict is subclassed,
-2878        or when TypedDict is instantiated. This way
-2879        TypedDict supports all three syntax forms described in its docstring.
-2880        Subclasses and instances of TypedDict return actual dictionaries.
-2881        """
-2882        for base in bases:
-2883            if type(base) is not _TypedDictMeta and base is not Generic:
-2884                raise TypeError('cannot inherit from both a TypedDict type '
-2885                                'and a non-TypedDict base class')
-2886
-2887        if any(issubclass(b, Generic) for b in bases):
-2888            generic_base = (Generic,)
-2889        else:
-2890            generic_base = ()
-2891
-2892        tp_dict = type.__new__(_TypedDictMeta, name, (*generic_base, dict), ns)
-2893
-2894        if not hasattr(tp_dict, '__orig_bases__'):
-2895            tp_dict.__orig_bases__ = bases
-2896
-2897        annotations = {}
-2898        own_annotations = ns.get('__annotations__', {})
-2899        msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type"
-2900        own_annotations = {
-2901            n: _type_check(tp, msg, module=tp_dict.__module__)
-2902            for n, tp in own_annotations.items()
-2903        }
-2904        required_keys = set()
-2905        optional_keys = set()
-2906
-2907        for base in bases:
-2908            annotations.update(base.__dict__.get('__annotations__', {}))
-2909
-2910            base_required = base.__dict__.get('__required_keys__', set())
-2911            required_keys |= base_required
-2912            optional_keys -= base_required
-2913
-2914            base_optional = base.__dict__.get('__optional_keys__', set())
-2915            required_keys -= base_optional
-2916            optional_keys |= base_optional
-2917
-2918        annotations.update(own_annotations)
-2919        for annotation_key, annotation_type in own_annotations.items():
-2920            annotation_origin = get_origin(annotation_type)
-2921            if annotation_origin is Annotated:
-2922                annotation_args = get_args(annotation_type)
-2923                if annotation_args:
-2924                    annotation_type = annotation_args[0]
-2925                    annotation_origin = get_origin(annotation_type)
+2873_NamedTuple = type.__new__(NamedTupleMeta, 'NamedTuple', (), {})
+2874
+2875def _namedtuple_mro_entries(bases):
+2876    assert NamedTuple in bases
+2877    return (_NamedTuple,)
+2878
+2879NamedTuple.__mro_entries__ = _namedtuple_mro_entries
+2880
+2881
+2882class _TypedDictMeta(type):
+2883    def __new__(cls, name, bases, ns, total=True):
+2884        """Create a new typed dict class object.
+2885
+2886        This method is called when TypedDict is subclassed,
+2887        or when TypedDict is instantiated. This way
+2888        TypedDict supports all three syntax forms described in its docstring.
+2889        Subclasses and instances of TypedDict return actual dictionaries.
+2890        """
+2891        for base in bases:
+2892            if type(base) is not _TypedDictMeta and base is not Generic:
+2893                raise TypeError('cannot inherit from both a TypedDict type '
+2894                                'and a non-TypedDict base class')
+2895
+2896        if any(issubclass(b, Generic) for b in bases):
+2897            generic_base = (Generic,)
+2898        else:
+2899            generic_base = ()
+2900
+2901        tp_dict = type.__new__(_TypedDictMeta, name, (*generic_base, dict), ns)
+2902
+2903        if not hasattr(tp_dict, '__orig_bases__'):
+2904            tp_dict.__orig_bases__ = bases
+2905
+2906        annotations = {}
+2907        own_annotations = ns.get('__annotations__', {})
+2908        msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type"
+2909        own_annotations = {
+2910            n: _type_check(tp, msg, module=tp_dict.__module__)
+2911            for n, tp in own_annotations.items()
+2912        }
+2913        required_keys = set()
+2914        optional_keys = set()
+2915
+2916        for base in bases:
+2917            annotations.update(base.__dict__.get('__annotations__', {}))
+2918
+2919            base_required = base.__dict__.get('__required_keys__', set())
+2920            required_keys |= base_required
+2921            optional_keys -= base_required
+2922
+2923            base_optional = base.__dict__.get('__optional_keys__', set())
+2924            required_keys -= base_optional
+2925            optional_keys |= base_optional
 2926
-2927            if annotation_origin is Required:
-2928                is_required = True
-2929            elif annotation_origin is NotRequired:
-2930                is_required = False
-2931            else:
-2932                is_required = total
-2933
-2934            if is_required:
-2935                required_keys.add(annotation_key)
-2936                optional_keys.discard(annotation_key)
-2937            else:
-2938                optional_keys.add(annotation_key)
-2939                required_keys.discard(annotation_key)
-2940
-2941        assert required_keys.isdisjoint(optional_keys), (
-2942            f"Required keys overlap with optional keys in {name}:"
-2943            f" {required_keys=}, {optional_keys=}"
-2944        )
-2945        tp_dict.__annotations__ = annotations
-2946        tp_dict.__required_keys__ = frozenset(required_keys)
-2947        tp_dict.__optional_keys__ = frozenset(optional_keys)
-2948        if not hasattr(tp_dict, '__total__'):
-2949            tp_dict.__total__ = total
-2950        return tp_dict
-2951
-2952    __call__ = dict  # static method
-2953
-2954    def __subclasscheck__(cls, other):
-2955        # Typed dicts are only for static structural subtyping.
-2956        raise TypeError('TypedDict does not support instance and class checks')
-2957
-2958    __instancecheck__ = __subclasscheck__
-2959
+2927        annotations.update(own_annotations)
+2928        for annotation_key, annotation_type in own_annotations.items():
+2929            annotation_origin = get_origin(annotation_type)
+2930            if annotation_origin is Annotated:
+2931                annotation_args = get_args(annotation_type)
+2932                if annotation_args:
+2933                    annotation_type = annotation_args[0]
+2934                    annotation_origin = get_origin(annotation_type)
+2935
+2936            if annotation_origin is Required:
+2937                is_required = True
+2938            elif annotation_origin is NotRequired:
+2939                is_required = False
+2940            else:
+2941                is_required = total
+2942
+2943            if is_required:
+2944                required_keys.add(annotation_key)
+2945                optional_keys.discard(annotation_key)
+2946            else:
+2947                optional_keys.add(annotation_key)
+2948                required_keys.discard(annotation_key)
+2949
+2950        assert required_keys.isdisjoint(optional_keys), (
+2951            f"Required keys overlap with optional keys in {name}:"
+2952            f" {required_keys=}, {optional_keys=}"
+2953        )
+2954        tp_dict.__annotations__ = annotations
+2955        tp_dict.__required_keys__ = frozenset(required_keys)
+2956        tp_dict.__optional_keys__ = frozenset(optional_keys)
+2957        if not hasattr(tp_dict, '__total__'):
+2958            tp_dict.__total__ = total
+2959        return tp_dict
 2960
-2961def TypedDict(typename, fields=None, /, *, total=True, **kwargs):
-2962    """A simple typed namespace. At runtime it is equivalent to a plain dict.
-2963
-2964    TypedDict creates a dictionary type such that a type checker will expect all
-2965    instances to have a certain set of keys, where each key is
-2966    associated with a value of a consistent type. This expectation
-2967    is not checked at runtime.
+2961    __call__ = dict  # static method
+2962
+2963    def __subclasscheck__(cls, other):
+2964        # Typed dicts are only for static structural subtyping.
+2965        raise TypeError('TypedDict does not support instance and class checks')
+2966
+2967    __instancecheck__ = __subclasscheck__
 2968
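A small sketch of the runtime behaviour wired up in _TypedDictMeta above (Point2D is illustrative): instances are plain dicts, and class/instance checks are rejected:

    from typing import TypedDict

    class Point2D(TypedDict):
        x: int
        y: int

    p = Point2D(x=1, y=2)
    assert type(p) is dict          # __call__ = dict

    try:
        isinstance(p, Point2D)
    except TypeError:
        pass                        # instance and class checks are not supported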
-2969    Usage::
-2970
-2971        >>> class Point2D(TypedDict):
-2972        ...     x: int
-2973        ...     y: int
-2974        ...     label: str
-2975        ...
-2976        >>> a: Point2D = {'x': 1, 'y': 2, 'label': 'good'}  # OK
-2977        >>> b: Point2D = {'z': 3, 'label': 'bad'}           # Fails type check
-2978        >>> Point2D(x=1, y=2, label='first') == dict(x=1, y=2, label='first')
-2979        True
-2980
-2981    The type info can be accessed via the Point2D.__annotations__ dict, and
-2982    the Point2D.__required_keys__ and Point2D.__optional_keys__ frozensets.
-2983    TypedDict supports an additional equivalent form::
-2984
-2985        Point2D = TypedDict('Point2D', {'x': int, 'y': int, 'label': str})
-2986
-2987    By default, all keys must be present in a TypedDict. It is possible
-2988    to override this by specifying totality::
+2969
+2970def TypedDict(typename, fields=None, /, *, total=True, **kwargs):
+2971    """A simple typed namespace. At runtime it is equivalent to a plain dict.
+2972
+2973    TypedDict creates a dictionary type such that a type checker will expect all
+2974    instances to have a certain set of keys, where each key is
+2975    associated with a value of a consistent type. This expectation
+2976    is not checked at runtime.
+2977
+2978    Usage::
+2979
+2980        >>> class Point2D(TypedDict):
+2981        ...     x: int
+2982        ...     y: int
+2983        ...     label: str
+2984        ...
+2985        >>> a: Point2D = {'x': 1, 'y': 2, 'label': 'good'}  # OK
+2986        >>> b: Point2D = {'z': 3, 'label': 'bad'}           # Fails type check
+2987        >>> Point2D(x=1, y=2, label='first') == dict(x=1, y=2, label='first')
+2988        True
 2989
-2990        class Point2D(TypedDict, total=False):
-2991            x: int
-2992            y: int
+2990    The type info can be accessed via the Point2D.__annotations__ dict, and
+2991    the Point2D.__required_keys__ and Point2D.__optional_keys__ frozensets.
+2992    TypedDict supports an additional equivalent form::
 2993
-2994    This means that a Point2D TypedDict can have any of the keys omitted. A type
-2995    checker is only expected to support a literal False or True as the value of
-2996    the total argument. True is the default, and makes all items defined in the
-2997    class body be required.
+2994        Point2D = TypedDict('Point2D', {'x': int, 'y': int, 'label': str})
+2995
+2996    By default, all keys must be present in a TypedDict. It is possible
+2997    to override this by specifying totality::
 2998
-2999    The Required and NotRequired special forms can also be used to mark
-3000    individual keys as being required or not required::
-3001
-3002        class Point2D(TypedDict):
-3003            x: int               # the "x" key must always be present (Required is the default)
-3004            y: NotRequired[int]  # the "y" key can be omitted
-3005
-3006    See PEP 655 for more details on Required and NotRequired.
-3007    """
-3008    if fields is None:
-3009        fields = kwargs
-3010    elif kwargs:
-3011        raise TypeError("TypedDict takes either a dict or keyword arguments,"
-3012                        " but not both")
-3013    if kwargs:
-3014        warnings.warn(
-3015            "The kwargs-based syntax for TypedDict definitions is deprecated "
-3016            "in Python 3.11, will be removed in Python 3.13, and may not be "
-3017            "understood by third-party type checkers.",
-3018            DeprecationWarning,
-3019            stacklevel=2,
-3020        )
-3021
-3022    ns = {'__annotations__': dict(fields)}
-3023    module = _caller()
-3024    if module is not None:
-3025        # Setting correct module is necessary to make typed dict classes pickleable.
-3026        ns['__module__'] = module
-3027
-3028    td = _TypedDictMeta(typename, (), ns, total=total)
-3029    td.__orig_bases__ = (TypedDict,)
-3030    return td
-3031
-3032_TypedDict = type.__new__(_TypedDictMeta, 'TypedDict', (), {})
-3033TypedDict.__mro_entries__ = lambda bases: (_TypedDict,)
-3034
-3035
-3036@_SpecialForm
-3037def Required(self, parameters):
-3038    """Special typing construct to mark a TypedDict key as required.
-3039
-3040    This is mainly useful for total=False TypedDicts.
-3041
-3042    For example::
+2999        class Point2D(TypedDict, total=False):
+3000            x: int
+3001            y: int
+3002
+3003    This means that a Point2D TypedDict can have any of the keys omitted. A type
+3004    checker is only expected to support a literal False or True as the value of
+3005    the total argument. True is the default, and makes all items defined in the
+3006    class body be required.
+3007
+3008    The Required and NotRequired special forms can also be used to mark
+3009    individual keys as being required or not required::
+3010
+3011        class Point2D(TypedDict):
+3012            x: int               # the "x" key must always be present (Required is the default)
+3013            y: NotRequired[int]  # the "y" key can be omitted
+3014
+3015    See PEP 655 for more details on Required and NotRequired.
+3016    """
+3017    if fields is None:
+3018        fields = kwargs
+3019    elif kwargs:
+3020        raise TypeError("TypedDict takes either a dict or keyword arguments,"
+3021                        " but not both")
+3022    if kwargs:
+3023        warnings.warn(
+3024            "The kwargs-based syntax for TypedDict definitions is deprecated "
+3025            "in Python 3.11, will be removed in Python 3.13, and may not be "
+3026            "understood by third-party type checkers.",
+3027            DeprecationWarning,
+3028            stacklevel=2,
+3029        )
+3030
+3031    ns = {'__annotations__': dict(fields)}
+3032    module = _caller()
+3033    if module is not None:
+3034        # Setting correct module is necessary to make typed dict classes pickleable.
+3035        ns['__module__'] = module
+3036
+3037    td = _TypedDictMeta(typename, (), ns, total=total)
+3038    td.__orig_bases__ = (TypedDict,)
+3039    return td
+3040
+3041_TypedDict = type.__new__(_TypedDictMeta, 'TypedDict', (), {})
+3042TypedDict.__mro_entries__ = lambda bases: (_TypedDict,)
 3043
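How totality and inheritance combine in the key bookkeeping above can be sketched like this (class names are illustrative):

    from typing import TypedDict

    class Coordinate(TypedDict):
        x: int
        y: int

    class Label(Coordinate, total=False):
        name: str

    assert Label.__required_keys__ == frozenset({'x', 'y'})
    assert Label.__optional_keys__ == frozenset({'name'})
    assert Label.__total__ is False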
-3044        class Movie(TypedDict, total=False):
-3045            title: Required[str]
-3046            year: int
-3047
-3048        m = Movie(
-3049            title='The Matrix',  # typechecker error if key is omitted
-3050            year=1999,
-3051        )
+3044
+3045@_SpecialForm
+3046def Required(self, parameters):
+3047    """Special typing construct to mark a TypedDict key as required.
+3048
+3049    This is mainly useful for total=False TypedDicts.
+3050
+3051    For example::
 3052
-3053    There is no runtime checking that a required key is actually provided
-3054    when instantiating a related TypedDict.
-3055    """
-3056    item = _type_check(parameters, f'{self._name} accepts only a single type.')
-3057    return _GenericAlias(self, (item,))
-3058
-3059
-3060@_SpecialForm
-3061def NotRequired(self, parameters):
-3062    """Special typing construct to mark a TypedDict key as potentially missing.
-3063
-3064    For example::
-3065
-3066        class Movie(TypedDict):
-3067            title: str
-3068            year: NotRequired[int]
-3069
-3070        m = Movie(
-3071            title='The Matrix',  # typechecker error if key is omitted
-3072            year=1999,
-3073        )
-3074    """
-3075    item = _type_check(parameters, f'{self._name} accepts only a single type.')
-3076    return _GenericAlias(self, (item,))
-3077
+3053        class Movie(TypedDict, total=False):
+3054            title: Required[str]
+3055            year: int
+3056
+3057        m = Movie(
+3058            title='The Matrix',  # typechecker error if key is omitted
+3059            year=1999,
+3060        )
+3061
+3062    There is no runtime checking that a required key is actually provided
+3063    when instantiating a related TypedDict.
+3064    """
+3065    item = _type_check(parameters, f'{self._name} accepts only a single type.')
+3066    return _GenericAlias(self, (item,))
+3067
+3068
+3069@_SpecialForm
+3070def NotRequired(self, parameters):
+3071    """Special typing construct to mark a TypedDict key as potentially missing.
+3072
+3073    For example::
+3074
+3075        class Movie(TypedDict):
+3076            title: str
+3077            year: NotRequired[int]
 3078
-3079class NewType:
-3080    """NewType creates simple unique types with almost zero runtime overhead.
-3081
-3082    NewType(name, tp) is considered a subtype of tp
-3083    by static type checkers. At runtime, NewType(name, tp) returns
-3084    a dummy callable that simply returns its argument.
-3085
-3086    Usage::
+3079        m = Movie(
+3080            title='The Matrix',  # typechecker error if key is omitted
+3081            year=1999,
+3082        )
+3083    """
+3084    item = _type_check(parameters, f'{self._name} accepts only a single type.')
+3085    return _GenericAlias(self, (item,))
+3086
 3087
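The effect of Required and NotRequired on the key sets computed above, as a brief illustrative sketch (Movie and Song are arbitrary names):

    from typing import TypedDict, Required, NotRequired

    class Movie(TypedDict, total=False):
        title: Required[str]        # required despite total=False
        year: int

    class Song(TypedDict):
        title: str
        year: NotRequired[int]      # optional despite total=True

    assert Movie.__required_keys__ == frozenset({'title'})
    assert Movie.__optional_keys__ == frozenset({'year'})
    assert Song.__optional_keys__ == frozenset({'year'})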
-3088        UserId = NewType('UserId', int)
-3089
-3090        def name_by_id(user_id: UserId) -> str:
-3091            ...
-3092
-3093        UserId('user')          # Fails type check
+3088class NewType:
+3089    """NewType creates simple unique types with almost zero runtime overhead.
+3090
+3091    NewType(name, tp) is considered a subtype of tp
+3092    by static type checkers. At runtime, NewType(name, tp) returns
+3093    a dummy callable that simply returns its argument.
 3094
-3095        name_by_id(42)          # Fails type check
-3096        name_by_id(UserId(42))  # OK
-3097
-3098        num = UserId(5) + 1     # type: int
-3099    """
-3100
-3101    __call__ = _idfunc
-3102
-3103    def __init__(self, name, tp):
-3104        self.__qualname__ = name
-3105        if '.' in name:
-3106            name = name.rpartition('.')[-1]
-3107        self.__name__ = name
-3108        self.__supertype__ = tp
-3109        def_mod = _caller()
-3110        if def_mod != 'typing':
-3111            self.__module__ = def_mod
-3112
-3113    def __mro_entries__(self, bases):
-3114        # We defined __mro_entries__ to get a better error message
-3115        # if a user attempts to subclass a NewType instance. bpo-46170
-3116        superclass_name = self.__name__
-3117
-3118        class Dummy:
-3119            def __init_subclass__(cls):
-3120                subclass_name = cls.__name__
-3121                raise TypeError(
-3122                    f"Cannot subclass an instance of NewType. Perhaps you were looking for: "
-3123                    f"`{subclass_name} = NewType({subclass_name!r}, {superclass_name})`"
-3124                )
-3125
-3126        return (Dummy,)
-3127
-3128    def __repr__(self):
-3129        return f'{self.__module__}.{self.__qualname__}'
-3130
-3131    def __reduce__(self):
-3132        return self.__qualname__
-3133
-3134    def __or__(self, other):
-3135        return Union[self, other]
+3095    Usage::
+3096
+3097        UserId = NewType('UserId', int)
+3098
+3099        def name_by_id(user_id: UserId) -> str:
+3100            ...
+3101
+3102        UserId('user')          # Fails type check
+3103
+3104        name_by_id(42)          # Fails type check
+3105        name_by_id(UserId(42))  # OK
+3106
+3107        num = UserId(5) + 1     # type: int
+3108    """
+3109
+3110    __call__ = _idfunc
+3111
+3112    def __init__(self, name, tp):
+3113        self.__qualname__ = name
+3114        if '.' in name:
+3115            name = name.rpartition('.')[-1]
+3116        self.__name__ = name
+3117        self.__supertype__ = tp
+3118        def_mod = _caller()
+3119        if def_mod != 'typing':
+3120            self.__module__ = def_mod
+3121
+3122    def __mro_entries__(self, bases):
+3123        # We defined __mro_entries__ to get a better error message
+3124        # if a user attempts to subclass a NewType instance. bpo-46170
+3125        superclass_name = self.__name__
+3126
+3127        class Dummy:
+3128            def __init_subclass__(cls):
+3129                subclass_name = cls.__name__
+3130                raise TypeError(
+3131                    f"Cannot subclass an instance of NewType. Perhaps you were looking for: "
+3132                    f"`{subclass_name} = NewType({subclass_name!r}, {superclass_name})`"
+3133                )
+3134
+3135        return (Dummy,)
 3136
-3137    def __ror__(self, other):
-3138        return Union[other, self]
+3137    def __repr__(self):
+3138        return f'{self.__module__}.{self.__qualname__}'
 3139
-3140
-3141# Python-version-specific alias (Python 2: unicode; Python 3: str)
-3142Text = str
-3143
-3144
-3145# Constant that's True when type checking, but False here.
-3146TYPE_CHECKING = False
-3147
+3140    def __reduce__(self):
+3141        return self.__qualname__
+3142
+3143    def __or__(self, other):
+3144        return Union[self, other]
+3145
+3146    def __ror__(self, other):
+3147        return Union[other, self]
 3148
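A quick sketch of NewType's runtime behaviour as implemented above (UserId follows the docstring's own example):

    from typing import NewType

    UserId = NewType('UserId', int)

    assert UserId(5) == 5                  # identity call: value returned unchanged
    assert type(UserId(5)) is int
    assert UserId.__supertype__ is int

    MaybeUser = UserId | None              # supported via __or__/__ror__ above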
-3149class IO(Generic[AnyStr]):
-3150    """Generic base class for TextIO and BinaryIO.
-3151
-3152    This is an abstract, generic version of the return of open().
+3149
+3150# Python-version-specific alias (Python 2: unicode; Python 3: str)
+3151Text = str
+3152
 3153
-3154    NOTE: This does not distinguish between the different possible
-3155    classes (text vs. binary, read vs. write vs. read/write,
-3156    append-only, unbuffered).  The TextIO and BinaryIO subclasses
-3157    below capture the distinctions between text vs. binary, which is
-3158    pervasive in the interface; however we currently do not offer a
-3159    way to track the other distinctions in the type system.
-3160    """
-3161
-3162    __slots__ = ()
-3163
-3164    @property
-3165    @abstractmethod
-3166    def mode(self) -> str:
-3167        pass
-3168
-3169    @property
-3170    @abstractmethod
-3171    def name(self) -> str:
-3172        pass
-3173
+3154# Constant that's True when type checking, but False here.
+3155TYPE_CHECKING = False
+3156
+3157
+3158class IO(Generic[AnyStr]):
+3159    """Generic base class for TextIO and BinaryIO.
+3160
+3161    This is an abstract, generic version of the return of open().
+3162
+3163    NOTE: This does not distinguish between the different possible
+3164    classes (text vs. binary, read vs. write vs. read/write,
+3165    append-only, unbuffered).  The TextIO and BinaryIO subclasses
+3166    below capture the distinctions between text vs. binary, which is
+3167    pervasive in the interface; however we currently do not offer a
+3168    way to track the other distinctions in the type system.
+3169    """
+3170
+3171    __slots__ = ()
+3172
+3173    @property
 3174    @abstractmethod
-3175    def close(self) -> None:
+3175    def mode(self) -> str:
 3176        pass
 3177
 3178    @property
 3179    @abstractmethod
-3180    def closed(self) -> bool:
+3180    def name(self) -> str:
 3181        pass
 3182
 3183    @abstractmethod
-3184    def fileno(self) -> int:
+3184    def close(self) -> None:
 3185        pass
 3186
-3187    @abstractmethod
-3188    def flush(self) -> None:
-3189        pass
-3190
-3191    @abstractmethod
-3192    def isatty(self) -> bool:
-3193        pass
-3194
-3195    @abstractmethod
-3196    def read(self, n: int = -1) -> AnyStr:
-3197        pass
-3198
-3199    @abstractmethod
-3200    def readable(self) -> bool:
-3201        pass
-3202
-3203    @abstractmethod
-3204    def readline(self, limit: int = -1) -> AnyStr:
-3205        pass
-3206
-3207    @abstractmethod
-3208    def readlines(self, hint: int = -1) -> List[AnyStr]:
-3209        pass
-3210
-3211    @abstractmethod
-3212    def seek(self, offset: int, whence: int = 0) -> int:
-3213        pass
-3214
-3215    @abstractmethod
-3216    def seekable(self) -> bool:
-3217        pass
-3218
-3219    @abstractmethod
-3220    def tell(self) -> int:
-3221        pass
-3222
-3223    @abstractmethod
-3224    def truncate(self, size: int = None) -> int:
-3225        pass
-3226
-3227    @abstractmethod
-3228    def writable(self) -> bool:
-3229        pass
-3230
-3231    @abstractmethod
-3232    def write(self, s: AnyStr) -> int:
-3233        pass
-3234
-3235    @abstractmethod
-3236    def writelines(self, lines: List[AnyStr]) -> None:
-3237        pass
-3238
-3239    @abstractmethod
-3240    def __enter__(self) -> 'IO[AnyStr]':
-3241        pass
-3242
-3243    @abstractmethod
-3244    def __exit__(self, type, value, traceback) -> None:
-3245        pass
-3246
+3187    @property
+3188    @abstractmethod
+3189    def closed(self) -> bool:
+3190        pass
+3191
+3192    @abstractmethod
+3193    def fileno(self) -> int:
+3194        pass
+3195
+3196    @abstractmethod
+3197    def flush(self) -> None:
+3198        pass
+3199
+3200    @abstractmethod
+3201    def isatty(self) -> bool:
+3202        pass
+3203
+3204    @abstractmethod
+3205    def read(self, n: int = -1) -> AnyStr:
+3206        pass
+3207
+3208    @abstractmethod
+3209    def readable(self) -> bool:
+3210        pass
+3211
+3212    @abstractmethod
+3213    def readline(self, limit: int = -1) -> AnyStr:
+3214        pass
+3215
+3216    @abstractmethod
+3217    def readlines(self, hint: int = -1) -> List[AnyStr]:
+3218        pass
+3219
+3220    @abstractmethod
+3221    def seek(self, offset: int, whence: int = 0) -> int:
+3222        pass
+3223
+3224    @abstractmethod
+3225    def seekable(self) -> bool:
+3226        pass
+3227
+3228    @abstractmethod
+3229    def tell(self) -> int:
+3230        pass
+3231
+3232    @abstractmethod
+3233    def truncate(self, size: int = None) -> int:
+3234        pass
+3235
+3236    @abstractmethod
+3237    def writable(self) -> bool:
+3238        pass
+3239
+3240    @abstractmethod
+3241    def write(self, s: AnyStr) -> int:
+3242        pass
+3243
+3244    @abstractmethod
+3245    def writelines(self, lines: List[AnyStr]) -> None:
+3246        pass
 3247
-3248class BinaryIO(IO[bytes]):
-3249    """Typed version of the return of open() in binary mode."""
-3250
-3251    __slots__ = ()
-3252
-3253    @abstractmethod
-3254    def write(self, s: Union[bytes, bytearray]) -> int:
-3255        pass
+3248    @abstractmethod
+3249    def __enter__(self) -> 'IO[AnyStr]':
+3250        pass
+3251
+3252    @abstractmethod
+3253    def __exit__(self, type, value, traceback) -> None:
+3254        pass
+3255
 3256
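An illustrative, annotation-only use of the generic IO base class defined above (the function name is arbitrary):

    from typing import IO, AnyStr

    def copy_stream(src: IO[AnyStr], dst: IO[AnyStr]) -> int:
        # Works for both text (IO[str]) and binary (IO[bytes]) streams.
        data = src.read()
        dst.write(data)
        return len(data)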
-3257    @abstractmethod
-3258    def __enter__(self) -> 'BinaryIO':
-3259        pass
-3260
+3257class BinaryIO(IO[bytes]):
+3258    """Typed version of the return of open() in binary mode."""
+3259
+3260    __slots__ = ()
 3261
-3262class TextIO(IO[str]):
-3263    """Typed version of the return of open() in text mode."""
-3264
-3265    __slots__ = ()
-3266
-3267    @property
-3268    @abstractmethod
-3269    def buffer(self) -> BinaryIO:
-3270        pass
-3271
-3272    @property
-3273    @abstractmethod
-3274    def encoding(self) -> str:
-3275        pass
-3276
-3277    @property
-3278    @abstractmethod
-3279    def errors(self) -> Optional[str]:
-3280        pass
-3281
-3282    @property
-3283    @abstractmethod
-3284    def line_buffering(self) -> bool:
-3285        pass
-3286
-3287    @property
-3288    @abstractmethod
-3289    def newlines(self) -> Any:
-3290        pass
-3291
+3262    @abstractmethod
+3263    def write(self, s: Union[bytes, bytearray]) -> int:
+3264        pass
+3265
+3266    @abstractmethod
+3267    def __enter__(self) -> 'BinaryIO':
+3268        pass
+3269
+3270
+3271class TextIO(IO[str]):
+3272    """Typed version of the return of open() in text mode."""
+3273
+3274    __slots__ = ()
+3275
+3276    @property
+3277    @abstractmethod
+3278    def buffer(self) -> BinaryIO:
+3279        pass
+3280
+3281    @property
+3282    @abstractmethod
+3283    def encoding(self) -> str:
+3284        pass
+3285
+3286    @property
+3287    @abstractmethod
+3288    def errors(self) -> Optional[str]:
+3289        pass
+3290
+3291    @property
 3292    @abstractmethod
-3293    def __enter__(self) -> 'TextIO':
+3293    def line_buffering(self) -> bool:
 3294        pass
 3295
-3296
-3297class _DeprecatedType(type):
-3298    def __getattribute__(cls, name):
-3299        if name not in {"__dict__", "__module__", "__doc__"} and name in cls.__dict__:
-3300            warnings.warn(
-3301                f"{cls.__name__} is deprecated, import directly "
-3302                f"from typing instead. {cls.__name__} will be removed "
-3303                "in Python 3.13.",
-3304                DeprecationWarning,
-3305                stacklevel=2,
-3306            )
-3307        return super().__getattribute__(name)
-3308
-3309
-3310class io(metaclass=_DeprecatedType):
-3311    """Wrapper namespace for IO generic classes."""
-3312
-3313    __all__ = ['IO', 'TextIO', 'BinaryIO']
-3314    IO = IO
-3315    TextIO = TextIO
-3316    BinaryIO = BinaryIO
+3296    @property
+3297    @abstractmethod
+3298    def newlines(self) -> Any:
+3299        pass
+3300
+3301    @abstractmethod
+3302    def __enter__(self) -> 'TextIO':
+3303        pass
+3304
+3305
+3306class _DeprecatedType(type):
+3307    def __getattribute__(cls, name):
+3308        if name not in {"__dict__", "__module__", "__doc__"} and name in cls.__dict__:
+3309            warnings.warn(
+3310                f"{cls.__name__} is deprecated, import directly "
+3311                f"from typing instead. {cls.__name__} will be removed "
+3312                "in Python 3.13.",
+3313                DeprecationWarning,
+3314                stacklevel=2,
+3315            )
+3316        return super().__getattribute__(name)
 3317
 3318
-3319io.__name__ = __name__ + '.io'
-3320sys.modules[io.__name__] = io
+3319class io(metaclass=_DeprecatedType):
+3320    """Wrapper namespace for IO generic classes."""
 3321
-3322Pattern = _alias(stdlib_re.Pattern, 1)
-3323Match = _alias(stdlib_re.Match, 1)
-3324
-3325class re(metaclass=_DeprecatedType):
-3326    """Wrapper namespace for re type aliases."""
+3322    __all__ = ['IO', 'TextIO', 'BinaryIO']
+3323    IO = IO
+3324    TextIO = TextIO
+3325    BinaryIO = BinaryIO
+3326
 3327
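A sketch of the deprecation machinery above: attribute access on the wrapper namespaces goes through _DeprecatedType and emits a DeprecationWarning:

    import warnings
    import typing

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        typing.io.IO                       # triggers _DeprecatedType.__getattribute__
    assert any(issubclass(w.category, DeprecationWarning) for w in caught)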
-3328    __all__ = ['Pattern', 'Match']
-3329    Pattern = Pattern
-3330    Match = Match
-3331
-3332
-3333re.__name__ = __name__ + '.re'
-3334sys.modules[re.__name__] = re
-3335
+3328io.__name__ = __name__ + '.io'
+3329sys.modules[io.__name__] = io
+3330
+3331Pattern = _alias(stdlib_re.Pattern, 1)
+3332Match = _alias(stdlib_re.Match, 1)
+3333
+3334class re(metaclass=_DeprecatedType):
+3335    """Wrapper namespace for re type aliases."""
 3336
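The Pattern and Match aliases registered above remain usable in annotations; an illustrative sketch:

    import re
    from typing import Match, Optional, Pattern

    word: Pattern[str] = re.compile(r"[a-z]+")
    hit: Optional[Match[str]] = word.match("hello")
    assert hit is not None and hit.group() == "hello"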
-3337def reveal_type[T](obj: T, /) -> T:
-3338    """Ask a static type checker to reveal the inferred type of an expression.
-3339
-3340    When a static type checker encounters a call to ``reveal_type()``,
-3341    it will emit the inferred type of the argument::
-3342
-3343        x: int = 1
-3344        reveal_type(x)
+3337    __all__ = ['Pattern', 'Match']
+3338    Pattern = Pattern
+3339    Match = Match
+3340
+3341
+3342re.__name__ = __name__ + '.re'
+3343sys.modules[re.__name__] = re
+3344
 3345
-3346    Running a static type checker (e.g., mypy) on this example
-3347    will produce output similar to 'Revealed type is "builtins.int"'.
+3346def reveal_type[T](obj: T, /) -> T:
+3347    """Ask a static type checker to reveal the inferred type of an expression.
 3348
-3349    At runtime, the function prints the runtime type of the
-3350    argument and returns the argument unchanged.
-3351    """
-3352    print(f"Runtime type is {type(obj).__name__!r}", file=sys.stderr)
-3353    return obj
+3349    When a static type checker encounters a call to ``reveal_type()``,
+3350    it will emit the inferred type of the argument::
+3351
+3352        x: int = 1
+3353        reveal_type(x)
 3354
-3355
-3356class _IdentityCallable(Protocol):
-3357    def __call__[T](self, arg: T, /) -> T:
-3358        ...
-3359
-3360
-3361def dataclass_transform(
-3362    *,
-3363    eq_default: bool = True,
-3364    order_default: bool = False,
-3365    kw_only_default: bool = False,
-3366    frozen_default: bool = False,
-3367    field_specifiers: tuple[type[Any] | Callable[..., Any], ...] = (),
-3368    **kwargs: Any,
-3369) -> _IdentityCallable:
-3370    """Decorator to mark an object as providing dataclass-like behaviour.
-3371
-3372    The decorator can be applied to a function, class, or metaclass.
-3373
-3374    Example usage with a decorator function::
-3375
-3376        @dataclass_transform()
-3377        def create_model[T](cls: type[T]) -> type[T]:
-3378            ...
-3379            return cls
+3355    Running a static type checker (e.g., mypy) on this example
+3356    will produce output similar to 'Revealed type is "builtins.int"'.
+3357
+3358    At runtime, the function prints the runtime type of the
+3359    argument and returns the argument unchanged.
+3360    """
+3361    print(f"Runtime type is {type(obj).__name__!r}", file=sys.stderr)
+3362    return obj
+3363
+3364
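At runtime the reveal_type() defined above only reports and returns its argument; a short illustrative check:

    from typing import reveal_type

    x = reveal_type(len("abc"))     # prints "Runtime type is 'int'" to stderr
    assert x == 3                   # the argument is returned unchanged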
+3365class _IdentityCallable(Protocol):
+3366    def __call__[T](self, arg: T, /) -> T:
+3367        ...
+3368
+3369
+3370def dataclass_transform(
+3371    *,
+3372    eq_default: bool = True,
+3373    order_default: bool = False,
+3374    kw_only_default: bool = False,
+3375    frozen_default: bool = False,
+3376    field_specifiers: tuple[type[Any] | Callable[..., Any], ...] = (),
+3377    **kwargs: Any,
+3378) -> _IdentityCallable:
+3379    """Decorator to mark an object as providing dataclass-like behaviour.
 3380
-3381        @create_model
-3382        class CustomerModel:
-3383            id: int
-3384            name: str
-3385
-3386    On a base class::
-3387
-3388        @dataclass_transform()
-3389        class ModelBase: ...
-3390
-3391        class CustomerModel(ModelBase):
+3381    The decorator can be applied to a function, class, or metaclass.
+3382
+3383    Example usage with a decorator function::
+3384
+3385        @dataclass_transform()
+3386        def create_model[T](cls: type[T]) -> type[T]:
+3387            ...
+3388            return cls
+3389
+3390        @create_model
+3391        class CustomerModel:
 3392            id: int
 3393            name: str
 3394
-3395    On a metaclass::
+3395    On a base class::
 3396
 3397        @dataclass_transform()
-3398        class ModelMeta(type): ...
+3398        class ModelBase: ...
 3399
-3400        class ModelBase(metaclass=ModelMeta): ...
-3401
-3402        class CustomerModel(ModelBase):
-3403            id: int
-3404            name: str
+3400        class CustomerModel(ModelBase):
+3401            id: int
+3402            name: str
+3403
+3404    On a metaclass::
 3405
-3406    The ``CustomerModel`` classes defined above will
-3407    be treated by type checkers similarly to classes created with
-3408    ``@dataclasses.dataclass``.
-3409    For example, type checkers will assume these classes have
-3410    ``__init__`` methods that accept ``id`` and ``name``.
-3411
-3412    The arguments to this decorator can be used to customize this behavior:
-3413    - ``eq_default`` indicates whether the ``eq`` parameter is assumed to be
-3414        ``True`` or ``False`` if it is omitted by the caller.
-3415    - ``order_default`` indicates whether the ``order`` parameter is
-3416        assumed to be True or False if it is omitted by the caller.
-3417    - ``kw_only_default`` indicates whether the ``kw_only`` parameter is
-3418        assumed to be True or False if it is omitted by the caller.
-3419    - ``frozen_default`` indicates whether the ``frozen`` parameter is
-3420        assumed to be True or False if it is omitted by the caller.
-3421    - ``field_specifiers`` specifies a static list of supported classes
-3422        or functions that describe fields, similar to ``dataclasses.field()``.
-3423    - Arbitrary other keyword arguments are accepted in order to allow for
-3424        possible future extensions.
-3425
-3426    At runtime, this decorator records its arguments in the
-3427    ``__dataclass_transform__`` attribute on the decorated object.
-3428    It has no other runtime effect.
-3429
-3430    See PEP 681 for more details.
-3431    """
-3432    def decorator(cls_or_fn):
-3433        cls_or_fn.__dataclass_transform__ = {
-3434            "eq_default": eq_default,
-3435            "order_default": order_default,
-3436            "kw_only_default": kw_only_default,
-3437            "frozen_default": frozen_default,
-3438            "field_specifiers": field_specifiers,
-3439            "kwargs": kwargs,
-3440        }
-3441        return cls_or_fn
-3442    return decorator
-3443
-3444
-3445type _Func = Callable[..., Any]
-3446
-3447
-3448def override[F: _Func](method: F, /) -> F:
-3449    """Indicate that a method is intended to override a method in a base class.
-3450
-3451    Usage::
+3406        @dataclass_transform()
+3407        class ModelMeta(type): ...
+3408
+3409        class ModelBase(metaclass=ModelMeta): ...
+3410
+3411        class CustomerModel(ModelBase):
+3412            id: int
+3413            name: str
+3414
+3415    The ``CustomerModel`` classes defined above will
+3416    be treated by type checkers similarly to classes created with
+3417    ``@dataclasses.dataclass``.
+3418    For example, type checkers will assume these classes have
+3419    ``__init__`` methods that accept ``id`` and ``name``.
+3420
+3421    The arguments to this decorator can be used to customize this behavior:
+3422    - ``eq_default`` indicates whether the ``eq`` parameter is assumed to be
+3423        ``True`` or ``False`` if it is omitted by the caller.
+3424    - ``order_default`` indicates whether the ``order`` parameter is
+3425        assumed to be True or False if it is omitted by the caller.
+3426    - ``kw_only_default`` indicates whether the ``kw_only`` parameter is
+3427        assumed to be True or False if it is omitted by the caller.
+3428    - ``frozen_default`` indicates whether the ``frozen`` parameter is
+3429        assumed to be True or False if it is omitted by the caller.
+3430    - ``field_specifiers`` specifies a static list of supported classes
+3431        or functions that describe fields, similar to ``dataclasses.field()``.
+3432    - Arbitrary other keyword arguments are accepted in order to allow for
+3433        possible future extensions.
+3434
+3435    At runtime, this decorator records its arguments in the
+3436    ``__dataclass_transform__`` attribute on the decorated object.
+3437    It has no other runtime effect.
+3438
+3439    See PEP 681 for more details.
+3440    """
+3441    def decorator(cls_or_fn):
+3442        cls_or_fn.__dataclass_transform__ = {
+3443            "eq_default": eq_default,
+3444            "order_default": order_default,
+3445            "kw_only_default": kw_only_default,
+3446            "frozen_default": frozen_default,
+3447            "field_specifiers": field_specifiers,
+3448            "kwargs": kwargs,
+3449        }
+3450        return cls_or_fn
+3451    return decorator
 3452
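The only runtime effect of dataclass_transform(), as implemented above, is the recorded argument dict; a short sketch (create_model is an arbitrary name):

    from typing import dataclass_transform

    @dataclass_transform(kw_only_default=True)
    def create_model(cls):
        return cls      # a real implementation would synthesize __init__ and friends

    spec = create_model.__dataclass_transform__
    assert spec["kw_only_default"] is True
    assert spec["eq_default"] is True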
-3453        class Base:
-3454            def method(self) -> None:
-3455                pass
+3453
+3454type _Func = Callable[..., Any]
+3455
 3456
-3457        class Child(Base):
-3458            @override
-3459            def method(self) -> None:
-3460                super().method()
+3457def override[F: _Func](method: F, /) -> F:
+3458    """Indicate that a method is intended to override a method in a base class.
+3459
+3460    Usage::
 3461
-3462    When this decorator is applied to a method, the type checker will
-3463    validate that it overrides a method or attribute with the same name on a
-3464    base class.  This helps prevent bugs that may occur when a base class is
-3465    changed without an equivalent change to a child class.
-3466
-3467    There is no runtime checking of this property. The decorator attempts to
-3468    set the ``__override__`` attribute to ``True`` on the decorated object to
-3469    allow runtime introspection.
+3462        class Base:
+3463            def method(self) -> None:
+3464                pass
+3465
+3466        class Child(Base):
+3467            @override
+3468            def method(self) -> None:
+3469                super().method()
 3470
-3471    See PEP 698 for details.
-3472    """
-3473    try:
-3474        method.__override__ = True
-3475    except (AttributeError, TypeError):
-3476        # Skip the attribute silently if it is not writable.
-3477        # AttributeError happens if the object has __slots__ or a
-3478        # read-only property, TypeError if it's a builtin class.
-3479        pass
-3480    return method
+3471    When this decorator is applied to a method, the type checker will
+3472    validate that it overrides a method or attribute with the same name on a
+3473    base class.  This helps prevent bugs that may occur when a base class is
+3474    changed without an equivalent change to a child class.
+3475
+3476    There is no runtime checking of this property. The decorator attempts to
+3477    set the ``__override__`` attribute to ``True`` on the decorated object to
+3478    allow runtime introspection.
+3479
+3480    See PEP 698 for details.
+3481    """
+3482    try:
+3483        method.__override__ = True
+3484    except (AttributeError, TypeError):
+3485        # Skip the attribute silently if it is not writable.
+3486        # AttributeError happens if the object has __slots__ or a
+3487        # read-only property, TypeError if it's a builtin class.
+3488        pass
+3489    return method
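A minimal sketch of override()'s runtime effect as defined above (Base and Child mirror the docstring's example):

    from typing import override

    class Base:
        def method(self) -> None: ...

    class Child(Base):
        @override
        def method(self) -> None: ...

    assert Child.method.__override__ is True   # informational only; no runtime check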
 
diff --git a/_parse_plugins/plugins.db b/_parse_plugins/plugins.db
index d23e375a..303d4650 100644
Binary files a/_parse_plugins/plugins.db and b/_parse_plugins/plugins.db differ
diff --git a/api/abc.html b/api/abc.html
index 10047c84..b6cfa095 100644
--- a/api/abc.html
+++ b/api/abc.html
@@ -1789,7 +1789,7 @@

pyxu.abc.operator

k (Integer) – Number of singular values to compute.

  • gpu (bool) – If True the singular value decomposition is performed on the GPU.

  • dtype (DType) – Working precision of the linear operator.

  • -
  • kwargs (Mapping) – Additional kwargs accepted by svds().

  • +
  • kwargs (Mapping) – Additional kwargs accepted by svds().

  • Returns:
diff --git a/api/index.html b/api/index.html
index 1fd3c2aa..c69c9b7e 100644
--- a/api/index.html
+++ b/api/index.html
@@ -873,7 +873,7 @@

    SciPy

    from_sciop(cls, sp_op)

    -

    Wrap a LinearOperator as a 2D LinOp (or sub-class thereof).

    +

    Wrap a LinearOperator as a 2D LinOp (or sub-class thereof).

diff --git a/api/info.deps.html b/api/info.deps.html
index 9955ccd2..b1989450 100644
--- a/api/info.deps.html
+++ b/api/info.deps.html
@@ -644,7 +644,7 @@

    Python module associated to an array backend.

    Parameters:
    -

    linalg (bool) – Return the linear-algebra submodule with identical API to scipy.sparse.linalg.

    +

    linalg (bool) – Return the linear-algebra submodule with identical API to scipy.sparse.linalg.

    Return type:

    ModuleType

diff --git a/api/operator.interop.html b/api/operator.interop.html
index ff3ee56b..edee4d4c 100644
--- a/api/operator.interop.html
+++ b/api/operator.interop.html
@@ -604,11 +604,11 @@

    SciPy
    from_sciop(cls, sp_op)[source]#
    -

    Wrap a LinearOperator as a 2D LinOp (or sub-class thereof).

    +

    Wrap a LinearOperator as a 2D LinOp (or sub-class thereof).

    Parameters:
      -
    • sp_op (LinearOperator) – (N, M) Linear CPU/GPU operator compliant with SciPy’s interface.

    • +
    • sp_op (LinearOperator) – (N, M) Linear CPU/GPU operator compliant with SciPy’s interface.

    • cls (Type[OpT])

diff --git a/api/operator/linop.html b/api/operator/linop.html
index 7ad34456..509896bd 100644
--- a/api/operator/linop.html
+++ b/api/operator/linop.html
@@ -941,8 +941,8 @@

    Transforms
    • dim_shape (NDArrayShape) – (M1,…,MD) dimensions of the input \(\mathbf{x} \in \mathbb{C}^{M_{1} \times\cdots\times M_{D}}\).

    • axes (NDArrayAxis) – Axes over which to compute the FFT. If not given, all axes are used.

    • -
    • kwargs (dict) –

      Extra kwargs passed to scipy.fft.fftn() or cupyx.scipy.fft.fftn().

      -

      Supported parameters for scipy.fft.fftn() are:

      +
    • kwargs (dict) –

      Extra kwargs passed to scipy.fft.fftn() or cupyx.scipy.fft.fftn().

      +

      Supported parameters for scipy.fft.fftn() are:

      • workers: int = None

diff --git a/api/operator/map.html b/api/operator/map.html
index 3ef60e5b..83d985b1 100644
--- a/api/operator/map.html
+++ b/api/operator/map.html
@@ -2390,7 +2390,7 @@

        Misc
        k (Integer) – Number of singular values to compute.

      • gpu (bool) – If True the singular value decomposition is performed on the GPU.

      • dtype (DType) – Working precision of the linear operator.

      • -
      • kwargs (Mapping) – Additional kwargs accepted by svds().

      • +
      • kwargs (Mapping) – Additional kwargs accepted by svds().

    Returns:
    diff --git a/build/html/WARNINGS.log b/build/html/WARNINGS.log index 7b932767..2f79d652 100644 --- a/build/html/WARNINGS.log +++ b/build/html/WARNINGS.log @@ -183,10 +183,10 @@ Hands-On Example 🎓 /home/runner/work/pyxu-org.github.io/pyxu-org.github.io/pyxu/doc/guide/lipschitz.ipynb:51: WARNING: File not found: 'api/abc.html#pyxu.abc.LinOp' /home/runner/work/pyxu-org.github.io/pyxu-org.github.io/pyxu/doc/guide/lipschitz.ipynb:285: WARNING: File not found: 'api/index.html' /home/runner/work/pyxu-org.github.io/pyxu-org.github.io/pyxu/doc/guide/lipschitz.ipynb:285: WARNING: File not found: 'api/abc/arithmetic.html' -/opt/hostedtoolcache/Python/3.12.4/x64/lib/python3.12/site-packages/pyxu/opt/solver/pds.py:docstring of pyxu.opt.solver.pds.ADMM:152: WARNING: Exception occurred in plotting opt-solver-3 +/opt/hostedtoolcache/Python/3.12.5/x64/lib/python3.12/site-packages/pyxu/opt/solver/pds.py:docstring of pyxu.opt.solver.pds.ADMM:152: WARNING: Exception occurred in plotting opt-solver-3 from /home/runner/work/pyxu-org.github.io/pyxu-org.github.io/pyxu/doc/api/opt.solver.rst: Traceback (most recent call last): - File "/opt/hostedtoolcache/Python/3.12.4/x64/lib/python3.12/site-packages/matplotlib/sphinxext/plot_directive.py", line 552, in _run_code + File "/opt/hostedtoolcache/Python/3.12.5/x64/lib/python3.12/site-packages/matplotlib/sphinxext/plot_directive.py", line 552, in _run_code exec(code, ns) File "", line 29, in TypeError: SquaredL2Norm.__init__() got an unexpected keyword argument 'dim' @@ -199,22 +199,22 @@ except ImportError as e: block source: !pip install pyxu-gradient-descent /home/runner/work/pyxu-org.github.io/pyxu-org.github.io/pyxu/doc/examples/deblur.ipynb:278: WARNING: File not found: 'api/opt.solver.html#pyxu.opt.solver.PD3O' WARNING: missing attribute to_sciop in object pyxu.abc.LinOp -/opt/hostedtoolcache/Python/3.12.4/x64/lib/python3.12/site-packages/pyxu/abc/operator.py:docstring of pyxu.abc.operator.LinOp.asarray:8: WARNING: Inline emphasis start-string without end-string. -/opt/hostedtoolcache/Python/3.12.4/x64/lib/python3.12/site-packages/pyxu/abc/operator.py:docstring of pyxu.abc.operator.LinOp.asarray:8: WARNING: Inline emphasis start-string without end-string. -/opt/hostedtoolcache/Python/3.12.4/x64/lib/python3.12/site-packages/pyxu/abc/operator.py:docstring of pyxu.abc.operator.LinOp.from_array:3: WARNING: Inline emphasis start-string without end-string. -/opt/hostedtoolcache/Python/3.12.4/x64/lib/python3.12/site-packages/pyxu/abc/operator.py:docstring of pyxu.abc.operator.LinOp.from_array:3: WARNING: Inline emphasis start-string without end-string. -/opt/hostedtoolcache/Python/3.12.4/x64/lib/python3.12/site-packages/pyxu/operator/interop/torch.py:docstring of pyxu.operator.interop.torch.from_torch:29: WARNING: Inline interpreted text or phrase reference start-string without end-string. +/opt/hostedtoolcache/Python/3.12.5/x64/lib/python3.12/site-packages/pyxu/abc/operator.py:docstring of pyxu.abc.operator.LinOp.asarray:8: WARNING: Inline emphasis start-string without end-string. +/opt/hostedtoolcache/Python/3.12.5/x64/lib/python3.12/site-packages/pyxu/abc/operator.py:docstring of pyxu.abc.operator.LinOp.asarray:8: WARNING: Inline emphasis start-string without end-string. +/opt/hostedtoolcache/Python/3.12.5/x64/lib/python3.12/site-packages/pyxu/abc/operator.py:docstring of pyxu.abc.operator.LinOp.from_array:3: WARNING: Inline emphasis start-string without end-string. 
+/opt/hostedtoolcache/Python/3.12.5/x64/lib/python3.12/site-packages/pyxu/abc/operator.py:docstring of pyxu.abc.operator.LinOp.from_array:3: WARNING: Inline emphasis start-string without end-string. +/opt/hostedtoolcache/Python/3.12.5/x64/lib/python3.12/site-packages/pyxu/operator/interop/torch.py:docstring of pyxu.operator.interop.torch.from_torch:29: WARNING: Inline interpreted text or phrase reference start-string without end-string. WARNING: missing attribute relative_indices in object pyxu.operator.Stencil -/opt/hostedtoolcache/Python/3.12.4/x64/lib/python3.12/site-packages/pyxu/operator/linop/diff.py:docstring of pyxu.operator.linop.diff.Divergence:77: WARNING: Exception occurred in plotting linop-12 +/opt/hostedtoolcache/Python/3.12.5/x64/lib/python3.12/site-packages/pyxu/operator/linop/diff.py:docstring of pyxu.operator.linop.diff.Divergence:77: WARNING: Exception occurred in plotting linop-12 from /home/runner/work/pyxu-org.github.io/pyxu-org.github.io/pyxu/doc/api/operator/linop.rst: Traceback (most recent call last): - File "/opt/hostedtoolcache/Python/3.12.4/x64/lib/python3.12/site-packages/matplotlib/sphinxext/plot_directive.py", line 552, in _run_code + File "/opt/hostedtoolcache/Python/3.12.5/x64/lib/python3.12/site-packages/matplotlib/sphinxext/plot_directive.py", line 552, in _run_code exec(code, ns) File "", line 14, in - File "/opt/hostedtoolcache/Python/3.12.4/x64/lib/python3.12/site-packages/pyxu/abc/operator.py", line 337, in __mul__ + File "/opt/hostedtoolcache/Python/3.12.5/x64/lib/python3.12/site-packages/pyxu/abc/operator.py", line 337, in __mul__ return arithmetic.ChainRule(lhs=self, rhs=other).op() ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "/opt/hostedtoolcache/Python/3.12.4/x64/lib/python3.12/site-packages/pyxu/abc/arithmetic.py", line 991, in __init__ + File "/opt/hostedtoolcache/Python/3.12.5/x64/lib/python3.12/site-packages/pyxu/abc/arithmetic.py", line 991, in __init__ assert lhs.dim_shape == rhs.codim_shape, "Operator dimensions are not compatible." ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ AssertionError: Operator dimensions are not compatible. @@ -236,30 +236,30 @@ AssertionError: Operator dimensions are not compatible. /home/runner/work/pyxu-org.github.io/pyxu-org.github.io/pyxu/doc/references.rst:23: WARNING: Citation [cuFINUFFT] is not referenced. /home/runner/work/pyxu-org.github.io/pyxu-org.github.io/pyxu/doc/references.rst:25: WARNING: Citation [PoCS_Dykstra] is not referenced. /home/runner/work/pyxu-org.github.io/pyxu-org.github.io/pyxu/doc/references.rst:29: WARNING: Citation [UQ_MCMC] is not referenced. 
-/opt/hostedtoolcache/Python/3.12.4/x64/lib/python3.12/site-packages/pyxu/operator/interop/jax.py:docstring of pyxu.operator.interop.jax._from_jax:: WARNING: py:class reference target not found: pyxu.operator.interop.jax.JaxArray
-/opt/hostedtoolcache/Python/3.12.4/x64/lib/python3.12/site-packages/pyxu/operator/interop/jax.py:docstring of pyxu.operator.interop.jax._to_jax:: WARNING: py:class reference target not found: pyxu.operator.interop.jax.JaxArray
-/opt/hostedtoolcache/Python/3.12.4/x64/lib/python3.12/site-packages/pyxu/operator/interop/torch.py:docstring of pyxu.operator.interop.torch.from_torch:33: WARNING: py:class reference target not found: -----
-/opt/hostedtoolcache/Python/3.12.4/x64/lib/python3.12/site-packages/pyxu/operator/interop/torch.py:docstring of pyxu.operator.interop.torch.from_torch:34: WARNING: py:class reference target not found: * If provided
-/opt/hostedtoolcache/Python/3.12.4/x64/lib/python3.12/site-packages/pyxu/operator/interop/torch.py:docstring of pyxu.operator.interop.torch.from_torch:34: WARNING: py:class reference target not found: arithmetic methods must abide exactly
-/opt/hostedtoolcache/Python/3.12.4/x64/lib/python3.12/site-packages/pyxu/operator/interop/torch.py:docstring of pyxu.operator.interop.torch.from_torch:34: WARNING: py:class reference target not found: the interface below
-/opt/hostedtoolcache/Python/3.12.4/x64/lib/python3.12/site-packages/pyxu/operator/interop/torch.py:docstring of pyxu.operator.interop.torch.from_torch:46: WARNING: py:class reference target not found: * Auto-vectorization consists in decorating `kwargs
-/opt/hostedtoolcache/Python/3.12.4/x64/lib/python3.12/site-packages/pyxu/operator/interop/torch.py:docstring of pyxu.operator.interop.torch.from_torch:48: WARNING: py:class reference target not found: * Arithmetic methods are **not currently JIT-ed** even if `jit
-/opt/hostedtoolcache/Python/3.12.4/x64/lib/python3.12/site-packages/pyxu/operator/interop/torch.py:docstring of pyxu.operator.interop.torch.from_torch:48: WARNING: py:class reference target not found: the
-/opt/hostedtoolcache/Python/3.12.4/x64/lib/python3.12/site-packages/pyxu/operator/interop/torch.py:docstring of pyxu.operator.interop.torch.from_torch:50: WARNING: py:class reference target not found: * For :py:class:`~pyxu.abc.DiffMap
-/opt/hostedtoolcache/Python/3.12.4/x64/lib/python3.12/site-packages/pyxu/operator/interop/torch.py:docstring of pyxu.operator.interop.torch.from_torch:50: WARNING: py:class reference target not found: the methods :py:meth:`~pyxu.abc.DiffMap.jacobian
-/opt/hostedtoolcache/Python/3.12.4/x64/lib/python3.12/site-packages/pyxu/operator/interop/torch.py:docstring of pyxu.operator.interop.torch.from_torch:57: WARNING: py:class reference target not found: .
Warning::
-/opt/hostedtoolcache/Python/3.12.4/x64/lib/python3.12/site-packages/pyxu/operator/linop/pad.py:docstring of pyxu.operator.linop.pad.Pad.__init__:10: WARNING: py:class reference target not found: pyxu.operator.linop.pad.Pad.WidthSpec
-/opt/hostedtoolcache/Python/3.12.4/x64/lib/python3.12/site-packages/pyxu/operator/linop/fft/czt.py:docstring of pyxu.operator.linop.fft.czt.CZT.__init__:6: WARNING: py:class reference target not found: list(int)
-/opt/hostedtoolcache/Python/3.12.4/x64/lib/python3.12/site-packages/pyxu/operator/linop/fft/czt.py:docstring of pyxu.operator.linop.fft.czt.CZT.__init__:8: WARNING: py:class reference target not found: list(complex)
-/opt/hostedtoolcache/Python/3.12.4/x64/lib/python3.12/site-packages/pyxu/operator/linop/fft/czt.py:docstring of pyxu.operator.linop.fft.czt.CZT.__init__:10: WARNING: py:class reference target not found: list(complex)
-/opt/hostedtoolcache/Python/3.12.4/x64/lib/python3.12/site-packages/pyxu/operator/linop/filter.py:docstring of pyxu.operator.linop.filter.MovingAverage:30: WARNING: py:class reference target not found: pyxu.operator.linop.filter.IndexSpec
-/opt/hostedtoolcache/Python/3.12.4/x64/lib/python3.12/site-packages/pyxu/operator/linop/filter.py:docstring of pyxu.operator.linop.filter.MovingAverage:76: WARNING: py:class reference target not found: pyxu.operator.GaussianFilter
-/opt/hostedtoolcache/Python/3.12.4/x64/lib/python3.12/site-packages/pyxu/operator/linop/diff.py:docstring of pyxu.operator.linop.diff.Hessian:62: WARNING: py:class reference target not found: (Integer
-/opt/hostedtoolcache/Python/3.12.4/x64/lib/python3.12/site-packages/pyxu/operator/linop/diff.py:docstring of pyxu.operator.linop.diff.Hessian:62: WARNING: py:class reference target not found: Integer)
-/opt/hostedtoolcache/Python/3.12.4/x64/lib/python3.12/site-packages/pyxu/operator/linop/diff.py:docstring of pyxu.operator.linop.diff.Hessian:62: WARNING: py:class reference target not found: ((Integer
-/opt/hostedtoolcache/Python/3.12.4/x64/lib/python3.12/site-packages/pyxu/operator/linop/diff.py:docstring of pyxu.operator.linop.diff.Hessian:62: WARNING: py:class reference target not found: Integer)
-/opt/hostedtoolcache/Python/3.12.4/x64/lib/python3.12/site-packages/pyxu/operator/linop/diff.py:docstring of pyxu.operator.linop.diff.Hessian:62: WARNING: py:class reference target not found: (Integer
-/opt/hostedtoolcache/Python/3.12.4/x64/lib/python3.12/site-packages/pyxu/operator/linop/diff.py:docstring of pyxu.operator.linop.diff.Hessian:62: WARNING: py:class reference target not found: Integer))
+/opt/hostedtoolcache/Python/3.12.5/x64/lib/python3.12/site-packages/pyxu/operator/interop/jax.py:docstring of pyxu.operator.interop.jax._from_jax:: WARNING: py:class reference target not found: pyxu.operator.interop.jax.JaxArray
+/opt/hostedtoolcache/Python/3.12.5/x64/lib/python3.12/site-packages/pyxu/operator/interop/jax.py:docstring of pyxu.operator.interop.jax._to_jax:: WARNING: py:class reference target not found: pyxu.operator.interop.jax.JaxArray
+/opt/hostedtoolcache/Python/3.12.5/x64/lib/python3.12/site-packages/pyxu/operator/interop/torch.py:docstring of pyxu.operator.interop.torch.from_torch:33: WARNING: py:class reference target not found: -----
+/opt/hostedtoolcache/Python/3.12.5/x64/lib/python3.12/site-packages/pyxu/operator/interop/torch.py:docstring of pyxu.operator.interop.torch.from_torch:34: WARNING: py:class reference target not found: * If provided
+/opt/hostedtoolcache/Python/3.12.5/x64/lib/python3.12/site-packages/pyxu/operator/interop/torch.py:docstring of pyxu.operator.interop.torch.from_torch:34: WARNING: py:class reference target not found: arithmetic methods must abide exactly
+/opt/hostedtoolcache/Python/3.12.5/x64/lib/python3.12/site-packages/pyxu/operator/interop/torch.py:docstring of pyxu.operator.interop.torch.from_torch:34: WARNING: py:class reference target not found: the interface below
+/opt/hostedtoolcache/Python/3.12.5/x64/lib/python3.12/site-packages/pyxu/operator/interop/torch.py:docstring of pyxu.operator.interop.torch.from_torch:46: WARNING: py:class reference target not found: * Auto-vectorization consists in decorating `kwargs
+/opt/hostedtoolcache/Python/3.12.5/x64/lib/python3.12/site-packages/pyxu/operator/interop/torch.py:docstring of pyxu.operator.interop.torch.from_torch:48: WARNING: py:class reference target not found: * Arithmetic methods are **not currently JIT-ed** even if `jit
+/opt/hostedtoolcache/Python/3.12.5/x64/lib/python3.12/site-packages/pyxu/operator/interop/torch.py:docstring of pyxu.operator.interop.torch.from_torch:48: WARNING: py:class reference target not found: the
+/opt/hostedtoolcache/Python/3.12.5/x64/lib/python3.12/site-packages/pyxu/operator/interop/torch.py:docstring of pyxu.operator.interop.torch.from_torch:50: WARNING: py:class reference target not found: * For :py:class:`~pyxu.abc.DiffMap
+/opt/hostedtoolcache/Python/3.12.5/x64/lib/python3.12/site-packages/pyxu/operator/interop/torch.py:docstring of pyxu.operator.interop.torch.from_torch:50: WARNING: py:class reference target not found: the methods :py:meth:`~pyxu.abc.DiffMap.jacobian
+/opt/hostedtoolcache/Python/3.12.5/x64/lib/python3.12/site-packages/pyxu/operator/interop/torch.py:docstring of pyxu.operator.interop.torch.from_torch:57: WARNING: py:class reference target not found: .
Warning::
+/opt/hostedtoolcache/Python/3.12.5/x64/lib/python3.12/site-packages/pyxu/operator/linop/pad.py:docstring of pyxu.operator.linop.pad.Pad.__init__:10: WARNING: py:class reference target not found: pyxu.operator.linop.pad.Pad.WidthSpec
+/opt/hostedtoolcache/Python/3.12.5/x64/lib/python3.12/site-packages/pyxu/operator/linop/fft/czt.py:docstring of pyxu.operator.linop.fft.czt.CZT.__init__:6: WARNING: py:class reference target not found: list(int)
+/opt/hostedtoolcache/Python/3.12.5/x64/lib/python3.12/site-packages/pyxu/operator/linop/fft/czt.py:docstring of pyxu.operator.linop.fft.czt.CZT.__init__:8: WARNING: py:class reference target not found: list(complex)
+/opt/hostedtoolcache/Python/3.12.5/x64/lib/python3.12/site-packages/pyxu/operator/linop/fft/czt.py:docstring of pyxu.operator.linop.fft.czt.CZT.__init__:10: WARNING: py:class reference target not found: list(complex)
+/opt/hostedtoolcache/Python/3.12.5/x64/lib/python3.12/site-packages/pyxu/operator/linop/filter.py:docstring of pyxu.operator.linop.filter.MovingAverage:30: WARNING: py:class reference target not found: pyxu.operator.linop.filter.IndexSpec
+/opt/hostedtoolcache/Python/3.12.5/x64/lib/python3.12/site-packages/pyxu/operator/linop/filter.py:docstring of pyxu.operator.linop.filter.MovingAverage:76: WARNING: py:class reference target not found: pyxu.operator.GaussianFilter
+/opt/hostedtoolcache/Python/3.12.5/x64/lib/python3.12/site-packages/pyxu/operator/linop/diff.py:docstring of pyxu.operator.linop.diff.Hessian:62: WARNING: py:class reference target not found: (Integer
+/opt/hostedtoolcache/Python/3.12.5/x64/lib/python3.12/site-packages/pyxu/operator/linop/diff.py:docstring of pyxu.operator.linop.diff.Hessian:62: WARNING: py:class reference target not found: Integer)
+/opt/hostedtoolcache/Python/3.12.5/x64/lib/python3.12/site-packages/pyxu/operator/linop/diff.py:docstring of pyxu.operator.linop.diff.Hessian:62: WARNING: py:class reference target not found: ((Integer
+/opt/hostedtoolcache/Python/3.12.5/x64/lib/python3.12/site-packages/pyxu/operator/linop/diff.py:docstring of pyxu.operator.linop.diff.Hessian:62: WARNING: py:class reference target not found: Integer)
+/opt/hostedtoolcache/Python/3.12.5/x64/lib/python3.12/site-packages/pyxu/operator/linop/diff.py:docstring of pyxu.operator.linop.diff.Hessian:62: WARNING: py:class reference target not found: (Integer
+/opt/hostedtoolcache/Python/3.12.5/x64/lib/python3.12/site-packages/pyxu/operator/linop/diff.py:docstring of pyxu.operator.linop.diff.Hessian:62: WARNING: py:class reference target not found: Integer))
/home/runner/work/pyxu-org.github.io/pyxu-org.github.io/pyxu/doc/examples/contributing.ipynb:42: WARNING: Lexing literal_block '!pip install pyxu-gradient-descent' as "python" resulted in an error at token: '!'. Retrying in relaxed mode.
/home/runner/work/pyxu-org.github.io/pyxu-org.github.io/pyxu/doc/examples/index.rst:46: WARNING: toctree contains reference to document 'examples/images/index' that doesn't have a title: no link will be generated
/home/runner/work/pyxu-org.github.io/pyxu-org.github.io/pyxu/doc/examples/index.rst:46: WARNING: toctree contains reference to document 'examples/images/index' that doesn't have a title: no link will be generated
diff --git a/guide/diff_ops.html b/guide/diff_ops.html
index c0fc80c6..a9288728 100644
--- a/guide/diff_ops.html
+++ b/guide/diff_ops.html
@@ -292,7 +292,7 @@

    Partial Derivatives

    \[\mathbf{D} f [n] = f[n+1] - f[n]\]

    As we will see below, this is the forward finite difference approximation. This could be implemented in matrix form, in which case it would look like this:

    - [figure: f6c9e076b2d243c28c3f5518b0dc0d94]

    + [figure: 3839104068144c42b454d489eed9bb42]

    Or, it could instead be implemented via a for loop, which, in the case of large input signals, would not require storing a large matrix:

    [1]:
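    For illustration, here is a minimal NumPy sketch (not the notebook's actual cell) of both implementations of the forward difference D f[n] = f[n+1] - f[n]; the signal length N and the random test signal are illustrative choices:

        import numpy as np

        N = 8                                            # illustrative signal length
        f = np.random.default_rng(0).standard_normal(N)  # illustrative test signal

        # Matrix form: each row of D holds -1 on the diagonal and +1 on the
        # super-diagonal, so (D @ f)[n] = f[n+1] - f[n].
        D = np.eye(N - 1, N, k=1) - np.eye(N - 1, N)
        df_matrix = D @ f

        # Loop form: same result without materialising the (N-1) x N matrix.
        df_loop = np.empty(N - 1)
        for n in range(N - 1):
            df_loop[n] = f[n + 1] - f[n]

        assert np.allclose(df_matrix, df_loop)

    Both variants return the same (N-1)-point forward difference; the loop (or an equivalent matrix-free stencil operator) remains practical for signal sizes where the explicit matrix would be too large to store.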
    diff --git a/plot_directive/api/abc-1.pdf b/plot_directive/api/abc-1.pdf
    index 5010b4bd..b1260782 100644
    Binary files a/plot_directive/api/abc-1.pdf and b/plot_directive/api/abc-1.pdf differ
    diff --git a/plot_directive/api/experimental/sampler-1_00_00.hires.png b/plot_directive/api/experimental/sampler-1_00_00.hires.png
    index fe8ec311..33039249 100644
    Binary files a/plot_directive/api/experimental/sampler-1_00_00.hires.png and b/plot_directive/api/experimental/sampler-1_00_00.hires.png differ
    diff --git a/plot_directive/api/experimental/sampler-1_00_00.pdf b/plot_directive/api/experimental/sampler-1_00_00.pdf
    index 0f8dc330..f0f7df1d 100644
    Binary files a/plot_directive/api/experimental/sampler-1_00_00.pdf and b/plot_directive/api/experimental/sampler-1_00_00.pdf differ
    diff --git a/plot_directive/api/experimental/sampler-1_00_00.png b/plot_directive/api/experimental/sampler-1_00_00.png
    index 1aadf09c..d2e4f28a 100644
    Binary files a/plot_directive/api/experimental/sampler-1_00_00.png and b/plot_directive/api/experimental/sampler-1_00_00.png differ
    diff --git a/plot_directive/api/experimental/sampler-1_01_00.hires.png b/plot_directive/api/experimental/sampler-1_01_00.hires.png
    index af0b4807..8a168173 100644
    Binary files a/plot_directive/api/experimental/sampler-1_01_00.hires.png and b/plot_directive/api/experimental/sampler-1_01_00.hires.png differ
    diff --git a/plot_directive/api/experimental/sampler-1_01_00.pdf b/plot_directive/api/experimental/sampler-1_01_00.pdf
    index d1a4f8d9..9503edd1 100644
    Binary files a/plot_directive/api/experimental/sampler-1_01_00.pdf and b/plot_directive/api/experimental/sampler-1_01_00.pdf differ
    diff --git a/plot_directive/api/experimental/sampler-1_01_00.png b/plot_directive/api/experimental/sampler-1_01_00.png
    index 366ddf11..2ddb1949 100644
    Binary files a/plot_directive/api/experimental/sampler-1_01_00.png and b/plot_directive/api/experimental/sampler-1_01_00.png differ
    diff --git a/plot_directive/api/operator/linop-1.pdf b/plot_directive/api/operator/linop-1.pdf
    index 5030e491..b83e35c1 100644
    Binary files a/plot_directive/api/operator/linop-1.pdf and b/plot_directive/api/operator/linop-1.pdf differ
    diff --git a/plot_directive/api/operator/linop-10.pdf b/plot_directive/api/operator/linop-10.pdf
    index 53487372..5737c965 100644
    Binary files a/plot_directive/api/operator/linop-10.pdf and b/plot_directive/api/operator/linop-10.pdf differ
    diff --git a/plot_directive/api/operator/linop-11.pdf b/plot_directive/api/operator/linop-11.pdf
    index 0984968d..a9fced7d 100644
    Binary files a/plot_directive/api/operator/linop-11.pdf and b/plot_directive/api/operator/linop-11.pdf differ
    diff --git a/plot_directive/api/operator/linop-13.pdf b/plot_directive/api/operator/linop-13.pdf
    index af44779f..68e2bbad 100644
    Binary files a/plot_directive/api/operator/linop-13.pdf and b/plot_directive/api/operator/linop-13.pdf differ
    diff --git a/plot_directive/api/operator/linop-14.pdf b/plot_directive/api/operator/linop-14.pdf
    index 658526e8..d8b8ade2 100644
    Binary files a/plot_directive/api/operator/linop-14.pdf and b/plot_directive/api/operator/linop-14.pdf differ
    diff --git a/plot_directive/api/operator/linop-15.pdf b/plot_directive/api/operator/linop-15.pdf
    index 79f50504..c71ee44f 100644
    Binary files a/plot_directive/api/operator/linop-15.pdf and b/plot_directive/api/operator/linop-15.pdf differ
    diff --git a/plot_directive/api/operator/linop-16_00.pdf b/plot_directive/api/operator/linop-16_00.pdf
    index 7dbbcb13..f0d8ddcd 100644
    Binary files a/plot_directive/api/operator/linop-16_00.pdf and b/plot_directive/api/operator/linop-16_00.pdf differ
    diff --git a/plot_directive/api/operator/linop-16_01.pdf b/plot_directive/api/operator/linop-16_01.pdf
    index 4e2021ad..7ab1acf6 100644
    Binary files a/plot_directive/api/operator/linop-16_01.pdf and b/plot_directive/api/operator/linop-16_01.pdf differ
    diff --git a/plot_directive/api/operator/linop-16_02.pdf b/plot_directive/api/operator/linop-16_02.pdf
    index 2e028b5e..739e4eb6 100644
    Binary files a/plot_directive/api/operator/linop-16_02.pdf and b/plot_directive/api/operator/linop-16_02.pdf differ
    diff --git a/plot_directive/api/operator/linop-17_00.pdf b/plot_directive/api/operator/linop-17_00.pdf
    index 5b4c0b5f..9ff625f5 100644
    Binary files a/plot_directive/api/operator/linop-17_00.pdf and b/plot_directive/api/operator/linop-17_00.pdf differ
    diff --git a/plot_directive/api/operator/linop-17_01.pdf b/plot_directive/api/operator/linop-17_01.pdf
    index fa8dbe90..5750920b 100644
    Binary files a/plot_directive/api/operator/linop-17_01.pdf and b/plot_directive/api/operator/linop-17_01.pdf differ
    diff --git a/plot_directive/api/operator/linop-18_00.pdf b/plot_directive/api/operator/linop-18_00.pdf
    index e52fce00..0c5d8dcd 100644
    Binary files a/plot_directive/api/operator/linop-18_00.pdf and b/plot_directive/api/operator/linop-18_00.pdf differ
    diff --git a/plot_directive/api/operator/linop-18_01.pdf b/plot_directive/api/operator/linop-18_01.pdf
    index 5d11bdbf..47ec071c 100644
    Binary files a/plot_directive/api/operator/linop-18_01.pdf and b/plot_directive/api/operator/linop-18_01.pdf differ
    diff --git a/plot_directive/api/operator/linop-18_02.pdf b/plot_directive/api/operator/linop-18_02.pdf
    index 072d4626..3d7799c8 100644
    Binary files a/plot_directive/api/operator/linop-18_02.pdf and b/plot_directive/api/operator/linop-18_02.pdf differ
    diff --git a/plot_directive/api/operator/linop-18_03.pdf b/plot_directive/api/operator/linop-18_03.pdf
    index 6be0b0dc..88ccce7d 100644
    Binary files a/plot_directive/api/operator/linop-18_03.pdf and b/plot_directive/api/operator/linop-18_03.pdf differ
    diff --git a/plot_directive/api/operator/linop-2.pdf b/plot_directive/api/operator/linop-2.pdf
    index 048f2648..c9c1bde6 100644
    Binary files a/plot_directive/api/operator/linop-2.pdf and b/plot_directive/api/operator/linop-2.pdf differ
    diff --git a/plot_directive/api/operator/linop-3.pdf b/plot_directive/api/operator/linop-3.pdf
    index 87ec07d3..7a7f55bd 100644
    Binary files a/plot_directive/api/operator/linop-3.pdf and b/plot_directive/api/operator/linop-3.pdf differ
    diff --git a/plot_directive/api/operator/linop-4.pdf b/plot_directive/api/operator/linop-4.pdf
    index 1132e8e3..00611df5 100644
    Binary files a/plot_directive/api/operator/linop-4.pdf and b/plot_directive/api/operator/linop-4.pdf differ
    diff --git a/plot_directive/api/operator/linop-5.pdf b/plot_directive/api/operator/linop-5.pdf
    index 357fc0f0..e073ccc7 100644
    Binary files a/plot_directive/api/operator/linop-5.pdf and b/plot_directive/api/operator/linop-5.pdf differ
    diff --git a/plot_directive/api/operator/linop-6.pdf b/plot_directive/api/operator/linop-6.pdf
    index 75483872..306c79c6 100644
    Binary files a/plot_directive/api/operator/linop-6.pdf and b/plot_directive/api/operator/linop-6.pdf differ
    diff --git a/plot_directive/api/operator/linop-7_00.pdf b/plot_directive/api/operator/linop-7_00.pdf
    index 5c304f58..5370faf5 100644
    Binary files a/plot_directive/api/operator/linop-7_00.pdf and b/plot_directive/api/operator/linop-7_00.pdf differ
    diff --git a/plot_directive/api/operator/linop-7_01.pdf b/plot_directive/api/operator/linop-7_01.pdf
    index b659fae4..bdcdf55a 100644
    Binary files a/plot_directive/api/operator/linop-7_01.pdf and b/plot_directive/api/operator/linop-7_01.pdf differ
    diff --git a/plot_directive/api/operator/linop-7_02.pdf b/plot_directive/api/operator/linop-7_02.pdf
    index 29c37d89..40270551 100644
    Binary files a/plot_directive/api/operator/linop-7_02.pdf and b/plot_directive/api/operator/linop-7_02.pdf differ
    diff --git a/plot_directive/api/operator/linop-7_03.pdf b/plot_directive/api/operator/linop-7_03.pdf
    index 4f88643f..c0097481 100644
    Binary files a/plot_directive/api/operator/linop-7_03.pdf and b/plot_directive/api/operator/linop-7_03.pdf differ
    diff --git a/plot_directive/api/operator/linop-8_00.pdf b/plot_directive/api/operator/linop-8_00.pdf
    index e415a2a0..ee054148 100644
    Binary files a/plot_directive/api/operator/linop-8_00.pdf and b/plot_directive/api/operator/linop-8_00.pdf differ
    diff --git a/plot_directive/api/operator/linop-8_01.pdf b/plot_directive/api/operator/linop-8_01.pdf
    index 480b9e74..df6a5fd1 100644
    Binary files a/plot_directive/api/operator/linop-8_01.pdf and b/plot_directive/api/operator/linop-8_01.pdf differ
    diff --git a/plot_directive/api/operator/linop-9_00.pdf b/plot_directive/api/operator/linop-9_00.pdf
    index d287764e..d1172219 100644
    Binary files a/plot_directive/api/operator/linop-9_00.pdf and b/plot_directive/api/operator/linop-9_00.pdf differ
    diff --git a/plot_directive/api/operator/linop-9_01.pdf b/plot_directive/api/operator/linop-9_01.pdf
    index b3a2e1f7..694e63f0 100644
    Binary files a/plot_directive/api/operator/linop-9_01.pdf and b/plot_directive/api/operator/linop-9_01.pdf differ
    diff --git a/plot_directive/api/operator/linop-9_02.pdf b/plot_directive/api/operator/linop-9_02.pdf
    index cc483863..412624b2 100644
    Binary files a/plot_directive/api/operator/linop-9_02.pdf and b/plot_directive/api/operator/linop-9_02.pdf differ
    diff --git a/plot_directive/api/opt-solver-1.pdf b/plot_directive/api/opt-solver-1.pdf
    index 97a958db..393dc1b6 100644
    Binary files a/plot_directive/api/opt-solver-1.pdf and b/plot_directive/api/opt-solver-1.pdf differ
    diff --git a/plot_directive/api/opt-solver-2.pdf b/plot_directive/api/opt-solver-2.pdf
    index 5e64acc2..6e7df199 100644
    Binary files a/plot_directive/api/opt-solver-2.pdf and b/plot_directive/api/opt-solver-2.pdf differ
    diff --git a/plot_directive/api/util-1.pdf b/plot_directive/api/util-1.pdf
    index 64b52f80..99d0c3eb 100644
    Binary files a/plot_directive/api/util-1.pdf and b/plot_directive/api/util-1.pdf differ
    diff --git a/plot_directive/api/util-2.pdf b/plot_directive/api/util-2.pdf
    index 07b62360..9fc748ac 100644
    Binary files a/plot_directive/api/util-2.pdf and b/plot_directive/api/util-2.pdf differ