diff --git a/.gitignore b/.gitignore
index a4aa204..2b0d75f 100644
--- a/.gitignore
+++ b/.gitignore
@@ -80,8 +80,6 @@ ENV/
 
 # Dev utils
 dev.py
-benchmark.py
-profile_.py
 
 # Test fixtures
 comparison_regression_suite.yaml
diff --git a/jsonpath/env.py b/jsonpath/env.py
index 6aa16a8..dd4d2b7 100644
--- a/jsonpath/env.py
+++ b/jsonpath/env.py
@@ -36,11 +36,11 @@
 from .parse import Parser
 from .path import CompoundJSONPath
 from .path import JSONPath
-from .stream import TokenStream
 from .token import TOKEN_EOF
 from .token import TOKEN_INTERSECTION
 from .token import TOKEN_UNION
 from .token import Token
+from .token import TokenStream
 
 if TYPE_CHECKING:
     from io import IOBase
diff --git a/jsonpath/match.py b/jsonpath/match.py
index 1f39059..0925cc1 100644
--- a/jsonpath/match.py
+++ b/jsonpath/match.py
@@ -37,7 +37,6 @@ class JSONPathMatch:
         "obj",
         "parent",
         "parts",
-        "path",
         "root",
     )
 
@@ -49,7 +48,6 @@ def __init__(
         self,
         *,
         filter_context: FilterContextVars,
         obj: object,
         parent: Optional[JSONPathMatch],
-        path: str,
         parts: Tuple[PathPart, ...],
         root: Union[Sequence[Any], Mapping[str, Any]],
     ) -> None:
@@ -58,12 +56,16 @@
         self.obj: object = obj
         self.parent: Optional[JSONPathMatch] = parent
         self.parts: Tuple[PathPart, ...] = parts
-        self.path: str = path
         self.root: Union[Sequence[Any], Mapping[str, Any]] = root
 
     def __str__(self) -> str:
         return f"{_truncate(str(self.obj), 5)!r} @ {_truncate(self.path, 5)}"
 
+    @property
+    def path(self) -> str:
+        """The canonical string representation of the path to this match."""
+        return "$" + "".join((_path_repr(p) for p in self.parts))
+
     def add_child(self, *children: JSONPathMatch) -> None:
         """Append one or more children to this match."""
         self.children.extend(children)
@@ -86,6 +88,14 @@ def _truncate(val: str, num: int, end: str = "...") -> str:
     return " ".join(words[:num]) + end
 
 
+def _path_repr(part: Union[str, int]) -> str:
+    if isinstance(part, str):
+        if len(part) > 1 and part.startswith(("#", "~")):
+            return f"[{part}]"
+        return f"['{part}']"
+    return f"[{part}]"
+
+
 class NodeList(List[JSONPathMatch]):
     """List of JSONPathMatch objects, analogous to the spec's nodelist."""
diff --git a/jsonpath/parse.py b/jsonpath/parse.py
index 2e9c053..5473ae2 100644
--- a/jsonpath/parse.py
+++ b/jsonpath/parse.py
@@ -97,7 +97,7 @@
 
 if TYPE_CHECKING:
     from .env import JSONPathEnvironment
-    from .stream import TokenStream
+    from .token import TokenStream
 
 # ruff: noqa: D102
 
@@ -197,6 +197,13 @@ class Parser:
         ]
     )
 
+    END_SELECTOR = frozenset(
+        [
+            TOKEN_EOF,
+            TOKEN_RBRACKET,
+        ]
+    )
+
     RE_FLAG_MAP = {
         "a": re.A,
         "i": re.I,
@@ -269,7 +276,7 @@ def __init__(self, *, env: JSONPathEnvironment) -> None:
     def parse(self, stream: TokenStream) -> Iterable[JSONPathSelector]:
         """Parse a JSONPath from a stream of tokens."""
         if stream.current.kind == TOKEN_ROOT:
-            stream.next_token()
+            next(stream)
 
         yield from self.parse_path(stream, in_filter=False)
 
         if stream.current.kind not in (TOKEN_EOF, TOKEN_INTERSECTION, TOKEN_UNION):
@@ -316,16 +323,16 @@ def parse_path(
                 yield self.parse_selector_list(stream)
             else:
                 if in_filter:
-                    stream.push(stream.current)
+                    stream.backup()
                 break
 
-            stream.next_token()
+            next(stream)
 
     def parse_slice(self, stream: TokenStream) -> SliceSelector:
         """Parse a slice JSONPath expression from a stream of tokens."""
-        start_token = stream.next_token()
+        start_token = next(stream)
         stream.expect(TOKEN_SLICE_STOP)
-        stop_token = stream.next_token()
+        stop_token = next(stream)
         stream.expect(TOKEN_SLICE_STEP)
         step_token = stream.current
@@ -354,7 +361,7 @@ def parse_slice(self, stream: TokenStream) -> SliceSelector:
     def parse_selector_list(self, stream: TokenStream) -> ListSelector:  # noqa: PLR0912
        """Parse a comma separated list JSONPath selectors from a stream of tokens."""
-        tok = stream.next_token()
+        tok = next(stream)
         list_items: List[
             Union[
                 IndexSelector,
@@ -448,9 +455,9 @@ def parse_selector_list(self, stream: TokenStream) -> ListSelector: # noqa: PLR
             if stream.peek.kind != TOKEN_RBRACKET:
                 # TODO: error message .. expected a comma or logical operator
                 stream.expect_peek(TOKEN_COMMA)
-                stream.next_token()
+                next(stream)
 
-        stream.next_token()
+        next(stream)
 
         if not list_items:
             raise JSONPathSyntaxError("empty bracketed segment", token=tok)
@@ -458,7 +465,7 @@ def parse_selector_list(self, stream: TokenStream) -> ListSelector: # noqa: PLR
         return ListSelector(env=self.env, token=tok, items=list_items)
 
     def parse_filter(self, stream: TokenStream) -> Filter:
-        tok = stream.next_token()
+        tok = next(stream)
         expr = self.parse_filter_selector(stream)
 
         if self.env.well_typed and isinstance(expr, FunctionExtension):
@@ -496,7 +503,7 @@ def parse_float_literal(self, stream: TokenStream) -> FilterExpression:
         return FloatLiteral(value=float(stream.current.value))
 
     def parse_prefix_expression(self, stream: TokenStream) -> FilterExpression:
-        tok = stream.next_token()
+        tok = next(stream)
         assert tok.kind == TOKEN_NOT
         return PrefixExpression(
             operator="!",
@@ -506,7 +513,7 @@
     def parse_infix_expression(
         self, stream: TokenStream, left: FilterExpression
     ) -> FilterExpression:
-        tok = stream.next_token()
+        tok = next(stream)
         precedence = self.PRECEDENCES.get(tok.kind, self.PRECEDENCE_LOWEST)
         right = self.parse_filter_selector(stream, precedence)
         operator = self.BINARY_OPERATORS[tok.kind]
@@ -521,9 +528,9 @@ def parse_infix_expression(
         return InfixExpression(left, operator, right)
 
     def parse_grouped_expression(self, stream: TokenStream) -> FilterExpression:
-        stream.next_token()
+        next(stream)
         expr = self.parse_filter_selector(stream)
-        stream.next_token()
+        next(stream)
 
         while stream.current.kind != TOKEN_RPAREN:
             if stream.current.kind == TOKEN_EOF:
@@ -536,13 +543,13 @@
         return expr
 
     def parse_root_path(self, stream: TokenStream) -> FilterExpression:
-        stream.next_token()
+        next(stream)
         return RootPath(
             JSONPath(env=self.env, selectors=self.parse_path(stream, in_filter=True))
         )
 
     def parse_self_path(self, stream: TokenStream) -> FilterExpression:
-        stream.next_token()
+        next(stream)
         return SelfPath(
             JSONPath(env=self.env, selectors=self.parse_path(stream, in_filter=True))
         )
@@ -551,7 +558,7 @@ def parse_current_key(self, _: TokenStream) -> FilterExpression:
         return CURRENT_KEY
 
     def parse_filter_context_path(self, stream: TokenStream) -> FilterExpression:
-        stream.next_token()
+        next(stream)
         return FilterContextPath(
             JSONPath(env=self.env, selectors=self.parse_path(stream, in_filter=True))
         )
@@ -559,14 +566,14 @@ def parse_filter_context_path(self, stream: TokenStream) -> FilterExpression:
     def parse_regex(self, stream: TokenStream) -> FilterExpression:
         pattern = stream.current.value
         if stream.peek.kind == TOKEN_RE_FLAGS:
-            stream.next_token()
+            next(stream)
             flags = 0
             for flag in set(stream.current.value):
                 flags |= self.RE_FLAG_MAP[flag]
         return RegexLiteral(value=re.compile(pattern, flags))
 
     def parse_list_literal(self, stream: TokenStream) -> FilterExpression:
-        stream.next_token()
+        next(stream)
         list_items: List[FilterExpression] = []
 
         while stream.current.kind != TOKEN_RBRACKET:
@@ -580,15 +587,15 @@ def parse_list_literal(self, stream: TokenStream) -> FilterExpression:
             if stream.peek.kind != TOKEN_RBRACKET:
                 stream.expect_peek(TOKEN_COMMA)
-                stream.next_token()
+                next(stream)
 
-        stream.next_token()
+        next(stream)
         return ListLiteral(list_items)
 
     def parse_function_extension(self, stream: TokenStream) -> FilterExpression:
         function_arguments: List[FilterExpression] = []
-        tok = stream.next_token()
+        tok = next(stream)
 
         while stream.current.kind != TOKEN_RPAREN:
             try:
@@ -604,7 +611,7 @@
                 # The argument could be a comparison or logical expression
                 peek_kind = stream.peek.kind
                 while peek_kind in self.BINARY_OPERATORS:
-                    stream.next_token()
+                    next(stream)
                     expr = self.parse_infix_expression(stream, expr)
                     peek_kind = stream.peek.kind
@@ -612,9 +619,9 @@
             if stream.peek.kind != TOKEN_RPAREN:
                 stream.expect_peek(TOKEN_COMMA)
-                stream.next_token()
+                next(stream)
 
-        stream.next_token()
+        next(stream)
 
         return FunctionExtension(
             tok.value,
@@ -627,7 +634,7 @@ def parse_filter_selector(
         try:
             left = self.token_map[stream.current.kind](stream)
         except KeyError as err:
-            if stream.current.kind in (TOKEN_EOF, TOKEN_RBRACKET):
+            if stream.current.kind in self.END_SELECTOR:
                 msg = "end of expression"
             else:
                 msg = repr(stream.current.value)
@@ -638,7 +645,7 @@
         while True:
             peek_kind = stream.peek.kind
             if (
-                peek_kind in (TOKEN_EOF, TOKEN_RBRACKET)
+                peek_kind in self.END_SELECTOR
                 or self.PRECEDENCES.get(peek_kind, self.PRECEDENCE_LOWEST) < precedence
             ):
                 break
@@ -646,7 +653,7 @@
             if peek_kind not in self.BINARY_OPERATORS:
                 return left
 
-            stream.next_token()
+            next(stream)
             left = self.parse_infix_expression(stream, left)
 
         return left
diff --git a/jsonpath/path.py b/jsonpath/path.py
index 9a97f68..34a37d3 100644
--- a/jsonpath/path.py
+++ b/jsonpath/path.py
@@ -124,7 +124,6 @@ def finditer(
             filter_context=filter_context or {},
             obj=_data,
             parent=None,
-            path=self.env.root_token,
             parts=(),
             root=_data,
         )
@@ -163,7 +162,6 @@ async def root_iter() -> AsyncIterable[JSONPathMatch]:
             filter_context=filter_context or {},
             obj=_data,
             parent=None,
-            path=self.env.root_token,
             parts=(),
             root=_data,
         )
diff --git a/jsonpath/selectors.py b/jsonpath/selectors.py
index 5419798..a972f35 100644
--- a/jsonpath/selectors.py
+++ b/jsonpath/selectors.py
@@ -88,7 +88,6 @@ def resolve(self, matches: Iterable[JSONPathMatch]) -> Iterable[JSONPathMatch]:
                     obj=self.env.getitem(match.obj, self.name),
                     parent=match,
                     parts=match.parts + (self.name,),
-                    path=match.path + f"['{self.name}']",
                     root=match.root,
                 )
                 match.add_child(_match)
@@ -107,7 +106,6 @@ async def resolve_async(
                     obj=await self.env.getitem_async(match.obj, self.name),
                     parent=match,
                     parts=match.parts + (self.name,),
-                    path=match.path + f"['{self.name}']",
                     root=match.root,
                 )
                 match.add_child(_match)
@@ -161,7 +159,6 @@ def resolve(self, matches: Iterable[JSONPathMatch]) -> Iterable[JSONPathMatch]:
                     obj=self.env.getitem(match.obj, self._as_key),
                     parent=match,
                     parts=match.parts + (self._as_key,),
-                    path=f"{match.path}['{self.index}']",
                     root=match.root,
                 )
                 match.add_child(_match)
@@ -174,7 +171,6 @@ def resolve(self, matches: Iterable[JSONPathMatch]) -> Iterable[JSONPathMatch]:
                     obj=self.env.getitem(match.obj, self.index),
                     parent=match,
                     parts=match.parts + (norm_index,),
-                    path=match.path + f"[{norm_index}]",
                     root=match.root,
                 )
                 match.add_child(_match)
@@ -192,7 +188,6 @@ async def resolve_async(
                     obj=await self.env.getitem_async(match.obj, self._as_key),
                     parent=match,
                     parts=match.parts + (self._as_key,),
-                    path=f"{match.path}['{self.index}']",
                     root=match.root,
                 )
                 match.add_child(_match)
@@ -205,7 +200,6 @@ async def resolve_async(
                     obj=await self.env.getitem_async(match.obj, self.index),
                     parent=match,
                     parts=match.parts + (norm_index,),
-                    path=match.path + f"[{norm_index}]",
                     root=match.root,
                 )
                 match.add_child(_match)
@@ -238,13 +232,12 @@ def __hash__(self) -> int:
 
     def _keys(self, match: JSONPathMatch) -> Iterable[JSONPathMatch]:
         if isinstance(match.obj, Mapping):
-            for i, key in enumerate(match.obj.keys()):
+            for key in match.obj:
                 _match = self.env.match_class(
                     filter_context=match.filter_context(),
                     obj=key,
                     parent=match,
                     parts=match.parts + (f"{self.env.keys_selector_token}{key}",),
-                    path=f"{match.path}[{self.env.keys_selector_token}][{i}]",
                     root=match.root,
                 )
                 match.add_child(_match)
@@ -322,7 +315,6 @@ def resolve(self, matches: Iterable[JSONPathMatch]) -> Iterable[JSONPathMatch]:
                         obj=obj,
                         parent=match,
                         parts=match.parts + (norm_index,),
-                        path=f"{match.path}[{norm_index}]",
                         root=match.root,
                     )
                     match.add_child(_match)
@@ -345,7 +337,6 @@ async def resolve_async(
                         obj=obj,
                         parent=match,
                         parts=match.parts + (norm_index,),
-                        path=f"{match.path}[{norm_index}]",
                         root=match.root,
                     )
                     match.add_child(_match)
@@ -384,7 +375,6 @@ def resolve(self, matches: Iterable[JSONPathMatch]) -> Iterable[JSONPathMatch]:
                         obj=val,
                         parent=match,
                         parts=match.parts + (key,),
-                        path=match.path + f"['{key}']",
                         root=match.root,
                     )
                     match.add_child(_match)
@@ -396,7 +386,6 @@ def resolve(self, matches: Iterable[JSONPathMatch]) -> Iterable[JSONPathMatch]:
                         obj=val,
                         parent=match,
                         parts=match.parts + (i,),
-                        path=f"{match.path}[{i}]",
                         root=match.root,
                     )
                     match.add_child(_match)
@@ -413,7 +402,6 @@ async def resolve_async(
                         obj=val,
                         parent=match,
                         parts=match.parts + (key,),
-                        path=match.path + f"['{key}']",
                         root=match.root,
                     )
                     match.add_child(_match)
@@ -425,7 +413,6 @@ async def resolve_async(
                         obj=val,
                         parent=match,
                         parts=match.parts + (i,),
-                        path=f"{match.path}[{i}]",
                         root=match.root,
                     )
                     match.add_child(_match)
@@ -458,7 +445,6 @@ def _expand(self, match: JSONPathMatch) -> Iterable[JSONPathMatch]:
                     obj=val,
                     parent=match,
                     parts=match.parts + (key,),
-                    path=match.path + f"['{key}']",
                     root=match.root,
                 )
                 match.add_child(_match)
@@ -474,7 +460,6 @@ def _expand(self, match: JSONPathMatch) -> Iterable[JSONPathMatch]:
                     obj=val,
                     parent=match,
                     parts=match.parts + (i,),
-                    path=f"{match.path}[{i}]",
                     root=match.root,
                 )
                 match.add_child(_match)
@@ -609,7 +594,6 @@ def resolve( # noqa: PLR0912
                         obj=val,
                         parent=match,
                         parts=match.parts + (key,),
-                        path=match.path + f"['{key}']",
                         root=match.root,
                     )
                     match.add_child(_match)
@@ -635,7 +619,6 @@ def resolve( # noqa: PLR0912
                         obj=obj,
                         parent=match,
                         parts=match.parts + (i,),
-                        path=f"{match.path}[{i}]",
                         root=match.root,
                     )
                     match.add_child(_match)
@@ -677,7 +660,6 @@ async def resolve_async( # noqa: PLR0912
                         obj=val,
                         parent=match,
                         parts=match.parts + (key,),
-                        path=match.path + f"['{key}']",
                         root=match.root,
                     )
                     match.add_child(_match)
@@ -705,7 +687,6 @@ async def resolve_async( # noqa: PLR0912
                         obj=obj,
                         parent=match,
                         parts=match.parts + (i,),
-                        path=f"{match.path}[{i}]",
                         root=match.root,
                     )
                     match.add_child(_match)
diff --git a/jsonpath/stream.py b/jsonpath/stream.py
deleted file mode 100644
index 4a38afb..0000000
--- a/jsonpath/stream.py
+++ /dev/null
@@ -1,99 +0,0 @@
-# noqa: D100
-from __future__ import annotations
-
-from collections import deque
-from typing import Deque
-from typing import Iterator
-
-from .exceptions import JSONPathSyntaxError
-from .token import TOKEN_EOF
-from .token import Token
-
-# ruff: noqa: D102
-
-
-class TokenStream:
-    """Step through or iterate a stream of tokens."""
-
-    def __init__(self, token_iter: Iterator[Token]):
-        self.iter = token_iter
-        self._pushed: Deque[Token] = deque()
-        self.current = Token("", "", -1, "")
-        next(self)
-
-    class TokenStreamIterator:
-        """An iterable token stream."""
-
-        def __init__(self, stream: TokenStream):
-            self.stream = stream
-
-        def __iter__(self) -> Iterator[Token]:
-            return self
-
-        def __next__(self) -> Token:
-            tok = self.stream.current
-            if tok.kind is TOKEN_EOF:
-                self.stream.close()
-                raise StopIteration
-            next(self.stream)
-            return tok
-
-    def __iter__(self) -> Iterator[Token]:
-        return self.TokenStreamIterator(self)
-
-    def __next__(self) -> Token:
-        tok = self.current
-        if self._pushed:
-            self.current = self._pushed.popleft()
-        elif self.current.kind is not TOKEN_EOF:
-            try:
-                self.current = next(self.iter)
-            except StopIteration:
-                self.close()
-        return tok
-
-    def __str__(self) -> str:  # pragma: no cover
-        return f"current: {self.current}\nnext: {self.peek}"
-
-    def next_token(self) -> Token:
-        """Return the next token from the stream."""
-        return next(self)
-
-    @property
-    def peek(self) -> Token:
-        """Look at the next token."""
-        current = next(self)
-        result = self.current
-        self.push(current)
-        return result
-
-    def push(self, tok: Token) -> None:
-        """Push a token back to the stream."""
-        self._pushed.append(self.current)
-        self.current = tok
-
-    def close(self) -> None:
-        """Close the stream."""
-        self.current = Token(TOKEN_EOF, "", -1, "")
-
-    def expect(self, *typ: str) -> None:
-        if self.current.kind not in typ:
-            if len(typ) == 1:
-                _typ = repr(typ[0])
-            else:
-                _typ = f"one of {typ!r}"
-            raise JSONPathSyntaxError(
-                f"expected {_typ}, found {self.current.kind!r}",
-                token=self.current,
-            )
-
-    def expect_peek(self, *typ: str) -> None:
-        if self.peek.kind not in typ:
-            if len(typ) == 1:
-                _typ = repr(typ[0])
-            else:
-                _typ = f"one of {typ!r}"
-            raise JSONPathSyntaxError(
-                f"expected {_typ}, found {self.peek.kind!r}",
-                token=self.peek,
-            )
diff --git a/jsonpath/token.py b/jsonpath/token.py
index ed19394..f9b8ef9 100644
--- a/jsonpath/token.py
+++ b/jsonpath/token.py
@@ -1,7 +1,11 @@
 """JSONPath tokens."""
 import sys
+from typing import Iterable
+from typing import Iterator
 from typing import Tuple
 
+from .exceptions import JSONPathSyntaxError
+
 # Utility tokens
 TOKEN_EOF = sys.intern("EOF")
 TOKEN_ILLEGAL = sys.intern("ILLEGAL")
@@ -119,3 +123,77 @@ def position(self) -> Tuple[int, int]:
         line_number = self.value.count("\n", 0, self.index) + 1
         column_number = self.index - self.value.rfind("\n", 0, self.index)
         return (line_number, column_number - 1)
+
+
+class TokenStream:
+    """Step through a stream of tokens."""
+
+    eof = Token(TOKEN_EOF, "", -1, "")
+
+    __slots__ = ("tokens", "pos")
+
+    def __init__(self, token_iter: Iterable[Token]):
+        self.tokens = tuple(token_iter)
+        self.pos = 0
+
+    def __iter__(self) -> Iterator[Token]:
+        return iter(self.tokens)
+
+    @property
+    def current(self) -> Token:
+        """The current token in the stream.
+
+        Returns EOF if we're at the end of the stream.
+        """
+        try:
+            return self.tokens[self.pos]
+        except IndexError:
+            return self.eof
+
+    @property
+    def peek(self) -> Token:
+        """The next token in the stream.
+
+        Returns EOF if we're at or one away from the end of the stream.
+ """ + try: + return self.tokens[self.pos + 1] + except IndexError: + return self.eof + + def __next__(self) -> Token: + self.pos += 1 + return self.tokens[self.pos - 1] + + def next_token(self) -> Token: + """Return the current token ans advance the stream.""" + return next(self) + + def backup(self) -> None: + """Go back one token in the stream.""" + if self.pos > 0: + self.pos -= 1 + + def expect(self, *typ: str) -> None: + """Raise an exception if the current token's type is not in _typ_.""" + if self.current.kind not in typ: + if len(typ) == 1: + _typ = repr(typ[0]) + else: + _typ = f"one of {typ!r}" + raise JSONPathSyntaxError( + f"expected {_typ}, found {self.current.kind!r}", + token=self.current, + ) + + def expect_peek(self, *typ: str) -> None: + """Raise an exception if the next token's type is not in _typ_.""" + if self.peek.kind not in typ: + if len(typ) == 1: + _typ = repr(typ[0]) + else: + _typ = f"one of {typ!r}" + raise JSONPathSyntaxError( + f"expected {_typ}, found {self.peek.kind!r}", + token=self.peek, + ) diff --git a/performance/benchmark.py b/performance/benchmark.py new file mode 100644 index 0000000..072e05d --- /dev/null +++ b/performance/benchmark.py @@ -0,0 +1,90 @@ +import json +import timeit +from typing import Any +from typing import Mapping +from typing import NamedTuple +from typing import Sequence +from typing import Union + +# ruff: noqa: D100 D101 D103 T201 + + +class CTSCase(NamedTuple): + query: str + data: Union[Sequence[Any], Mapping[str, Any]] + + +def valid_queries() -> Sequence[CTSCase]: + with open("tests/cts/cts.json") as fd: + data = json.load(fd) + + return [ + (CTSCase(t["selector"], t["document"])) + for t in data["tests"] + if not t.get("invalid_selector", False) + ] + + +QUERIES = valid_queries() + +COMPILE_AND_FIND_SETUP = "from jsonpath import findall" + +COMPILE_AND_FIND_STMT = """\ +for query, data in QUERIES: + findall(query, data)""" + +JUST_COMPILE_SETUP = "from jsonpath import compile" + +JUST_COMPILE_STMT = """\ +for query, _ in QUERIES: + compile(query)""" + +JUST_FIND_SETUP = """\ +from jsonpath import compile +compiled_queries = [(compile(q), d) for q, d in QUERIES] +""" + +JUST_FIND_STMT = """\ +for path, data in compiled_queries: + path.findall(data)""" + + +def benchmark(number: int = 100, best_of: int = 3) -> None: + print( + f"repeating {len(QUERIES)} queries on small data {number} times, " + f"best of {best_of} rounds" + ) + + results = timeit.repeat( + COMPILE_AND_FIND_STMT, + setup=COMPILE_AND_FIND_SETUP, + globals={"QUERIES": QUERIES}, + number=number, + repeat=best_of, + ) + + print("compile and find", results) + + results = timeit.repeat( + JUST_COMPILE_STMT, + setup=JUST_COMPILE_SETUP, + globals={"QUERIES": QUERIES}, + number=number, + repeat=best_of, + ) + + print("just compile", results) + + results = timeit.repeat( + JUST_FIND_STMT, + setup=JUST_FIND_SETUP, + globals={"QUERIES": QUERIES}, + number=number, + repeat=best_of, + ) + + print("just find", results) + + +if __name__ == "__main__": + benchmark() diff --git a/performance/pprofile.py b/performance/pprofile.py new file mode 100644 index 0000000..37df002 --- /dev/null +++ b/performance/pprofile.py @@ -0,0 +1,100 @@ +import cProfile +import json +import sys +from pathlib import Path +from typing import Any +from typing import Mapping +from typing import NamedTuple +from typing import Sequence +from typing import Union + +from jsonpath import compile +from jsonpath import findall + +# ruff: noqa: D100 D101 D103 T201 + + +class CTSCase(NamedTuple): + query: 
str + data: Union[Sequence[Any], Mapping[str, Any]] + + +def valid_queries() -> Sequence[CTSCase]: + with open("tests/cts/cts.json") as fd: + data = json.load(fd) + + return [ + (CTSCase(t["selector"], t["document"])) + for t in data["tests"] + if not t.get("invalid_selector", False) + ] + + +QUERIES = valid_queries() + +COMPILE_AND_FIND_STMT = """\ +for _ in range(100): + for query, data in QUERIES: + findall(query, data)""" + + +JUST_COMPILE_STMT = """\ +for _ in range(100): + for query, _ in QUERIES: + compile(query)""" + +JUST_FIND_STMT = """\ +for _ in range(100): + for path, data in compiled_queries: + path.findall(data)""" + + +def profile_compile_and_find() -> None: + cProfile.runctx( + COMPILE_AND_FIND_STMT, + globals={"findall": findall, "QUERIES": QUERIES}, + locals={}, + sort="cumtime", + ) + + +def profile_just_compile() -> None: + cProfile.runctx( + JUST_COMPILE_STMT, + globals={"compile": compile, "QUERIES": QUERIES}, + locals={}, + sort="cumtime", + ) + + +def profile_just_find() -> None: + compiled_queries = [(compile(q), d) for q, d in QUERIES] + + cProfile.runctx( + JUST_FIND_STMT, + globals={"compiled_queries": compiled_queries}, + locals={}, + sort="cumtime", + ) + + +if __name__ == "__main__": + file_path = Path(__file__) + usage = ( + f"usage: {file_path.name} (--compile-and-find | --just-find | --just-compile)\n" + ) + + if len(sys.argv) < 2: # noqa: PLR2004 + sys.stderr.write(usage) + sys.exit(1) + + arg = sys.argv[1] + if arg == "--compile-and-find": + profile_compile_and_find() + elif arg == "--just-find": + profile_just_find() + elif arg == "--just-compile": + profile_just_compile() + else: + sys.stderr.write(usage) + sys.exit(1)
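A note on the match.py change above: JSONPathMatch no longer receives a ready-made path string from every selector; the canonical path is now derived on demand from the parts tuple. The following standalone sketch mirrors the new _path_repr helper and path property; the sample parts tuple is hypothetical, for illustration only.

from typing import Union

def path_repr(part: Union[str, int]) -> str:
    # String parts are quoted mapping keys, except pseudo keys produced by
    # the keys selector (prefixed with "#" or "~"), which stay unquoted.
    # Integer parts are array indices.
    if isinstance(part, str):
        if len(part) > 1 and part.startswith(("#", "~")):
            return f"[{part}]"
        return f"['{part}']"
    return f"[{part}]"

parts = ("store", "books", 0, "title")  # hypothetical match.parts
print("$" + "".join(path_repr(p) for p in parts))
# -> $['store']['books'][0]['title']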
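The deleted stream.py buffered pushed-back tokens in a deque and mutated current as it went; the replacement in token.py materialises the token iterator into a tuple once and moves an integer position over it, so current and peek are simple index lookups, push() becomes backup(), and reads past the end yield a shared EOF token. A rough usage sketch, assuming this branch is importable and that Token's positional fields are (kind, value, index, query) as the diff suggests; the token kinds and query below are illustrative only.

from jsonpath.token import Token, TokenStream

query = "$.a"
stream = TokenStream(
    [
        Token("ROOT", "$", 0, query),
        Token("PROPERTY", "a", 2, query),
    ]
)

assert stream.current.kind == "ROOT"
assert stream.peek.kind == "PROPERTY"  # peek is an index lookup, not a next/push round trip
next(stream)  # returns the ROOT token and advances the position
assert stream.current.kind == "PROPERTY"
assert stream.peek.kind == "EOF"  # past the end we get the shared EOF token
stream.backup()  # rewind one position; replaces the old push()
stream.expect("ROOT")  # raises JSONPathSyntaxError on a mismatch

This is also why parse.py swaps stream.push(stream.current) for stream.backup(): the stream only ever needs to rewind over tokens it has already produced, which is all the parser ever did with push().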
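Finally, a usage note on the new performance scripts, which appear to supersede the ad hoc benchmark.py and profile_.py that .gitignore no longer needs to ignore. Both load tests/cts/cts.json, so they assume the compliance test suite is present and that they are run from the repository root:

python performance/benchmark.py
python performance/pprofile.py --compile-and-find

benchmark.py times three scenarios with timeit (compile and find, just compile, just find), while pprofile.py runs one scenario under cProfile, selected with the flags shown in its usage string (--compile-and-find, --just-find or --just-compile).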