diff --git a/altair/_magics.py b/altair/_magics.py index 05186367e..8f40080db 100644 --- a/altair/_magics.py +++ b/altair/_magics.py @@ -1,23 +1,18 @@ """Magic functions for rendering vega-lite specifications.""" -__all__ = ["vegalite"] +from __future__ import annotations import json import warnings +from importlib.util import find_spec +from typing import Any -import IPython from IPython.core import magic_arguments from narwhals.dependencies import is_pandas_dataframe as _is_pandas_dataframe from altair.vegalite import v5 as vegalite_v5 -try: - import yaml - - YAML_AVAILABLE = True -except ImportError: - YAML_AVAILABLE = False - +__all__ = ["vegalite"] RENDERERS = { "vega-lite": { @@ -48,19 +43,21 @@ def _prepare_data(data, data_transformers): return data -def _get_variable(name): +def _get_variable(name: str) -> Any: """Get a variable from the notebook namespace.""" - ip = IPython.get_ipython() - if ip is None: + from IPython.core.getipython import get_ipython + + if ip := get_ipython(): + if name not in ip.user_ns: + msg = f"argument '{name}' does not match the name of any defined variable" + raise NameError(msg) + return ip.user_ns[name] + else: msg = ( "Magic command must be run within an IPython " "environment, in which get_ipython() is defined." ) raise ValueError(msg) - if name not in ip.user_ns: - msg = f"argument '{name}' does not match the name of any defined variable" - raise NameError(msg) - return ip.user_ns[name] @magic_arguments.magic_arguments() @@ -71,7 +68,7 @@ def _get_variable(name): ) @magic_arguments.argument("-v", "--version", dest="version", default="v5") @magic_arguments.argument("-j", "--json", dest="json", action="store_true") -def vegalite(line, cell): +def vegalite(line, cell) -> vegalite_v5.VegaLite: """ Cell magic for displaying vega-lite visualizations in CoLab. @@ -91,7 +88,7 @@ def vegalite(line, cell): if args.json: spec = json.loads(cell) - elif not YAML_AVAILABLE: + elif not find_spec("yaml"): try: spec = json.loads(cell) except json.JSONDecodeError as err: @@ -101,6 +98,8 @@ def vegalite(line, cell): ) raise ValueError(msg) from err else: + import yaml + spec = yaml.load(cell, Loader=yaml.SafeLoader) if args.data is not None: diff --git a/altair/utils/_dfi_types.py b/altair/utils/_dfi_types.py index 086db6800..1b47ddd08 100644 --- a/altair/utils/_dfi_types.py +++ b/altair/utils/_dfi_types.py @@ -79,6 +79,7 @@ def dtype(self) -> tuple[Any, int, str, str]: - Data types not included: complex, Arrow-style null, binary, decimal, and nested (list, struct, map, union) dtypes. """ + ... # Have to use a generic Any return type as not all libraries who implement # the dataframe interchange protocol implement the TypedDict that is usually @@ -106,6 +107,7 @@ def describe_categorical(self) -> Any: TBD: are there any other in-memory representations that are needed? """ + ... class DataFrame(Protocol): @@ -137,12 +139,15 @@ def __dataframe__( necessary if a library supports strided buffers, given that this protocol specifies contiguous buffers. """ + ... def column_names(self) -> Iterable[str]: """Return an iterator yielding the column names.""" + ... def get_column_by_name(self, name: str) -> Column: """Return the column whose name is the indicated name.""" + ... def get_chunks(self, n_chunks: int | None = None) -> Iterable[DataFrame]: """ @@ -156,3 +161,4 @@ def get_chunks(self, n_chunks: int | None = None) -> Iterable[DataFrame]: Note that the producer must ensure that all columns are chunked the same way. """ + ... 
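Reviewer note on the altair/_magics.py hunks above: the module-level try/except ImportError around yaml is replaced by an importlib.util.find_spec probe plus a deferred import yaml, and _get_variable now wraps get_ipython() in an assignment expression so both failure modes sit next to the lookup. A minimal standalone sketch of the same two patterns, assuming PyYAML and IPython are optional dependencies; the helper names load_spec and get_variable are illustrative, not part of the patch:

from __future__ import annotations

import json
from importlib.util import find_spec
from typing import Any


def load_spec(cell: str) -> dict[str, Any]:
    # Probe for the optional dependency without importing it eagerly.
    if not find_spec("yaml"):
        return json.loads(cell)
    import yaml  # deferred: only paid for when YAML input is actually parsed

    return yaml.load(cell, Loader=yaml.SafeLoader)


def get_variable(name: str) -> Any:
    from IPython.core.getipython import get_ipython

    # The walrus keeps the happy path and both error paths in one place.
    if ip := get_ipython():
        if name not in ip.user_ns:
            raise NameError(f"{name!r} does not match any defined variable")
        return ip.user_ns[name]
    msg = "Magic command must be run within an IPython environment."
    raise ValueError(msg)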
diff --git a/altair/utils/core.py b/altair/utils/core.py index 9143a1ec5..669b81a06 100644 --- a/altair/utils/core.py +++ b/altair/utils/core.py @@ -222,7 +222,7 @@ def __dataframe__( def infer_vegalite_type_for_pandas( - data: object, + data: Any, ) -> InferredVegaLiteType | tuple[InferredVegaLiteType, list[Any]]: """ From an array-like input, infer the correct vega typecode. @@ -231,7 +231,7 @@ def infer_vegalite_type_for_pandas( Parameters ---------- - data: object + data: Any """ # This is safe to import here, as this function is only called on pandas input. from pandas.api.types import infer_dtype @@ -738,10 +738,10 @@ def use_signature(tp: Callable[P, Any], /): """ @overload - def decorate(cb: WrapsMethod[T, R], /) -> WrappedMethod[T, P, R]: ... + def decorate(cb: WrapsMethod[T, R], /) -> WrappedMethod[T, P, R]: ... # pyright: ignore[reportOverlappingOverload] @overload - def decorate(cb: WrapsFunc[R], /) -> WrappedFunc[P, R]: ... + def decorate(cb: WrapsFunc[R], /) -> WrappedFunc[P, R]: ... # pyright: ignore[reportOverlappingOverload] def decorate(cb: WrapsFunc[R], /) -> WrappedMethod[T, P, R] | WrappedFunc[P, R]: """ @@ -857,7 +857,7 @@ def from_cache(cls) -> _ChannelCache: cached = _CHANNEL_CACHE except NameError: cached = cls.__new__(cls) - cached.channel_to_name = _init_channel_to_name() + cached.channel_to_name = _init_channel_to_name() # pyright: ignore[reportAttributeAccessIssue] cached.name_to_channel = _invert_group_channels(cached.channel_to_name) _CHANNEL_CACHE = cached return _CHANNEL_CACHE diff --git a/altair/vegalite/v5/display.py b/altair/vegalite/v5/display.py index 5d71a2934..dcc506958 100644 --- a/altair/vegalite/v5/display.py +++ b/altair/vegalite/v5/display.py @@ -125,10 +125,6 @@ def browser_renderer( vegalite_version=VEGALITE_VERSION, **metadata, ) - - if isinstance(mimebundle, tuple): - mimebundle = mimebundle[0] - html = mimebundle["text/html"] open_html_in_browser(html, using=using, port=port) return {} @@ -162,7 +158,9 @@ def browser_renderer( renderers.register("json", json_renderer) renderers.register("png", png_renderer) renderers.register("svg", svg_renderer) -renderers.register("jupyter", jupyter_renderer) +# FIXME: Caused by upstream # type: ignore[unreachable] +# https://github.com/manzt/anywidget/blob/b7961305a7304f4d3def1fafef0df65db56cf41e/anywidget/widget.py#L80-L81 +renderers.register("jupyter", jupyter_renderer) # pyright: ignore[reportArgumentType] renderers.register("browser", browser_renderer) renderers.register("olli", olli_renderer) renderers.enable("default") diff --git a/pyproject.toml b/pyproject.toml index a9fe93f8e..ae15a8a4b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -457,18 +457,12 @@ include=[ "./doc/*.py", "./tests/**/*.py", "./tools/**/*.py", + "./sphinxext/**/*.py", ] ignore=[ - "./altair/vegalite/v5/display.py", - "./altair/vegalite/v5/schema/", - "./altair/utils/core.py", - "./altair/utils/_dfi_types.py", - "./altair/_magics.py", - "./altair/jupyter/", - "./sphinxext/", - "./tests/test_jupyter_chart.py", - "./tests/utils/", - "./tests/test_magics.py", - "./tests/vegalite/v5/test_geo_interface.py", - "../../../**/Lib", # stdlib + "./altair/vegalite/v5/schema/channels.py", # 716 warns + "./altair/vegalite/v5/schema/mixins.py", # 1001 warns + "./altair/jupyter/", # Mostly untyped + "./tests/test_jupyter_chart.py", # Based on untyped module + "../../../**/Lib", # stdlib ] diff --git a/sphinxext/altairgallery.py b/sphinxext/altairgallery.py index 062807d61..ea3a17837 100644 --- a/sphinxext/altairgallery.py +++ 
b/sphinxext/altairgallery.py @@ -14,7 +14,7 @@ from docutils import nodes from docutils.parsers.rst import Directive from docutils.parsers.rst.directives import flag -from docutils.statemachine import ViewList +from docutils.statemachine import StringList from sphinx.util.nodes import nested_parse_with_titles from altair.utils.execeval import eval_block @@ -184,7 +184,7 @@ def save_example_pngs( else: # the file changed or the image file does not exist. Generate it. print(f"-> saving {image_file!s}") - chart = eval_block(code) + chart = eval_block(code, strict=True) try: chart.save(image_file) hashes[filename] = example_hash @@ -303,7 +303,7 @@ def run(self) -> list[Node]: ) # parse and return documentation - result = ViewList() + result = StringList() for line in include.split("\n"): result.append(line, "") node = nodes.paragraph() diff --git a/sphinxext/code_ref.py b/sphinxext/code_ref.py index 8f5f4da0e..2eba68a6a 100644 --- a/sphinxext/code_ref.py +++ b/sphinxext/code_ref.py @@ -108,7 +108,7 @@ def gen() -> Iterator[nodes.Node]: def theme_names() -> tuple[Sequence[str], Sequence[str]]: - names: set[VegaThemes] = set(get_args(VegaThemes)) + names: set[str] = set(get_args(VegaThemes)) carbon = {nm for nm in names if nm.startswith("carbon")} return ["default", *sorted(names - carbon)], sorted(carbon) @@ -180,8 +180,8 @@ class ThemeDirective(SphinxDirective): https://pyscript.net/ """ - has_content: ClassVar[Literal[False]] = False - required_arguments: ClassVar[Literal[1]] = 1 + has_content: ClassVar[bool] = False + required_arguments: ClassVar[int] = 1 option_spec = { "packages": validate_packages, "dropdown-label": directives.unchanged, @@ -226,14 +226,16 @@ def run(self) -> Sequence[nodes.Node]: ) results.append(raw_html("

\n")) return maybe_details( - results, self.options, default_summary="Show Vega-Altair Theme Test" + results, + self.options, # pyright: ignore[reportArgumentType] + default_summary="Show Vega-Altair Theme Test", ) class PyScriptDirective(SphinxDirective): """Placeholder for non-theme related directive.""" - has_content: ClassVar[Literal[False]] = False + has_content: ClassVar[bool] = False option_spec = {"packages": directives.unchanged} def run(self) -> Sequence[nodes.Node]: @@ -282,9 +284,9 @@ class CodeRefDirective(SphinxDirective): https://github.com/vega/sphinxext-altair """ - has_content: ClassVar[Literal[False]] = False - required_arguments: ClassVar[Literal[1]] = 1 - option_spec: ClassVar[dict[_Option, Callable[[str], Any]]] = { + has_content: ClassVar[bool] = False + required_arguments: ClassVar[int] = 1 + option_spec: ClassVar[dict[_Option, Callable[[str], Any]]] = { # pyright: ignore[reportIncompatibleVariableOverride] "output": validate_output, "fold": directives.flag, "summary": directives.unchanged_required, @@ -302,8 +304,8 @@ def __init__( state: RSTState, state_machine: RSTStateMachine, ) -> None: - super().__init__(name, arguments, options, content, lineno, content_offset, block_text, state, state_machine) # fmt: skip - self.options: dict[_Option, Any] + super().__init__(name, arguments, options, content, lineno, content_offset, block_text, state, state_machine) # fmt: skip # pyright: ignore[reportArgumentType] + self.options: dict[_Option, Any] # pyright: ignore[reportIncompatibleVariableOverride] def run(self) -> Sequence[nodes.Node]: qual_name = self.arguments[0] diff --git a/sphinxext/schematable.py b/sphinxext/schematable.py index 5daaba4c5..323f8dfe7 100644 --- a/sphinxext/schematable.py +++ b/sphinxext/schematable.py @@ -8,7 +8,7 @@ from docutils import frontend, nodes, utils from docutils.parsers.rst import Directive from docutils.parsers.rst.directives import flag -from myst_parser.docutils_ import Parser +from myst_parser.parsers.docutils_ import Parser from sphinx import addnodes from tools.schemapi.utils import SchemaInfo, fix_docstring_issues @@ -95,7 +95,7 @@ def add_text(node: nodes.paragraph, text: str) -> nodes.paragraph: for part in reCode.split(text): if part: if is_text: - node += nodes.Text(part, part) + node += nodes.Text(part, part) # pyright: ignore[reportCallIssue] else: node += nodes.literal(part, part) @@ -108,7 +108,7 @@ def build_row( item: tuple[str, dict[str, Any]], rootschema: dict[str, Any] | None ) -> nodes.row: """Return nodes.row with property description.""" - prop, propschema, _ = item + prop, propschema = item row = nodes.row() # Property @@ -165,17 +165,16 @@ def build_schema_table( def select_items_from_schema( schema: dict[str, Any], props: list[str] | None = None -) -> Iterator[tuple[Any, Any, bool] | tuple[str, Any, bool]]: - """Return iterator (prop, schema.item, required) on prop, return all in None.""" +) -> Iterator[tuple[Any, Any] | tuple[str, Any]]: + """Return iterator (prop, schema.item) on prop, return all in None.""" properties = schema.get("properties", {}) - required = schema.get("required", []) if not props: for prop, item in properties.items(): - yield prop, item, prop in required + yield prop, item else: for prop in props: try: - yield prop, properties[prop], prop in required + yield prop, properties[prop] except KeyError as err: msg = f"Can't find property: {prop}" raise Exception(msg) from err diff --git a/sphinxext/utils.py b/sphinxext/utils.py index a27e4d340..743cf0913 100644 --- a/sphinxext/utils.py +++ 
b/sphinxext/utils.py @@ -46,7 +46,7 @@ def create_generic_image( arr = np.zeros((shape[0], shape[1], 3)) if gradient: # gradient from gray to white - arr += np.linspace(128, 255, shape[1])[:, None] + arr += np.linspace(128, 255, shape[1])[:, None] # pyright: ignore[reportCallIssue,reportArgumentType] im = Image.fromarray(arr.astype("uint8")) im.save(filename) @@ -138,12 +138,12 @@ def get_docstring_and_rest(filename: str) -> tuple[str, str | None, str, int]: try: # In python 3.7 module knows its docstring. # Everything else will raise an attribute error - docstring = node.docstring + docstring = node.docstring # pyright: ignore[reportAttributeAccessIssue] import tokenize from io import BytesIO - ts = tokenize.tokenize(BytesIO(content).readline) + ts = tokenize.tokenize(BytesIO(content).readline) # pyright: ignore[reportArgumentType] ds_lines = 0 # find the first string according to the tokenizer and get # it's end row @@ -163,7 +163,7 @@ def get_docstring_and_rest(filename: str) -> tuple[str, str | None, str, int]: and isinstance(node.body[0].value, (ast.Str, ast.Constant)) ): docstring_node = node.body[0] - docstring = docstring_node.value.s + docstring = docstring_node.value.s # pyright: ignore[reportAttributeAccessIssue] # python2.7: Code was read in bytes needs decoding to utf-8 # unless future unicode_literals is imported in source which # make ast output unicode strings @@ -203,8 +203,8 @@ def dict_hash(dct: dict[Any, Any]) -> Any: serialized = json.dumps(dct, sort_keys=True) try: - m = hashlib.sha256(serialized)[:32] + m = hashlib.sha256(serialized)[:32] # pyright: ignore[reportArgumentType,reportIndexIssue] except TypeError: - m = hashlib.sha256(serialized.encode())[:32] + m = hashlib.sha256(serialized.encode())[:32] # pyright: ignore[reportIndexIssue] return m.hexdigest() diff --git a/tests/__init__.py b/tests/__init__.py index 1cda56438..617cfca80 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -59,6 +59,17 @@ def windows_has_tzdata() -> bool: >>> hatch run test-slow --durations=25 # doctest: +SKIP """ +skip_requires_ipython: pytest.MarkDecorator = pytest.mark.skipif( + find_spec("IPython") is None, reason="`IPython` not installed." +) +""" +``pytest.mark.skipif`` decorator. + +Applies when `IPython`_ import would fail. + +.. _IPython: + https://github.com/ipython/ipython +""" skip_requires_vl_convert: pytest.MarkDecorator = pytest.mark.skipif( find_spec("vl_convert") is None, reason="`vl_convert` not installed." 
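Reviewer note on the tests/__init__.py hunk above: skip_requires_ipython follows the existing skip_requires_vl_convert recipe, building one pytest.mark.skipif from an importlib.util.find_spec probe and reusing it as a decorator, which is what lets tests/test_magics.py below drop its module-level IPYTHON_AVAILABLE flag. A small self-contained sketch of the pattern; the test name and the asserted cell are illustrative only:

from importlib.util import find_spec

import pytest

skip_requires_ipython: pytest.MarkDecorator = pytest.mark.skipif(
    find_spec("IPython") is None, reason="`IPython` not installed."
)


@skip_requires_ipython
def test_needs_ipython() -> None:
    # Only runs when IPython is importable in the current environment.
    from IPython.core.interactiveshell import InteractiveShell

    shell = InteractiveShell.instance()
    result = shell.run_cell("1 + 1")
    assert result.result == 2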
diff --git a/tests/test_magics.py b/tests/test_magics.py index e1775aaf5..f88c83dbb 100644 --- a/tests/test_magics.py +++ b/tests/test_magics.py @@ -1,68 +1,76 @@ +from __future__ import annotations + import json +from typing import TYPE_CHECKING, Any import pytest -try: - from IPython import InteractiveShell - - IPYTHON_AVAILABLE = True -except ImportError: - IPYTHON_AVAILABLE = False - -from altair.vegalite.v5 import VegaLite - -DATA_RECORDS = [ - {"amount": 28, "category": "A"}, - {"amount": 55, "category": "B"}, - {"amount": 43, "category": "C"}, - {"amount": 91, "category": "D"}, - {"amount": 81, "category": "E"}, - {"amount": 53, "category": "F"}, - {"amount": 19, "category": "G"}, - {"amount": 87, "category": "H"}, -] - -if IPYTHON_AVAILABLE: - _ipshell = InteractiveShell.instance() - _ipshell.run_cell("%load_ext altair") - _ipshell.run_cell( - f""" -import pandas as pd -table = pd.DataFrame.from_records({DATA_RECORDS}) -the_data = table -""" - ) +from altair.vegalite.v5.display import VegaLite +from tests import skip_requires_ipython + +if TYPE_CHECKING: + from IPython.core.interactiveshell import InteractiveShell + + +@pytest.fixture +def records() -> list[dict[str, Any]]: + return [ + {"amount": 28, "category": "A"}, + {"amount": 55, "category": "B"}, + {"amount": 43, "category": "C"}, + {"amount": 91, "category": "D"}, + {"amount": 81, "category": "E"}, + {"amount": 53, "category": "F"}, + {"amount": 19, "category": "G"}, + {"amount": 87, "category": "H"}, + ] -VEGALITE_SPEC = { - "$schema": "https://vega.github.io/schema/vega-lite/v5.json", - "data": {"values": DATA_RECORDS}, - "description": "A simple bar chart with embedded data.", - "encoding": { - "x": {"field": "category", "type": "ordinal"}, - "y": {"field": "amount", "type": "quantitative"}, - }, - "mark": {"type": "bar"}, -} +@pytest.fixture +def vl_spec(records) -> dict[str, Any]: + return { + "$schema": "https://vega.github.io/schema/vega-lite/v5.json", + "data": {"values": records}, + "description": "A simple bar chart with embedded data.", + "encoding": { + "x": {"field": "category", "type": "ordinal"}, + "y": {"field": "amount", "type": "quantitative"}, + }, + "mark": {"type": "bar"}, + } + + +@pytest.fixture +def ipshell(records) -> InteractiveShell: + from IPython.core.interactiveshell import InteractiveShell + + shell = InteractiveShell.instance() + shell.run_cell("%load_ext altair") + shell.run_cell( + f"import pandas as pd\n" + f"table = pd.DataFrame.from_records({records})\n" + f"the_data = table" + ) + return shell -@pytest.mark.skipif(not IPYTHON_AVAILABLE, reason="requires ipython") -def test_vegalite_magic_data_included(): - result = _ipshell.run_cell("%%vegalite\n" + json.dumps(VEGALITE_SPEC)) +@skip_requires_ipython +def test_vegalite_magic_data_included(ipshell, vl_spec) -> None: + result = ipshell.run_cell("%%vegalite\n" + json.dumps(vl_spec)) assert isinstance(result.result, VegaLite) - assert result.result.spec == VEGALITE_SPEC + assert result.result.spec == vl_spec -@pytest.mark.skipif(not IPYTHON_AVAILABLE, reason="requires ipython") -def test_vegalite_magic_json_flag(): - result = _ipshell.run_cell("%%vegalite --json\n" + json.dumps(VEGALITE_SPEC)) +@skip_requires_ipython +def test_vegalite_magic_json_flag(ipshell, vl_spec) -> None: + result = ipshell.run_cell("%%vegalite --json\n" + json.dumps(vl_spec)) assert isinstance(result.result, VegaLite) - assert result.result.spec == VEGALITE_SPEC + assert result.result.spec == vl_spec -@pytest.mark.skipif(not IPYTHON_AVAILABLE, reason="requires 
ipython") -def test_vegalite_magic_pandas_data(): - spec = {key: val for key, val in VEGALITE_SPEC.items() if key != "data"} - result = _ipshell.run_cell("%%vegalite table\n" + json.dumps(spec)) +@skip_requires_ipython +def test_vegalite_magic_pandas_data(ipshell, vl_spec) -> None: + spec = {key: val for key, val in vl_spec.items() if key != "data"} + result = ipshell.run_cell("%%vegalite table\n" + json.dumps(spec)) assert isinstance(result.result, VegaLite) - assert result.result.spec == VEGALITE_SPEC + assert result.result.spec == vl_spec diff --git a/tests/utils/test_compiler.py b/tests/utils/test_compiler.py index fb5521175..49ee31506 100644 --- a/tests/utils/test_compiler.py +++ b/tests/utils/test_compiler.py @@ -29,7 +29,9 @@ def assert_is_vega_spec(vega_spec): @skip_requires_vl_convert def test_vegalite_compiler(chart): vegalite_spec = chart.to_dict() - vega_spec = vegalite_compilers.get()(vegalite_spec) + fn = vegalite_compilers.get() + assert fn is not None + vega_spec = fn(vegalite_spec) assert_is_vega_spec(vega_spec) diff --git a/tests/utils/test_core.py b/tests/utils/test_core.py index d43f88dd8..95bed54c1 100644 --- a/tests/utils/test_core.py +++ b/tests/utils/test_core.py @@ -279,7 +279,7 @@ def channels_cached(channels) -> core._ChannelCache: """Previously ``_ChannelCache.from_channels``.""" cached = core._ChannelCache.__new__(core._ChannelCache) cached.channel_to_name = { - c: c._encoding_name + c: c._encoding_name # pyright: ignore[reportAttributeAccessIssue] for c in channels.__dict__.values() if isinstance(c, type) and issubclass(c, alt.SchemaBase) diff --git a/tests/utils/test_data.py b/tests/utils/test_data.py index 30b5b7f8e..b4d16d3dd 100644 --- a/tests/utils/test_data.py +++ b/tests/utils/test_data.py @@ -100,6 +100,7 @@ def test_dataframe_to_json(): - make certain the filename is deterministic - make certain the file contents match the data. """ + filename = "" data = _create_dataframe(10) try: result1 = _pipe(data, to_json) @@ -107,7 +108,8 @@ def test_dataframe_to_json(): filename = result1["url"] output = pd.read_json(filename) finally: - Path(filename).unlink() + if filename: + Path(filename).unlink() assert result1 == result2 assert output.equals(data) @@ -120,6 +122,7 @@ def test_dict_to_json(): - make certain the filename is deterministic - make certain the file contents match the data. """ + filename = "" data = _create_data_with_values(10) try: result1 = _pipe(data, to_json) @@ -127,7 +130,8 @@ def test_dict_to_json(): filename = result1["url"] output = pd.read_json(filename).to_dict(orient="records") finally: - Path(filename).unlink() + if filename: + Path(filename).unlink() assert result1 == result2 assert data == {"values": output} @@ -141,6 +145,7 @@ def test_dataframe_to_csv(tp: type[Any]) -> None: - make certain the filename is deterministic - make certain the file contents match the data. """ + filename: str = "" data = _create_dataframe(10, tp=tp) try: result1 = _pipe(data, to_csv) @@ -148,7 +153,8 @@ def test_dataframe_to_csv(tp: type[Any]) -> None: filename = result1["url"] output = tp(pd.read_csv(filename)) finally: - Path(filename).unlink() + if filename: + Path(filename).unlink() assert result1 == result2 assert output.equals(data) @@ -161,6 +167,7 @@ def test_dict_to_csv(): - make certain the filename is deterministic - make certain the file contents match the data. 
""" + filename = "" data = _create_data_with_values(10) try: result1 = _pipe(data, to_csv) @@ -168,7 +175,8 @@ def test_dict_to_csv(): filename = result1["url"] output = pd.read_csv(filename).to_dict(orient="records") finally: - Path(filename).unlink() + if filename: + Path(filename).unlink() assert result1 == result2 assert data == {"values": output} diff --git a/tests/utils/test_html.py b/tests/utils/test_html.py index 1ce63445f..121215145 100644 --- a/tests/utils/test_html.py +++ b/tests/utils/test_html.py @@ -20,8 +20,8 @@ def spec(): def test_spec_to_html(requirejs, fullhtml, spec): # We can't test that the html actually renders, but we'll test aspects of # it to make certain that the keywords are respected. - vegaembed_version = ("3.12",) - vegalite_version = ("3.0",) + vegaembed_version = "3.12" + vegalite_version = "3.0" vega_version = "4.0" html = spec_to_html( diff --git a/tests/utils/test_mimebundle.py b/tests/utils/test_mimebundle.py index 2688ff79c..bdb0b3cea 100644 --- a/tests/utils/test_mimebundle.py +++ b/tests/utils/test_mimebundle.py @@ -1,3 +1,7 @@ +from __future__ import annotations + +from typing import Any + import pytest import altair as alt @@ -7,7 +11,7 @@ @pytest.fixture -def vegalite_spec(): +def vegalite_spec() -> dict[str, Any]: return { "$schema": "https://vega.github.io/schema/vega-lite/v5.json", "description": "A simple bar chart with embedded data.", @@ -187,15 +191,15 @@ def test_spec_to_vegalite_mimebundle(vegalite_spec): def test_spec_to_vega_mimebundle(vega_spec): # ValueError: mode must be 'vega-lite' with pytest.raises(ValueError): # noqa: PT011 - spec_to_mimebundle( + spec_to_mimebundle( # pyright: ignore[reportCallIssue] spec=vega_spec, - mode="vega", + mode="vega", # pyright: ignore[reportArgumentType] format="vega", vega_version=alt.VEGA_VERSION, ) -def test_spec_to_json_mimebundle(): +def test_spec_to_json_mimebundle(vegalite_spec): bundle = spec_to_mimebundle( spec=vegalite_spec, mode="vega-lite", @@ -240,5 +244,6 @@ def test_vegafusion_chart_to_vega_mime_bundle(vegalite_spec): chart = alt.Chart.from_dict(vegalite_spec) with alt.data_transformers.enable("vegafusion"), alt.renderers.enable("json"): bundle = chart._repr_mimebundle_() + assert isinstance(bundle, tuple) vega_spec = bundle[0]["application/json"] check_pre_transformed_vega_spec(vega_spec) diff --git a/tests/utils/test_plugin_registry.py b/tests/utils/test_plugin_registry.py index bd741d2eb..051d2cdcf 100644 --- a/tests/utils/test_plugin_registry.py +++ b/tests/utils/test_plugin_registry.py @@ -38,7 +38,9 @@ def test_plugin_registry(): plugins.enable("new_plugin") assert plugins.names() == ["new_plugin"] assert plugins.active == "new_plugin" - assert plugins.get()(3) == 9 + fn = plugins.get() + assert fn is not None + assert fn(3) == 9 assert repr(plugins) == ( "TypedCallableRegistry(active='new_plugin', " "registered=['new_plugin'])" ) @@ -49,16 +51,22 @@ def test_plugin_registry_extra_options(): plugins.register("metadata_plugin", lambda x, p=2: x**p) plugins.enable("metadata_plugin") - assert plugins.get()(3) == 9 + fn = plugins.get() + assert fn is not None + assert fn(3) == 9 plugins.enable("metadata_plugin", p=3) assert plugins.active == "metadata_plugin" - assert plugins.get()(3) == 27 + fn = plugins.get() + assert fn is not None + assert fn(3) == 27 # enabling without changing name plugins.enable(p=2) assert plugins.active == "metadata_plugin" - assert plugins.get()(3) == 9 + fn = plugins.get() + assert fn is not None + assert fn(3) == 9 def 
test_plugin_registry_global_settings(): diff --git a/tests/utils/test_schemapi.py b/tests/utils/test_schemapi.py index fa5e83aff..8772f8196 100644 --- a/tests/utils/test_schemapi.py +++ b/tests/utils/test_schemapi.py @@ -251,15 +251,15 @@ def test_simple_type(): def test_simple_array(): assert SimpleArray([4, 5, "six"]).to_dict() == [4, 5, "six"] - assert SimpleArray.from_dict(list("abc")).to_dict() == list("abc") + assert SimpleArray.from_dict(list("abc")).to_dict() == list("abc") # pyright: ignore[reportArgumentType] def test_definition_union(): - obj = DefinitionUnion.from_dict("A") + obj = DefinitionUnion.from_dict("A") # pyright: ignore[reportArgumentType] assert isinstance(obj, Bar) assert obj.to_dict() == "A" - obj = DefinitionUnion.from_dict("B") + obj = DefinitionUnion.from_dict("B") # pyright: ignore[reportArgumentType] assert isinstance(obj, Bar) assert obj.to_dict() == "B" @@ -445,7 +445,7 @@ def chart_error_example__hconcat(): alt.Chart(source) .mark_text() .encode( - alt.Text("Horsepower:N", title={"text": "Horsepower", "align": "right"}) + alt.Text("Horsepower:N", title={"text": "Horsepower", "align": "right"}) # pyright: ignore[reportArgumentType] ) ) @@ -460,7 +460,7 @@ def chart_error_example__invalid_y_option_value_unknown_x_option(): .mark_bar() .encode( x=alt.X("variety", unknown=2), - y=alt.Y("sum(yield)", stack="asdf"), + y=alt.Y("sum(yield)", stack="asdf"), # pyright: ignore[reportArgumentType] ) ) @@ -472,7 +472,7 @@ def chart_error_example__invalid_y_option_value(): .mark_bar() .encode( x=alt.X("variety"), - y=alt.Y("sum(yield)", stack="asdf"), + y=alt.Y("sum(yield)", stack="asdf"), # pyright: ignore[reportArgumentType] ) ) @@ -486,7 +486,7 @@ def chart_error_example__invalid_y_option_value_with_condition(): .mark_bar() .encode( x="variety", - y=alt.Y("sum(yield)", stack="asdf"), + y=alt.Y("sum(yield)", stack="asdf"), # pyright: ignore[reportArgumentType] opacity=alt.condition("datum.yield > 0", alt.value(1), alt.value(0.2)), ) ) @@ -494,7 +494,7 @@ def chart_error_example__invalid_y_option_value_with_condition(): def chart_error_example__invalid_timeunit_value(): # Error: Invalid value for Angle.timeUnit - return alt.Chart().encode(alt.Angle().timeUnit("invalid_value")) + return alt.Chart().encode(alt.Angle().timeUnit("invalid_value")) # pyright: ignore[reportArgumentType] def chart_error_example__invalid_sort_value(): @@ -507,13 +507,13 @@ def chart_error_example__invalid_bandposition_value(): return ( alt.Chart(data.cars()) .mark_text(align="right") - .encode(alt.Text("Horsepower:N", bandPosition="4")) + .encode(alt.Text("Horsepower:N", bandPosition="4")) # pyright: ignore[reportArgumentType] ) def chart_error_example__invalid_type(): # Error: Invalid value for type - return alt.Chart().encode(alt.X(type="unknown")) + return alt.Chart().encode(alt.X(type="unknown")) # pyright: ignore[reportArgumentType] def chart_error_example__additional_datum_argument(): @@ -539,21 +539,21 @@ def chart_error_example__wrong_tooltip_type_in_faceted_chart(): return ( alt.Chart(pd.DataFrame({"a": [1]})) .mark_point() - .encode(tooltip=[{"wrong"}]) + .encode(tooltip=[{"wrong"}]) # pyright: ignore[reportArgumentType] .facet() ) def chart_error_example__wrong_tooltip_type_in_layered_chart(): # Error: Wrong data type to pass to tooltip - return alt.layer(alt.Chart().mark_point().encode(tooltip=[{"wrong"}])) + return alt.layer(alt.Chart().mark_point().encode(tooltip=[{"wrong"}])) # pyright: ignore[reportArgumentType] def chart_error_example__two_errors_in_layered_chart(): # Error 1: 
Wrong data type to pass to tooltip # Error 2: `Color` has no parameter named 'invalidArgument' return alt.layer( - alt.Chart().mark_point().encode(tooltip=[{"wrong"}]), + alt.Chart().mark_point().encode(tooltip=[{"wrong"}]), # pyright: ignore[reportArgumentType] alt.Chart().mark_line().encode(alt.Color(invalidArgument="unknown")), ) @@ -595,7 +595,7 @@ def chart_error_example__two_errors_with_one_in_nested_layered_chart(): blue_bars = ( alt.Chart(source) - .encode(alt.X("Day:O").scale(invalidOption=10), alt.Y("Value:Q")) + .encode(alt.X("Day:O").scale(invalidOption=10), alt.Y("Value:Q")) # pyright: ignore[reportCallIssue] .mark_bar() ) red_bars = ( @@ -635,7 +635,7 @@ def chart_error_example__four_errors_hide_fourth(): .mark_bar() .encode( x=alt.X("variety", unknown=2), - y=alt.Y("sum(yield)", stack="asdf"), + y=alt.Y("sum(yield)", stack="asdf"), # pyright: ignore[reportArgumentType] color=alt.Color("variety", another_unknown=2), opacity=alt.Opacity("variety", fourth_error=1), ) diff --git a/tests/utils/test_utils.py b/tests/utils/test_utils.py index bb2d049c3..c3b329cf0 100644 --- a/tests/utils/test_utils.py +++ b/tests/utils/test_utils.py @@ -75,7 +75,7 @@ def test_sanitize_dataframe(): if str(df[col].dtype).startswith("datetime"): # astype(datetime) introduces time-zone issues: # to_datetime() does not. - utc = isinstance(df[col].dtype, pd.core.dtypes.dtypes.DatetimeTZDtype) + utc = isinstance(df[col].dtype, pd.DatetimeTZDtype) df2[col] = pd.to_datetime(df2[col], utc=utc) else: df2[col] = df2[col].astype(df[col].dtype) diff --git a/tests/vegalite/v5/test_geo_interface.py b/tests/vegalite/v5/test_geo_interface.py index 6a689a5d5..eca901e26 100644 --- a/tests/vegalite/v5/test_geo_interface.py +++ b/tests/vegalite/v5/test_geo_interface.py @@ -1,11 +1,20 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + import pytest import altair.vegalite.v5 as alt +if TYPE_CHECKING: + from collections.abc import MutableMapping + + from altair.utils.data import SupportsGeoInterface -def geom_obj(geom): + +def geom_obj(geom: dict[str, Any]) -> SupportsGeoInterface: class Geom: - pass + __geo_interface__: MutableMapping[str, Any] geom_obj = Geom() geom_obj.__geo_interface__ = geom @@ -13,8 +22,8 @@ class Geom: # correct translation of Polygon geometry to Feature type -def test_geo_interface_polygon_feature(): - geom = { +def test_geo_interface_polygon_feature() -> None: + geom: dict[str, Any] = { "coordinates": [[(0, 0), (0, 2), (2, 2), (2, 0), (0, 0)]], "type": "Polygon", } @@ -26,7 +35,7 @@ def test_geo_interface_polygon_feature(): # merge geometry with empty properties dictionary -def test_geo_interface_removal_empty_properties(): +def test_geo_interface_removal_empty_properties() -> None: geom = { "geometry": { "coordinates": [ @@ -46,7 +55,7 @@ def test_geo_interface_removal_empty_properties(): # only register metadata in the properties member -def test_geo_interface_register_foreign_member(): +def test_geo_interface_register_foreign_member() -> None: geom = { "geometry": { "coordinates": [ @@ -68,7 +77,7 @@ def test_geo_interface_register_foreign_member(): # correct serializing of arrays and nested tuples -def test_geo_interface_serializing_arrays_tuples(): +def test_geo_interface_serializing_arrays_tuples() -> None: import array as arr geom = { @@ -96,7 +105,7 @@ def test_geo_interface_serializing_arrays_tuples(): # overwrite existing 'type' value in properties with `Feature` -def test_geo_interface_reserved_members(): +def test_geo_interface_reserved_members() -> None: 
geom = { "geometry": { "coordinates": [ @@ -116,7 +125,7 @@ def test_geo_interface_reserved_members(): # an empty FeatureCollection is valid -def test_geo_interface_empty_feature_collection(): +def test_geo_interface_empty_feature_collection() -> None: geom = {"type": "FeatureCollection", "features": []} feat = geom_obj(geom) @@ -126,7 +135,7 @@ def test_geo_interface_empty_feature_collection(): # Features in a FeatureCollection only keep properties and geometry -def test_geo_interface_feature_collection(): +def test_geo_interface_feature_collection() -> None: geom = { "type": "FeatureCollection", "features": [ @@ -170,7 +179,7 @@ def test_geo_interface_feature_collection(): # notic that the index value is registerd as a commonly used identifier # with the name "id" (in this case 49). Similar to serialization of a # pandas DataFrame is the index not included in the output -def test_geo_interface_feature_collection_gdf(): +def test_geo_interface_feature_collection_gdf() -> None: geom = { "bbox": (19.89, -26.82, 29.43, -17.66), "features": [